/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "predict.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "df.h"
#include "ssa.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "regs.h"
#include "except.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "stmt.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "typeclass.h"
#include "toplev.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "internal-fn.h"
#include "cgraph.h"
#include "target.h"
#include "common/common-target.h"
#include "timevar.h"
#include "diagnostic.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "target-globals.h"
#include "params.h"
#include "tree-ssa-address.h"
#include "cfgexpand.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "ccmp.h"

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target, so force it into an
         isolated register when it might.  Likewise for any MEM input, since
         the conversion sequence might require several references to it and
         we must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
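
/* Usage sketch (illustrative only, not part of the original file):
   widening a SImode pseudo into a DImode pseudo.  UNSIGNEDP == 0
   requests sign extension, nonzero requests zero extension:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   convert_move then picks a direct extend insn, a multiword expansion,
   or a libcall, following the cases above.  */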

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
    return gen_lowpart (mode, x);

  /* Converting from an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
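
/* Example (illustrative sketch): canonicalizing a constant when
   truncating to QImode:

     rtx v = convert_modes (QImode, SImode, GEN_INT (0x1ff), 1);

   takes the constant-int path above: the value is reduced to QImode's
   8-bit precision, and since CONST_INTs are stored sign-extended the
   result is (const_int -1), i.e. the low byte 0xff.  */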

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}

/* Return the widest integer mode strictly narrower than SIZE (callers
   pass SIZE as one more than the largest piece they can use).  If no
   such mode can be found, return VOIDmode.  */

static machine_mode
widest_int_mode_for_size (unsigned int size)
{
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
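
/* E.g. on a target whose integer modes are QI/HI/SI/DI of sizes
   1/2/4/8, widest_int_mode_for_size (5) is SImode, and
   widest_int_mode_for_size (1) is VOIDmode.  */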

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
                                                 optimize_insn_for_speed_p ());
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  machine_mode to_addr_mode;
  machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
      if (STACK_GROWS_DOWNWARD)
        data.reverse = 1;
      else
        data.reverse = 0;
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
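
/* Usage sketch (illustrative, with hypothetical MEMs): copy 16
   constant bytes from SRC_MEM to DST_MEM, returning DST_MEM
   (ENDP == 0):

     rtx r = move_by_pieces (dst_mem, src_mem, 16,
                             MEM_ALIGN (dst_mem), 0);

   Callers normally guard this with can_move_by_pieces (16, align).  */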

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
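
/* Worked example (assuming MOVE_MAX_PIECES == 4 and alignment good
   enough for SImode): for L == 11 the loop above counts
   11/4 = 2 SImode moves, then 3/2 = 1 HImode move, then 1/1 = 1 QImode
   move, so move_by_pieces_ninsns returns 4.  */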

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of the block to move.
   MAX_SIZE is the maximal size of the block to move; if it cannot be
   represented in an unsigned HOST_WIDE_INT, it is the mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
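
/* Usage sketch (illustrative, with hypothetical BLKmode MEMs DST and
   SRC): emit a straightforward copy of N bytes:

     emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   The hints variant above then chooses between move_by_pieces, a
   movmem pattern, a memcpy libcall, or an explicit loop.  */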

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
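
/* For reference, a register group as handled here has the shape
   (sketch, with arbitrary modes and offsets):

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   i.e. each element pairs a register with its byte offset within the
   containing value; gen_group_rtx above replaces each register with a
   fresh pseudo of the same mode.  */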

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
2027 break;
2031 /* Handle trailing fragments that run over the size of the struct. */
2032 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2034 /* store_bit_field always takes its value from the lsb.
2035 Move the fragment to the lsb if it's not already there. */
2036 if (
2037 #ifdef BLOCK_REG_PADDING
2038 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2039 == (BYTES_BIG_ENDIAN ? upward : downward)
2040 #else
2041 BYTES_BIG_ENDIAN
2042 #endif
2045 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2046 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2047 shift, tmps[i], 0);
2050 /* Make sure not to write past the end of the struct. */
2051 store_bit_field (dest,
2052 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2053 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2054 VOIDmode, tmps[i]);
2057 /* Optimize the access just a bit. */
2058 else if (MEM_P (dest)
2059 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2060 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2061 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2062 && bytelen == GET_MODE_SIZE (mode))
2063 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2065 else
2066 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2067 0, 0, mode, tmps[i]);
2070 /* Copy from the pseudo into the (probable) hard reg. */
2071 if (orig_dst != dst)
2072 emit_move_insn (orig_dst, dst);
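
/* An illustrative, standalone C sketch (not part of expr.c) of the
   trailing-fragment clamp used by emit_group_store above: each
   PARALLEL entry carries a byte offset and a piece of some mode, and
   a final piece that runs past SSIZE may only store SSIZE - BYTEPOS
   bytes.  The struct and the 12-byte example are hypothetical.  */
#include <stdio.h>

struct piece { long bytepos; unsigned long bytelen; };

static unsigned long
adjusted_bytelen (struct piece p, long ssize)
{
  /* Mirrors: if (ssize >= 0 && bytepos + bytelen > ssize)
		adj_bytelen = ssize - bytepos;  */
  if (ssize >= 0 && p.bytepos + (long) p.bytelen > ssize)
    return (unsigned long) (ssize - p.bytepos);
  return p.bytelen;
}

int
main (void)
{
  /* A 12-byte struct returned in two 8-byte registers: the second
     piece carries only 4 meaningful bytes.  */
  struct piece p0 = { 0, 8 }, p1 = { 8, 8 };
  printf ("%lu %lu\n", adjusted_bytelen (p0, 12),
	  adjusted_bytelen (p1, 12));	/* prints: 8 4 */
  return 0;
}
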
2075 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2076 of the value stored in X. */
2078 rtx
2079 maybe_emit_group_store (rtx x, tree type)
2081 machine_mode mode = TYPE_MODE (type);
2082 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2083 if (GET_CODE (x) == PARALLEL)
2085 rtx result = gen_reg_rtx (mode);
2086 emit_group_store (result, x, type, int_size_in_bytes (type));
2087 return result;
2089 return x;
2092 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2094 This is used on targets that return BLKmode values in registers. */
2096 void
2097 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2099 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2100 rtx src = NULL, dst = NULL;
2101 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2102 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2103 machine_mode mode = GET_MODE (srcreg);
2104 machine_mode tmode = GET_MODE (target);
2105 machine_mode copy_mode;
2107 /* BLKmode registers created in the back-end shouldn't have survived. */
2108 gcc_assert (mode != BLKmode);
2110 /* If the structure doesn't take up a whole number of words, see whether
2111 SRCREG is padded on the left or on the right. If it's on the left,
2112 set PADDING_CORRECTION to the number of bits to skip.
2114 In most ABIs, the structure will be returned at the least significant end of
2115 the register, which translates to right padding on little-endian
2116 targets and left padding on big-endian targets. The opposite
2117 holds if the structure is returned at the most significant
2118 end of the register. */
2119 if (bytes % UNITS_PER_WORD != 0
2120 && (targetm.calls.return_in_msb (type)
2121 ? !BYTES_BIG_ENDIAN
2122 : BYTES_BIG_ENDIAN))
2123 padding_correction
2124 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2126 /* We can use a single move if we have an exact mode for the size. */
2127 else if (MEM_P (target)
2128 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2129 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2130 && bytes == GET_MODE_SIZE (mode))
2132 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2133 return;
2136 /* And if we additionally have the same mode for a register. */
2137 else if (REG_P (target)
2138 && GET_MODE (target) == mode
2139 && bytes == GET_MODE_SIZE (mode))
2141 emit_move_insn (target, srcreg);
2142 return;
2145 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2146 into a new pseudo which is a full word. */
2147 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2149 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2150 mode = word_mode;
2153 /* Copy the structure BITSIZE bits at a time. If the target lives in
2154 memory, take care of not reading/writing past its end by selecting
2155 a copy mode suited to BITSIZE. This should always be possible given
2156 how it is computed.
2158 If the target lives in a register, make sure not to select a copy mode
2159 larger than the mode of the register.
2161 We could probably emit more efficient code for machines which do not use
2162 strict alignment, but it doesn't seem worth the effort at the current
2163 time. */
2165 copy_mode = word_mode;
2166 if (MEM_P (target))
2168 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2169 if (mem_mode != BLKmode)
2170 copy_mode = mem_mode;
2172 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2173 copy_mode = tmode;
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2183 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2185 /* We need a new destination operand each time bitpos is on
2186 a word boundary. */
2187 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2188 dst = target;
2189 else if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified). */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, copy_mode, copy_mode));
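
/* An illustrative, standalone C sketch of the PADDING_CORRECTION
   formula used above.  For a structure of BYTES bytes that does not
   fill a whole word, the number of padding bits on the more
   significant side is BITS_PER_WORD - (BYTES % UNITS_PER_WORD) *
   BITS_PER_UNIT.  The constants below assume a hypothetical 64-bit
   target.  */
#include <stdio.h>

#define UNITS_PER_WORD 8		/* assumed 64-bit target */
#define BITS_PER_UNIT 8
#define BITS_PER_WORD (UNITS_PER_WORD * BITS_PER_UNIT)

int
main (void)
{
  unsigned bytes = 5;			/* e.g. a 5-byte struct */
  unsigned padding_correction = 0;
  if (bytes % UNITS_PER_WORD != 0)
    padding_correction
      = BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT;
  printf ("%u bits skipped\n", padding_correction);	/* 24 bits skipped */
  return 0;
}
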
2201 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2202 register if it contains any data, otherwise return null.
2204 This is used on targets that return BLKmode values in registers. */
2206 rtx
2207 copy_blkmode_to_reg (machine_mode mode, tree src)
2209 int i, n_regs;
2210 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2211 unsigned int bitsize;
2212 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2213 machine_mode dst_mode;
2215 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2217 x = expand_normal (src);
2219 bytes = int_size_in_bytes (TREE_TYPE (src));
2220 if (bytes == 0)
2221 return NULL_RTX;
2223 /* If the structure doesn't take up a whole number of words, see
2224 whether the register value should be padded on the left or on
2225 the right. Set PADDING_CORRECTION to the number of padding
2226 bits needed on the left side.
2228 In most ABIs, the structure will be returned at the least significant end of
2229 the register, which translates to right padding on little-endian
2230 targets and left padding on big-endian targets. The opposite
2231 holds if the structure is returned at the most significant
2232 end of the register. */
2233 if (bytes % UNITS_PER_WORD != 0
2234 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2235 ? !BYTES_BIG_ENDIAN
2236 : BYTES_BIG_ENDIAN))
2237 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2238 * BITS_PER_UNIT));
2240 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2241 dst_words = XALLOCAVEC (rtx, n_regs);
2242 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2244 /* Copy the structure BITSIZE bits at a time. */
2245 for (bitpos = 0, xbitpos = padding_correction;
2246 bitpos < bytes * BITS_PER_UNIT;
2247 bitpos += bitsize, xbitpos += bitsize)
2249 /* We need a new destination pseudo each time xbitpos is
2250 on a word boundary and when xbitpos == padding_correction
2251 (the first time through). */
2252 if (xbitpos % BITS_PER_WORD == 0
2253 || xbitpos == padding_correction)
2255 /* Generate an appropriate register. */
2256 dst_word = gen_reg_rtx (word_mode);
2257 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2259 /* Clear the destination before we move anything into it. */
2260 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2263 /* We need a new source operand each time bitpos is on a word
2264 boundary. */
2265 if (bitpos % BITS_PER_WORD == 0)
2266 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2268 /* Use bitpos for the source extraction (left justified) and
2269 xbitpos for the destination store (right justified). */
2270 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2271 0, 0, word_mode,
2272 extract_bit_field (src_word, bitsize,
2273 bitpos % BITS_PER_WORD, 1,
2274 NULL_RTX, word_mode, word_mode));
2277 if (mode == BLKmode)
2279 /* Find the smallest integer mode large enough to hold the
2280 entire structure. */
2281 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2282 mode != VOIDmode;
2283 mode = GET_MODE_WIDER_MODE (mode))
2284 /* Have we found a large enough mode? */
2285 if (GET_MODE_SIZE (mode) >= bytes)
2286 break;
2288 /* A suitable mode should have been found. */
2289 gcc_assert (mode != VOIDmode);
2292 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2293 dst_mode = word_mode;
2294 else
2295 dst_mode = mode;
2296 dst = gen_reg_rtx (dst_mode);
2298 for (i = 0; i < n_regs; i++)
2299 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2301 if (mode != dst_mode)
2302 dst = gen_lowpart (mode, dst);
2304 return dst;
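
/* An illustrative, standalone C sketch of the mode search above,
   which walks MODE_INT from narrowest to widest and stops at the
   first mode whose size covers the structure.  The mode sizes are
   assumed QI/HI/SI/DI/TI byte widths, and BYTES is assumed to fit
   the widest one.  */
#include <stdio.h>

int
main (void)
{
  static const unsigned mode_size[] = { 1, 2, 4, 8, 16 };
  unsigned bytes = 6, i;
  for (i = 0; i < sizeof mode_size / sizeof *mode_size; i++)
    if (mode_size[i] >= bytes)		/* have we found a large enough mode? */
      break;
  printf ("%u-byte mode\n", mode_size[i]);	/* prints: 8-byte mode */
  return 0;
}
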
2307 /* Add a USE expression for REG to the (possibly empty) list pointed
2308 to by CALL_FUSAGE. REG must denote a hard register. */
2310 void
2311 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2313 gcc_assert (REG_P (reg));
2315 if (!HARD_REGISTER_P (reg))
2316 return;
2318 *call_fusage
2319 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2322 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2323 to by CALL_FUSAGE. REG must denote a hard register. */
2325 void
2326 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2328 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2330 *call_fusage
2331 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2334 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2335 starting at REGNO. All of these registers must be hard registers. */
2337 void
2338 use_regs (rtx *call_fusage, int regno, int nregs)
2340 int i;
2342 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2344 for (i = 0; i < nregs; i++)
2345 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2348 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2349 PARALLEL REGS. This is for calls that pass values in multiple
2350 non-contiguous locations. The Irix 6 ABI has examples of this. */
2352 void
2353 use_group_regs (rtx *call_fusage, rtx regs)
2355 int i;
2357 for (i = 0; i < XVECLEN (regs, 0); i++)
2359 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2361 /* A NULL entry means the parameter goes both on the stack and in
2362 registers. This can also be a MEM for targets that pass values
2363 partially on the stack and partially in registers. */
2364 if (reg != 0 && REG_P (reg))
2365 use_reg (call_fusage, reg);
2369 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2370 assignment and the code of the expression on the RHS is CODE. Return
2371 NULL otherwise. */
2373 static gimple
2374 get_def_for_expr (tree name, enum tree_code code)
2376 gimple def_stmt;
2378 if (TREE_CODE (name) != SSA_NAME)
2379 return NULL;
2381 def_stmt = get_gimple_for_ssa_name (name);
2382 if (!def_stmt
2383 || gimple_assign_rhs_code (def_stmt) != code)
2384 return NULL;
2386 return def_stmt;
2389 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2390 assignment and the class of the expression on the RHS is CLASS. Return
2391 NULL otherwise. */
2393 static gimple
2394 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2396 gimple def_stmt;
2398 if (TREE_CODE (name) != SSA_NAME)
2399 return NULL;
2401 def_stmt = get_gimple_for_ssa_name (name);
2402 if (!def_stmt
2403 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2404 return NULL;
2406 return def_stmt;
2410 /* Determine whether the LEN bytes generated by CONSTFUN can be
2411 stored to memory using several move instructions. CONSTFUNDATA is
2412 a pointer which will be passed as an argument in every CONSTFUN call.
2413 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2414 a memset operation and false if it's a copy of a constant string.
2415 Return nonzero if a call to store_by_pieces should succeed. */
2417 int
2418 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2419 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2420 void *constfundata, unsigned int align, bool memsetp)
2422 unsigned HOST_WIDE_INT l;
2423 unsigned int max_size;
2424 HOST_WIDE_INT offset = 0;
2425 machine_mode mode;
2426 enum insn_code icode;
2427 int reverse;
2428 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2429 rtx cst ATTRIBUTE_UNUSED;
2431 if (len == 0)
2432 return 1;
2434 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2435 memsetp
2436 ? SET_BY_PIECES
2437 : STORE_BY_PIECES,
2438 optimize_insn_for_speed_p ()))
2439 return 0;
2441 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2443 /* We would first store what we can in the largest integer mode, then go to
2444 successively smaller modes. */
2446 for (reverse = 0;
2447 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2448 reverse++)
2450 l = len;
2451 max_size = STORE_MAX_PIECES + 1;
2452 while (max_size > 1 && l > 0)
2454 mode = widest_int_mode_for_size (max_size);
2456 if (mode == VOIDmode)
2457 break;
2459 icode = optab_handler (mov_optab, mode);
2460 if (icode != CODE_FOR_nothing
2461 && align >= GET_MODE_ALIGNMENT (mode))
2463 unsigned int size = GET_MODE_SIZE (mode);
2465 while (l >= size)
2467 if (reverse)
2468 offset -= size;
2470 cst = (*constfun) (constfundata, offset, mode);
2471 if (!targetm.legitimate_constant_p (mode, cst))
2472 return 0;
2474 if (!reverse)
2475 offset += size;
2477 l -= size;
2481 max_size = GET_MODE_SIZE (mode);
2484 /* The code above should have handled everything. */
2485 gcc_assert (!l);
2488 return 1;
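
/* An illustrative, standalone C sketch of the chunking strategy
   checked above: starting from the widest integer mode that fits,
   consume as many full chunks as possible, then fall to successively
   smaller power-of-two sizes until nothing is left.  STORE_MAX_PIECES
   is assumed to be 8 here, and alignment checks are omitted.  */
#include <stdio.h>

int
main (void)
{
  unsigned long l = 11;			/* bytes to store */
  unsigned max_size = 8 + 1;		/* STORE_MAX_PIECES + 1 (assumed) */
  while (max_size > 1 && l > 0)
    {
      unsigned size = 1;
      while (size * 2 < max_size)	/* widest power of two below max_size */
	size *= 2;
      while (l >= size)
	{
	  printf ("store %u bytes\n", size);	/* 8, then 2, then 1 */
	  l -= size;
	}
      max_size = size;
    }
  return 0;
}
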
2491 /* Generate several move instructions to store LEN bytes generated by
2492 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2493 pointer which will be passed as an argument in every CONSTFUN call.
2494 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2495 a memset operation and false if it's a copy of a constant string.
2496 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2497 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2498 stpcpy. */
2500 rtx
2501 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2502 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2503 void *constfundata, unsigned int align, bool memsetp, int endp)
2505 machine_mode to_addr_mode = get_address_mode (to);
2506 struct store_by_pieces_d data;
2508 if (len == 0)
2510 gcc_assert (endp != 2);
2511 return to;
2514 gcc_assert (targetm.use_by_pieces_infrastructure_p
2515 (len, align,
2516 memsetp
2517 ? SET_BY_PIECES
2518 : STORE_BY_PIECES,
2519 optimize_insn_for_speed_p ()));
2521 data.constfun = constfun;
2522 data.constfundata = constfundata;
2523 data.len = len;
2524 data.to = to;
2525 store_by_pieces_1 (&data, align);
2526 if (endp)
2528 rtx to1;
2530 gcc_assert (!data.reverse);
2531 if (data.autinc_to)
2533 if (endp == 2)
2535 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2536 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2537 else
2538 data.to_addr = copy_to_mode_reg (to_addr_mode,
2539 plus_constant (to_addr_mode,
2540 data.to_addr,
2541 -1));
2543 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2544 data.offset);
2546 else
2548 if (endp == 2)
2549 --data.offset;
2550 to1 = adjust_address (data.to, QImode, data.offset);
2552 return to1;
2554 else
2555 return data.to;
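
/* An illustrative, standalone C sketch of the three ENDP conventions
   above, shown with the corresponding libc functions: ENDP == 0
   behaves like memcpy (return the start), ENDP == 1 like mempcpy
   (one past the end), ENDP == 2 like stpcpy (the last byte written,
   i.e. the terminating NUL).  mempcpy is a GNU extension, hence the
   feature-test macro.  */
#define _GNU_SOURCE			/* for mempcpy */
#include <stdio.h>
#include <string.h>

int
main (void)
{
  char buf[16];
  char *start = memcpy (buf, "abc", 4);	/* ENDP == 0: the start */
  char *end = mempcpy (buf, "abc", 4);	/* ENDP == 1: one past the end */
  char *last = stpcpy (buf, "abc");	/* ENDP == 2: last byte written */
  printf ("%td %td %td\n", start - buf, end - buf, last - buf);	/* 0 4 3 */
  return 0;
}
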
2558 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2559 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2561 static void
2562 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2564 struct store_by_pieces_d data;
2566 if (len == 0)
2567 return;
2569 data.constfun = clear_by_pieces_1;
2570 data.constfundata = NULL;
2571 data.len = len;
2572 data.to = to;
2573 store_by_pieces_1 (&data, align);
2576 /* Callback routine for clear_by_pieces.
2577 Return const0_rtx unconditionally. */
2579 static rtx
2580 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2581 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2582 machine_mode mode ATTRIBUTE_UNUSED)
2584 return const0_rtx;
2587 /* Subroutine of clear_by_pieces and store_by_pieces.
2588 Generate several move instructions to store LEN bytes of block TO. (A MEM
2589 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2591 static void
2592 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2593 unsigned int align ATTRIBUTE_UNUSED)
2595 machine_mode to_addr_mode = get_address_mode (data->to);
2596 rtx to_addr = XEXP (data->to, 0);
2597 unsigned int max_size = STORE_MAX_PIECES + 1;
2598 enum insn_code icode;
2600 data->offset = 0;
2601 data->to_addr = to_addr;
2602 data->autinc_to
2603 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2604 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2606 data->explicit_inc_to = 0;
2607 data->reverse
2608 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2609 if (data->reverse)
2610 data->offset = data->len;
2612 /* If storing requires more than two move insns,
2613 copy addresses to registers (to make displacements shorter)
2614 and use post-increment if available. */
2615 if (!data->autinc_to
2616 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2618 /* Determine the main mode we'll be using.
2619 MODE might not be used depending on the definitions of the
2620 USE_* macros below. */
2621 machine_mode mode ATTRIBUTE_UNUSED
2622 = widest_int_mode_for_size (max_size);
2624 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2626 data->to_addr = copy_to_mode_reg (to_addr_mode,
2627 plus_constant (to_addr_mode,
2628 to_addr,
2629 data->len));
2630 data->autinc_to = 1;
2631 data->explicit_inc_to = -1;
2634 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2635 && ! data->autinc_to)
2637 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2638 data->autinc_to = 1;
2639 data->explicit_inc_to = 1;
2642 if ( !data->autinc_to && CONSTANT_P (to_addr))
2643 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2646 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2648 /* First store what we can in the largest integer mode, then go to
2649 successively smaller modes. */
2651 while (max_size > 1 && data->len > 0)
2653 machine_mode mode = widest_int_mode_for_size (max_size);
2655 if (mode == VOIDmode)
2656 break;
2658 icode = optab_handler (mov_optab, mode);
2659 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2660 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2662 max_size = GET_MODE_SIZE (mode);
2665 /* The code above should have handled everything. */
2666 gcc_assert (!data->len);
2669 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2670 with move instructions for mode MODE. GENFUN is the gen_... function
2671 to make a move insn for that mode. DATA has all the other info. */
2673 static void
2674 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2675 struct store_by_pieces_d *data)
2677 unsigned int size = GET_MODE_SIZE (mode);
2678 rtx to1, cst;
2680 while (data->len >= size)
2682 if (data->reverse)
2683 data->offset -= size;
2685 if (data->autinc_to)
2686 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2687 data->offset);
2688 else
2689 to1 = adjust_address (data->to, mode, data->offset);
2691 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2692 emit_insn (gen_add2_insn (data->to_addr,
2693 gen_int_mode (-(HOST_WIDE_INT) size,
2694 GET_MODE (data->to_addr))));
2696 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2697 emit_insn ((*genfun) (to1, cst));
2699 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2700 emit_insn (gen_add2_insn (data->to_addr,
2701 gen_int_mode (size,
2702 GET_MODE (data->to_addr))));
2704 if (! data->reverse)
2705 data->offset += size;
2707 data->len -= size;
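
/* An illustrative, standalone C sketch of the forward/reverse offset
   walk used by store_by_pieces_2 above: a reverse walk (as with
   pre-decrement addressing) starts the offset at LEN and moves it
   down before each store, a forward walk moves it up after each
   store.  The 6-byte length and 2-byte piece size are assumptions.  */
#include <stdio.h>

int
main (void)
{
  unsigned long len = 6, size = 2, offset;
  int reverse = 1;
  offset = reverse ? len : 0;
  while (len >= size)
    {
      if (reverse)
	offset -= size;
      printf ("store %lu bytes at offset %lu\n", size, offset);	/* 4, 2, 0 */
      if (!reverse)
	offset += size;
      len -= size;
    }
  return 0;
}
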
2711 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2712 its length in bytes. */
2714 rtx
2715 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2716 unsigned int expected_align, HOST_WIDE_INT expected_size,
2717 unsigned HOST_WIDE_INT min_size,
2718 unsigned HOST_WIDE_INT max_size,
2719 unsigned HOST_WIDE_INT probable_max_size)
2721 machine_mode mode = GET_MODE (object);
2722 unsigned int align;
2724 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2726 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2727 just move a zero. Otherwise, do this a piece at a time. */
2728 if (mode != BLKmode
2729 && CONST_INT_P (size)
2730 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2732 rtx zero = CONST0_RTX (mode);
2733 if (zero != NULL)
2735 emit_move_insn (object, zero);
2736 return NULL;
2739 if (COMPLEX_MODE_P (mode))
2741 zero = CONST0_RTX (GET_MODE_INNER (mode));
2742 if (zero != NULL)
2744 write_complex_part (object, zero, 0);
2745 write_complex_part (object, zero, 1);
2746 return NULL;
2751 if (size == const0_rtx)
2752 return NULL;
2754 align = MEM_ALIGN (object);
2756 if (CONST_INT_P (size)
2757 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2758 CLEAR_BY_PIECES,
2759 optimize_insn_for_speed_p ()))
2760 clear_by_pieces (object, INTVAL (size), align);
2761 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2762 expected_align, expected_size,
2763 min_size, max_size, probable_max_size))
2765 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2766 return set_storage_via_libcall (object, size, const0_rtx,
2767 method == BLOCK_OP_TAILCALL);
2768 else
2769 gcc_unreachable ();
2771 return NULL;
2774 rtx
2775 clear_storage (rtx object, rtx size, enum block_op_methods method)
2777 unsigned HOST_WIDE_INT max, min = 0;
2778 if (GET_CODE (size) == CONST_INT)
2779 min = max = UINTVAL (size);
2780 else
2781 max = GET_MODE_MASK (GET_MODE (size));
2782 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
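
/* An illustrative, standalone C sketch of how clear_storage derives
   the MIN_SIZE/MAX_SIZE hints above: a compile-time constant size
   pins both bounds, while a variable size of machine mode M is only
   known to fit within M's mode mask.  The 32-bit mask is an assumed
   stand-in for GET_MODE_MASK of SImode.  */
#include <stdio.h>

int
main (void)
{
  int size_is_constant = 0;		/* i.e. GET_CODE (size) == CONST_INT */
  unsigned long size = 40, min = 0, max;
  if (size_is_constant)
    min = max = size;
  else
    max = 0xffffffffUL;			/* mask of an assumed SImode */
  printf ("min=%lu max=%lu\n", min, max);
  return 0;
}
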
2786 /* A subroutine of clear_storage. Expand a call to memset.
2787 Return the return value of memset, 0 otherwise. */
2789 rtx
2790 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2792 tree call_expr, fn, object_tree, size_tree, val_tree;
2793 machine_mode size_mode;
2794 rtx retval;
2796 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2797 place those new pseudos into a VAR_DECL and use them later. */
2799 object = copy_addr_to_reg (XEXP (object, 0));
2801 size_mode = TYPE_MODE (sizetype);
2802 size = convert_to_mode (size_mode, size, 1);
2803 size = copy_to_mode_reg (size_mode, size);
2805 /* It is incorrect to use the libcall calling conventions to call
2806 memset in this context. This could be a user call to memset and
2807 the user may wish to examine the return value from memset. For
2808 targets where libcalls and normal calls have different conventions
2809 for returning pointers, we could end up generating incorrect code. */
2811 object_tree = make_tree (ptr_type_node, object);
2812 if (!CONST_INT_P (val))
2813 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2814 size_tree = make_tree (sizetype, size);
2815 val_tree = make_tree (integer_type_node, val);
2817 fn = clear_storage_libcall_fn (true);
2818 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2819 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2821 retval = expand_normal (call_expr);
2823 return retval;
2826 /* A subroutine of set_storage_via_libcall. Create the tree node
2827 for the function we use for block clears. */
2829 tree block_clear_fn;
2831 void
2832 init_block_clear_fn (const char *asmspec)
2834 if (!block_clear_fn)
2836 tree fn, args;
2838 fn = get_identifier ("memset");
2839 args = build_function_type_list (ptr_type_node, ptr_type_node,
2840 integer_type_node, sizetype,
2841 NULL_TREE);
2843 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2844 DECL_EXTERNAL (fn) = 1;
2845 TREE_PUBLIC (fn) = 1;
2846 DECL_ARTIFICIAL (fn) = 1;
2847 TREE_NOTHROW (fn) = 1;
2848 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2849 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2851 block_clear_fn = fn;
2854 if (asmspec)
2855 set_user_assembler_name (block_clear_fn, asmspec);
2858 static tree
2859 clear_storage_libcall_fn (int for_call)
2861 static bool emitted_extern;
2863 if (!block_clear_fn)
2864 init_block_clear_fn (NULL);
2866 if (for_call && !emitted_extern)
2868 emitted_extern = true;
2869 make_decl_rtl (block_clear_fn);
2872 return block_clear_fn;
2875 /* Expand a setmem pattern; return true if successful. */
2877 bool
2878 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2879 unsigned int expected_align, HOST_WIDE_INT expected_size,
2880 unsigned HOST_WIDE_INT min_size,
2881 unsigned HOST_WIDE_INT max_size,
2882 unsigned HOST_WIDE_INT probable_max_size)
2884 /* Try the most limited insn first, because there's no point
2885 including more than one in the machine description unless
2886 the more limited one has some advantage. */
2888 machine_mode mode;
2890 if (expected_align < align)
2891 expected_align = align;
2892 if (expected_size != -1)
2894 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2895 expected_size = max_size;
2896 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2897 expected_size = min_size;
2900 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2901 mode = GET_MODE_WIDER_MODE (mode))
2903 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2905 if (code != CODE_FOR_nothing
2906 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2907 here because if SIZE is less than the mode mask, as it is
2908 returned by the macro, it will definitely be less than the
2909 actual mode mask. Since SIZE is within the Pmode address
2910 space, we limit MODE to Pmode. */
2911 && ((CONST_INT_P (size)
2912 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2913 <= (GET_MODE_MASK (mode) >> 1)))
2914 || max_size <= (GET_MODE_MASK (mode) >> 1)
2915 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2917 struct expand_operand ops[9];
2918 unsigned int nops;
2920 nops = insn_data[(int) code].n_generator_args;
2921 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2923 create_fixed_operand (&ops[0], object);
2924 /* The check above guarantees that this size conversion is valid. */
2925 create_convert_operand_to (&ops[1], size, mode, true);
2926 create_convert_operand_from (&ops[2], val, byte_mode, true);
2927 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2928 if (nops >= 6)
2930 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2931 create_integer_operand (&ops[5], expected_size);
2933 if (nops >= 8)
2935 create_integer_operand (&ops[6], min_size);
2936 /* If we cannot represent the maximal size,
2937 make the parameter NULL. */
2938 if ((HOST_WIDE_INT) max_size != -1)
2939 create_integer_operand (&ops[7], max_size);
2940 else
2941 create_fixed_operand (&ops[7], NULL);
2943 if (nops == 9)
2945 /* If we cannot represent the maximal size,
2946 make the parameter NULL. */
2947 if ((HOST_WIDE_INT) probable_max_size != -1)
2948 create_integer_operand (&ops[8], probable_max_size);
2949 else
2950 create_fixed_operand (&ops[8], NULL);
2952 if (maybe_expand_insn (code, nops, ops))
2953 return true;
2957 return false;
2961 /* Write to one of the components of the complex value CPLX. Write VAL to
2962 the real part if IMAG_P is false, and the imaginary part if it's true. */
2964 void
2965 write_complex_part (rtx cplx, rtx val, bool imag_p)
2967 machine_mode cmode;
2968 machine_mode imode;
2969 unsigned ibitsize;
2971 if (GET_CODE (cplx) == CONCAT)
2973 emit_move_insn (XEXP (cplx, imag_p), val);
2974 return;
2977 cmode = GET_MODE (cplx);
2978 imode = GET_MODE_INNER (cmode);
2979 ibitsize = GET_MODE_BITSIZE (imode);
2981 /* For MEMs simplify_gen_subreg may generate an invalid new address
2982 because, e.g., the original address is considered mode-dependent
2983 by the target, which restricts simplify_subreg from invoking
2984 adjust_address_nv. Instead of preparing fallback support for an
2985 invalid address, we call adjust_address_nv directly. */
2986 if (MEM_P (cplx))
2988 emit_move_insn (adjust_address_nv (cplx, imode,
2989 imag_p ? GET_MODE_SIZE (imode) : 0),
2990 val);
2991 return;
2994 /* If the sub-object is at least word sized, then we know that subregging
2995 will work. This special case is important, since store_bit_field
2996 wants to operate on integer modes, and there's rarely an OImode to
2997 correspond to TCmode. */
2998 if (ibitsize >= BITS_PER_WORD
2999 /* For hard regs we have exact predicates. Assume we can split
3000 the original object if it spans an even number of hard regs.
3001 This special case is important for SCmode on 64-bit platforms
3002 where the natural size of floating-point regs is 32-bit. */
3003 || (REG_P (cplx)
3004 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3005 && REG_NREGS (cplx) % 2 == 0))
3007 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3008 imag_p ? GET_MODE_SIZE (imode) : 0);
3009 if (part)
3011 emit_move_insn (part, val);
3012 return;
3014 else
3015 /* simplify_gen_subreg may fail for sub-word MEMs. */
3016 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3019 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
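
/* An illustrative, standalone C99 sketch of the layout assumed above:
   the real part always precedes the imaginary part in memory, so the
   imaginary part lives at byte offset GET_MODE_SIZE of the inner
   mode.  C99 guarantees a complex type is laid out as an array of
   two elements, real first.  */
#include <complex.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  double complex z = 1.0 + 2.0 * I;
  double parts[2];
  memcpy (parts, &z, sizeof z);		/* parts[0] = real, parts[1] = imag */
  printf ("%g %g\n", parts[0], parts[1]);	/* prints: 1 2 */
  return 0;
}
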
3022 /* Extract one of the components of the complex value CPLX. Extract the
3023 real part if IMAG_P is false, and the imaginary part if it's true. */
3025 static rtx
3026 read_complex_part (rtx cplx, bool imag_p)
3028 machine_mode cmode, imode;
3029 unsigned ibitsize;
3031 if (GET_CODE (cplx) == CONCAT)
3032 return XEXP (cplx, imag_p);
3034 cmode = GET_MODE (cplx);
3035 imode = GET_MODE_INNER (cmode);
3036 ibitsize = GET_MODE_BITSIZE (imode);
3038 /* Special case reads from complex constants that got spilled to memory. */
3039 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3041 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3042 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3044 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3045 if (CONSTANT_CLASS_P (part))
3046 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3050 /* For MEMs simplify_gen_subreg may generate an invalid new address
3051 because, e.g., the original address is considered mode-dependent
3052 by the target, which restricts simplify_subreg from invoking
3053 adjust_address_nv. Instead of preparing fallback support for an
3054 invalid address, we call adjust_address_nv directly. */
3055 if (MEM_P (cplx))
3056 return adjust_address_nv (cplx, imode,
3057 imag_p ? GET_MODE_SIZE (imode) : 0);
3059 /* If the sub-object is at least word sized, then we know that subregging
3060 will work. This special case is important, since extract_bit_field
3061 wants to operate on integer modes, and there's rarely an OImode to
3062 correspond to TCmode. */
3063 if (ibitsize >= BITS_PER_WORD
3064 /* For hard regs we have exact predicates. Assume we can split
3065 the original object if it spans an even number of hard regs.
3066 This special case is important for SCmode on 64-bit platforms
3067 where the natural size of floating-point regs is 32-bit. */
3068 || (REG_P (cplx)
3069 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3070 && REG_NREGS (cplx) % 2 == 0))
3072 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3073 imag_p ? GET_MODE_SIZE (imode) : 0);
3074 if (ret)
3075 return ret;
3076 else
3077 /* simplify_gen_subreg may fail for sub-word MEMs. */
3078 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3081 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3082 true, NULL_RTX, imode, imode);
3085 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3086 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3087 represented in NEW_MODE. If FORCE is true, this will never happen, as
3088 we'll force-create a SUBREG if needed. */
3090 static rtx
3091 emit_move_change_mode (machine_mode new_mode,
3092 machine_mode old_mode, rtx x, bool force)
3094 rtx ret;
3096 if (push_operand (x, GET_MODE (x)))
3098 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3099 MEM_COPY_ATTRIBUTES (ret, x);
3101 else if (MEM_P (x))
3103 /* We don't have to worry about changing the address since the
3104 size in bytes is supposed to be the same. */
3105 if (reload_in_progress)
3107 /* Copy the MEM to change the mode and move any
3108 substitutions from the old MEM to the new one. */
3109 ret = adjust_address_nv (x, new_mode, 0);
3110 copy_replacements (x, ret);
3112 else
3113 ret = adjust_address (x, new_mode, 0);
3115 else
3117 /* Note that we do want simplify_subreg's behavior of validating
3118 that the new mode is ok for a hard register. If we were to use
3119 simplify_gen_subreg, we would create the subreg, but would
3120 probably run into the target not being able to implement it. */
3121 /* Except, of course, when FORCE is true, in which case this is exactly
3122 what we want; that is needed for CCmodes on some targets. */
3123 if (force)
3124 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3125 else
3126 ret = simplify_subreg (new_mode, x, old_mode, 0);
3129 return ret;
3132 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3133 an integer mode of the same size as MODE. Returns the instruction
3134 emitted, or NULL if such a move could not be generated. */
3136 static rtx_insn *
3137 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3139 machine_mode imode;
3140 enum insn_code code;
3142 /* There must exist a mode of the exact size we require. */
3143 imode = int_mode_for_mode (mode);
3144 if (imode == BLKmode)
3145 return NULL;
3147 /* The target must support moves in this mode. */
3148 code = optab_handler (mov_optab, imode);
3149 if (code == CODE_FOR_nothing)
3150 return NULL;
3152 x = emit_move_change_mode (imode, mode, x, force);
3153 if (x == NULL_RTX)
3154 return NULL;
3155 y = emit_move_change_mode (imode, mode, y, force);
3156 if (y == NULL_RTX)
3157 return NULL;
3158 return emit_insn (GEN_FCN (code) (x, y));
3161 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3162 Return an equivalent MEM that does not use an auto-increment. */
3164 rtx
3165 emit_move_resolve_push (machine_mode mode, rtx x)
3167 enum rtx_code code = GET_CODE (XEXP (x, 0));
3168 HOST_WIDE_INT adjust;
3169 rtx temp;
3171 adjust = GET_MODE_SIZE (mode);
3172 #ifdef PUSH_ROUNDING
3173 adjust = PUSH_ROUNDING (adjust);
3174 #endif
3175 if (code == PRE_DEC || code == POST_DEC)
3176 adjust = -adjust;
3177 else if (code == PRE_MODIFY || code == POST_MODIFY)
3179 rtx expr = XEXP (XEXP (x, 0), 1);
3180 HOST_WIDE_INT val;
3182 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3183 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3184 val = INTVAL (XEXP (expr, 1));
3185 if (GET_CODE (expr) == MINUS)
3186 val = -val;
3187 gcc_assert (adjust == val || adjust == -val);
3188 adjust = val;
3191 /* Do not use anti_adjust_stack, since we don't want to update
3192 stack_pointer_delta. */
3193 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3194 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3195 0, OPTAB_LIB_WIDEN);
3196 if (temp != stack_pointer_rtx)
3197 emit_move_insn (stack_pointer_rtx, temp);
3199 switch (code)
3201 case PRE_INC:
3202 case PRE_DEC:
3203 case PRE_MODIFY:
3204 temp = stack_pointer_rtx;
3205 break;
3206 case POST_INC:
3207 case POST_DEC:
3208 case POST_MODIFY:
3209 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3210 break;
3211 default:
3212 gcc_unreachable ();
3215 return replace_equiv_address (x, temp);
3218 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3219 X is known to satisfy push_operand, and MODE is known to be complex.
3220 Returns the last instruction emitted. */
3222 rtx_insn *
3223 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3225 machine_mode submode = GET_MODE_INNER (mode);
3226 bool imag_first;
3228 #ifdef PUSH_ROUNDING
3229 unsigned int submodesize = GET_MODE_SIZE (submode);
3231 /* In case we output to the stack, but the size is smaller than what the
3232 machine can push exactly, we need to use move instructions. */
3233 if (PUSH_ROUNDING (submodesize) != submodesize)
3235 x = emit_move_resolve_push (mode, x);
3236 return emit_move_insn (x, y);
3238 #endif
3240 /* Note that the real part always precedes the imag part in memory
3241 regardless of the machine's endianness. */
3242 switch (GET_CODE (XEXP (x, 0)))
3244 case PRE_DEC:
3245 case POST_DEC:
3246 imag_first = true;
3247 break;
3248 case PRE_INC:
3249 case POST_INC:
3250 imag_first = false;
3251 break;
3252 default:
3253 gcc_unreachable ();
3256 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3257 read_complex_part (y, imag_first));
3258 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3259 read_complex_part (y, !imag_first));
3262 /* A subroutine of emit_move_complex. Perform the move from Y to X
3263 via two moves of the parts. Returns the last instruction emitted. */
3265 rtx_insn *
3266 emit_move_complex_parts (rtx x, rtx y)
3268 /* Show the output dies here. This is necessary for SUBREGs
3269 of pseudos since we cannot track their lifetimes correctly;
3270 hard regs shouldn't appear here except as return values. */
3271 if (!reload_completed && !reload_in_progress
3272 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3273 emit_clobber (x);
3275 write_complex_part (x, read_complex_part (y, false), false);
3276 write_complex_part (x, read_complex_part (y, true), true);
3278 return get_last_insn ();
3281 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3282 MODE is known to be complex. Returns the last instruction emitted. */
3284 static rtx_insn *
3285 emit_move_complex (machine_mode mode, rtx x, rtx y)
3287 bool try_int;
3289 /* Need to take special care for pushes, to maintain proper ordering
3290 of the data, and possibly extra padding. */
3291 if (push_operand (x, mode))
3292 return emit_move_complex_push (mode, x, y);
3294 /* See if we can coerce the target into moving both values at once, except
3295 for floating point where we favor moving as parts if this is easy. */
3296 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3297 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3298 && !(REG_P (x)
3299 && HARD_REGISTER_P (x)
3300 && REG_NREGS (x) == 1)
3301 && !(REG_P (y)
3302 && HARD_REGISTER_P (y)
3303 && REG_NREGS (y) == 1))
3304 try_int = false;
3305 /* Not possible if the values are inherently not adjacent. */
3306 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3307 try_int = false;
3308 /* Is possible if both are registers (or subregs of registers). */
3309 else if (register_operand (x, mode) && register_operand (y, mode))
3310 try_int = true;
3311 /* If one of the operands is a memory, and alignment constraints
3312 are friendly enough, we may be able to do combined memory operations.
3313 We do not attempt this if Y is a constant because that combination is
3314 usually better with the by-parts thing below. */
3315 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3316 && (!STRICT_ALIGNMENT
3317 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3318 try_int = true;
3319 else
3320 try_int = false;
3322 if (try_int)
3324 rtx_insn *ret;
3326 /* For memory to memory moves, optimal behavior can be had with the
3327 existing block move logic. */
3328 if (MEM_P (x) && MEM_P (y))
3330 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3331 BLOCK_OP_NO_LIBCALL);
3332 return get_last_insn ();
3335 ret = emit_move_via_integer (mode, x, y, true);
3336 if (ret)
3337 return ret;
3340 return emit_move_complex_parts (x, y);
3343 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3344 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3346 static rtx_insn *
3347 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3349 rtx_insn *ret;
3351 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3352 if (mode != CCmode)
3354 enum insn_code code = optab_handler (mov_optab, CCmode);
3355 if (code != CODE_FOR_nothing)
3357 x = emit_move_change_mode (CCmode, mode, x, true);
3358 y = emit_move_change_mode (CCmode, mode, y, true);
3359 return emit_insn (GEN_FCN (code) (x, y));
3363 /* Otherwise, find the MODE_INT mode of the same width. */
3364 ret = emit_move_via_integer (mode, x, y, false);
3365 gcc_assert (ret != NULL);
3366 return ret;
3369 /* Return true if word I of OP lies entirely in the
3370 undefined bits of a paradoxical subreg. */
3372 static bool
3373 undefined_operand_subword_p (const_rtx op, int i)
3375 machine_mode innermode, innermostmode;
3376 int offset;
3377 if (GET_CODE (op) != SUBREG)
3378 return false;
3379 innermode = GET_MODE (op);
3380 innermostmode = GET_MODE (SUBREG_REG (op));
3381 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3382 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3383 memory, except for a paradoxical subreg where we define
3384 SUBREG_BYTE to be 0; undo this exception as in
3385 simplify_subreg. */
3386 if (SUBREG_BYTE (op) == 0
3387 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3389 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3390 if (WORDS_BIG_ENDIAN)
3391 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3392 if (BYTES_BIG_ENDIAN)
3393 offset += difference % UNITS_PER_WORD;
3395 if (offset >= GET_MODE_SIZE (innermostmode)
3396 || offset <= -GET_MODE_SIZE (word_mode))
3397 return true;
3398 return false;
3401 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3402 MODE is any multi-word or full-word mode that lacks a move_insn
3403 pattern. Note that you will get better code if you define such
3404 patterns, even if they must turn into multiple assembler instructions. */
3406 static rtx_insn *
3407 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3409 rtx_insn *last_insn = 0;
3410 rtx_insn *seq;
3411 rtx inner;
3412 bool need_clobber;
3413 int i;
3415 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3417 /* If X is a push on the stack, do the push now and replace
3418 X with a reference to the stack pointer. */
3419 if (push_operand (x, mode))
3420 x = emit_move_resolve_push (mode, x);
3422 /* If we are in reload, see if either operand is a MEM whose address
3423 is scheduled for replacement. */
3424 if (reload_in_progress && MEM_P (x)
3425 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3426 x = replace_equiv_address_nv (x, inner);
3427 if (reload_in_progress && MEM_P (y)
3428 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3429 y = replace_equiv_address_nv (y, inner);
3431 start_sequence ();
3433 need_clobber = false;
3434 for (i = 0;
3435 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3436 i++)
3438 rtx xpart = operand_subword (x, i, 1, mode);
3439 rtx ypart;
3441 /* Do not generate code for a move if it would come entirely
3442 from the undefined bits of a paradoxical subreg. */
3443 if (undefined_operand_subword_p (y, i))
3444 continue;
3446 ypart = operand_subword (y, i, 1, mode);
3448 /* If we can't get a part of Y, put Y into memory if it is a
3449 constant. Otherwise, force it into a register. Then we must
3450 be able to get a part of Y. */
3451 if (ypart == 0 && CONSTANT_P (y))
3453 y = use_anchored_address (force_const_mem (mode, y));
3454 ypart = operand_subword (y, i, 1, mode);
3456 else if (ypart == 0)
3457 ypart = operand_subword_force (y, i, mode);
3459 gcc_assert (xpart && ypart);
3461 need_clobber |= (GET_CODE (xpart) == SUBREG);
3463 last_insn = emit_move_insn (xpart, ypart);
3466 seq = get_insns ();
3467 end_sequence ();
3469 /* Show the output dies here. This is necessary for SUBREGs
3470 of pseudos since we cannot track their lifetimes correctly;
3471 hard regs shouldn't appear here except as return values.
3472 We never want to emit such a clobber after reload. */
3473 if (x != y
3474 && ! (reload_in_progress || reload_completed)
3475 && need_clobber != 0)
3476 emit_clobber (x);
3478 emit_insn (seq);
3480 return last_insn;
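
/* An illustrative, standalone C sketch of the word count driving the
   loop above: a ceiling division of the mode size by the word size.
   The 20-byte mode and 8-byte word are assumed values for a 64-bit
   target.  */
#include <stdio.h>

int
main (void)
{
  unsigned mode_size = 20, units_per_word = 8;	/* assumed values */
  unsigned nwords = (mode_size + units_per_word - 1) / units_per_word;
  printf ("%u words\n", nwords);		/* prints: 3 words */
  return 0;
}
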
3483 /* Low level part of emit_move_insn.
3484 Called just like emit_move_insn, but assumes X and Y
3485 are basically valid. */
3487 rtx_insn *
3488 emit_move_insn_1 (rtx x, rtx y)
3490 machine_mode mode = GET_MODE (x);
3491 enum insn_code code;
3493 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3495 code = optab_handler (mov_optab, mode);
3496 if (code != CODE_FOR_nothing)
3497 return emit_insn (GEN_FCN (code) (x, y));
3499 /* Expand complex moves by moving real part and imag part. */
3500 if (COMPLEX_MODE_P (mode))
3501 return emit_move_complex (mode, x, y);
3503 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3504 || ALL_FIXED_POINT_MODE_P (mode))
3506 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3508 /* If we can't find an integer mode, use a multi-word move. */
3509 if (result)
3510 return result;
3511 else
3512 return emit_move_multi_word (mode, x, y);
3515 if (GET_MODE_CLASS (mode) == MODE_CC)
3516 return emit_move_ccmode (mode, x, y);
3518 /* Try using a move pattern for the corresponding integer mode. This is
3519 only safe when simplify_subreg can convert MODE constants into integer
3520 constants. At present, it can only do this reliably if the value
3521 fits within a HOST_WIDE_INT. */
3522 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3524 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3526 if (ret)
3528 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3529 return ret;
3533 return emit_move_multi_word (mode, x, y);
3536 /* Generate code to copy Y into X.
3537 Both Y and X must have the same mode, except that
3538 Y can be a constant with VOIDmode.
3539 This mode cannot be BLKmode; use emit_block_move for that.
3541 Return the last instruction emitted. */
3543 rtx_insn *
3544 emit_move_insn (rtx x, rtx y)
3546 machine_mode mode = GET_MODE (x);
3547 rtx y_cst = NULL_RTX;
3548 rtx_insn *last_insn;
3549 rtx set;
3551 gcc_assert (mode != BLKmode
3552 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3554 if (CONSTANT_P (y))
3556 if (optimize
3557 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3558 && (last_insn = compress_float_constant (x, y)))
3559 return last_insn;
3561 y_cst = y;
3563 if (!targetm.legitimate_constant_p (mode, y))
3565 y = force_const_mem (mode, y);
3567 /* If the target's cannot_force_const_mem prevented the spill,
3568 assume that the target's move expanders will also take care
3569 of the non-legitimate constant. */
3570 if (!y)
3571 y = y_cst;
3572 else
3573 y = use_anchored_address (y);
3577 /* If X or Y are memory references, verify that their addresses are valid
3578 for the machine. */
3579 if (MEM_P (x)
3580 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3581 MEM_ADDR_SPACE (x))
3582 && ! push_operand (x, GET_MODE (x))))
3583 x = validize_mem (x);
3585 if (MEM_P (y)
3586 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3587 MEM_ADDR_SPACE (y)))
3588 y = validize_mem (y);
3590 gcc_assert (mode != BLKmode);
3592 last_insn = emit_move_insn_1 (x, y);
3594 if (y_cst && REG_P (x)
3595 && (set = single_set (last_insn)) != NULL_RTX
3596 && SET_DEST (set) == x
3597 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3598 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3600 return last_insn;
3603 /* Generate the body of an instruction to copy Y into X.
3604 It may be a list of insns, if one insn isn't enough. */
3606 rtx_insn *
3607 gen_move_insn (rtx x, rtx y)
3609 rtx_insn *seq;
3611 start_sequence ();
3612 emit_move_insn_1 (x, y);
3613 seq = get_insns ();
3614 end_sequence ();
3615 return seq;
3618 /* If Y is representable exactly in a narrower mode, and the target can
3619 perform the extension directly from constant or memory, then emit the
3620 move as an extension. */
3622 static rtx_insn *
3623 compress_float_constant (rtx x, rtx y)
3625 machine_mode dstmode = GET_MODE (x);
3626 machine_mode orig_srcmode = GET_MODE (y);
3627 machine_mode srcmode;
3628 REAL_VALUE_TYPE r;
3629 int oldcost, newcost;
3630 bool speed = optimize_insn_for_speed_p ();
3632 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3634 if (targetm.legitimate_constant_p (dstmode, y))
3635 oldcost = set_src_cost (y, orig_srcmode, speed);
3636 else
3637 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3639 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3640 srcmode != orig_srcmode;
3641 srcmode = GET_MODE_WIDER_MODE (srcmode))
3643 enum insn_code ic;
3644 rtx trunc_y;
3645 rtx_insn *last_insn;
3647 /* Skip if the target can't extend this way. */
3648 ic = can_extend_p (dstmode, srcmode, 0);
3649 if (ic == CODE_FOR_nothing)
3650 continue;
3652 /* Skip if the narrowed value isn't exact. */
3653 if (! exact_real_truncate (srcmode, &r))
3654 continue;
3656 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3658 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3660 /* Skip if the target needs extra instructions to perform
3661 the extension. */
3662 if (!insn_operand_matches (ic, 1, trunc_y))
3663 continue;
3664 /* This is valid, but may not be cheaper than the original. */
3665 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3666 dstmode, speed);
3667 if (oldcost < newcost)
3668 continue;
3670 else if (float_extend_from_mem[dstmode][srcmode])
3672 trunc_y = force_const_mem (srcmode, trunc_y);
3673 /* This is valid, but may not be cheaper than the original. */
3674 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3675 dstmode, speed);
3676 if (oldcost < newcost)
3677 continue;
3678 trunc_y = validize_mem (trunc_y);
3680 else
3681 continue;
3683 /* For CSE's benefit, force the compressed constant pool entry
3684 into a new pseudo. This constant may be used in different modes,
3685 and if not, combine will put things back together for us. */
3686 trunc_y = force_reg (srcmode, trunc_y);
3688 /* If x is a hard register, perform the extension into a pseudo,
3689 so that e.g. stack realignment code is aware of it. */
3690 rtx target = x;
3691 if (REG_P (x) && HARD_REGISTER_P (x))
3692 target = gen_reg_rtx (dstmode);
3694 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3695 last_insn = get_last_insn ();
3697 if (REG_P (target))
3698 set_unique_reg_note (last_insn, REG_EQUAL, y);
3700 if (target != x)
3701 return emit_move_insn (x, target);
3702 return last_insn;
3705 return NULL;
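
/* An illustrative, standalone C sketch of the "narrowed value is
   exact" test above (what exact_real_truncate checks): shrink a
   double to float and widen it back; the move may use the narrow
   constant only if the round trip is lossless.  */
#include <stdio.h>

static int
exact_truncate (double d)
{
  /* The cast rounds to float; compare the widened result with the
     original value.  */
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("%d %d\n", exact_truncate (1.0), exact_truncate (0.1));	/* 1 0 */
  return 0;
}
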
3708 /* Pushing data onto the stack. */
3710 /* Push a block of length SIZE (perhaps variable)
3711 and return an rtx to address the beginning of the block.
3712 The value may be virtual_outgoing_args_rtx.
3714 EXTRA is the number of bytes of padding to push in addition to SIZE.
3715 BELOW nonzero means this padding comes at low addresses;
3716 otherwise, the padding comes at high addresses. */
3718 rtx
3719 push_block (rtx size, int extra, int below)
3721 rtx temp;
3723 size = convert_modes (Pmode, ptr_mode, size, 1);
3724 if (CONSTANT_P (size))
3725 anti_adjust_stack (plus_constant (Pmode, size, extra));
3726 else if (REG_P (size) && extra == 0)
3727 anti_adjust_stack (size);
3728 else
3730 temp = copy_to_mode_reg (Pmode, size);
3731 if (extra != 0)
3732 temp = expand_binop (Pmode, add_optab, temp,
3733 gen_int_mode (extra, Pmode),
3734 temp, 0, OPTAB_LIB_WIDEN);
3735 anti_adjust_stack (temp);
3738 if (STACK_GROWS_DOWNWARD)
3740 temp = virtual_outgoing_args_rtx;
3741 if (extra != 0 && below)
3742 temp = plus_constant (Pmode, temp, extra);
3744 else
3746 if (CONST_INT_P (size))
3747 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3748 -INTVAL (size) - (below ? 0 : extra));
3749 else if (extra != 0 && !below)
3750 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3751 negate_rtx (Pmode, plus_constant (Pmode, size,
3752 extra)));
3753 else
3754 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3755 negate_rtx (Pmode, size));
3758 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3761 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3763 static rtx
3764 mem_autoinc_base (rtx mem)
3766 if (MEM_P (mem))
3768 rtx addr = XEXP (mem, 0);
3769 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3770 return XEXP (addr, 0);
3772 return NULL;
3775 /* A utility routine used here, in reload, and in try_split. The insns
3776 after PREV up to and including LAST are known to adjust the stack,
3777 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3778 placing notes as appropriate. PREV may be NULL, indicating the
3779 entire insn sequence prior to LAST should be scanned.
3781 The set of allowed stack pointer modifications is small:
3782 (1) One or more auto-inc style memory references (aka pushes),
3783 (2) One or more addition/subtraction with the SP as destination,
3784 (3) A single move insn with the SP as destination,
3785 (4) A call_pop insn,
3786 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3788 Insns in the sequence that do not modify the SP are ignored,
3789 except for noreturn calls.
3791 The return value is the amount of adjustment that can be trivially
3792 verified, via immediate operand or auto-inc. If the adjustment
3793 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
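/* Two illustrative insn patterns (hypothetical, 64-bit target):
     (set (reg sp) (plus (reg sp) (const_int -16)))
   is trivially extracted and yields -16, while a push through
     (set (mem:DI (pre_dec (reg sp))) (reg r0))
   yields -8; a non-trivial store to the stack pointer yields the
   "unknown" sentinel instead.  */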
3795 HOST_WIDE_INT
3796 find_args_size_adjust (rtx_insn *insn)
3798 rtx dest, set, pat;
3799 int i;
3801 pat = PATTERN (insn);
3802 set = NULL;
3804 /* Look for a call_pop pattern. */
3805 if (CALL_P (insn))
3807 /* We have to allow non-call_pop patterns for the case
3808 of emit_single_push_insn of a TLS address. */
3809 if (GET_CODE (pat) != PARALLEL)
3810 return 0;
3812 /* All call_pop have a stack pointer adjust in the parallel.
3813 The call itself is always first, and the stack adjust is
3814 usually last, so search from the end. */
3815 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3817 set = XVECEXP (pat, 0, i);
3818 if (GET_CODE (set) != SET)
3819 continue;
3820 dest = SET_DEST (set);
3821 if (dest == stack_pointer_rtx)
3822 break;
3824 /* We'd better have found the stack pointer adjust. */
3825 if (i == 0)
3826 return 0;
3827 /* Fall through to process the extracted SET and DEST
3828 as if it were a standalone insn.  */
3830 else if (GET_CODE (pat) == SET)
3831 set = pat;
3832 else if ((set = single_set (insn)) != NULL)
3834 else if (GET_CODE (pat) == PARALLEL)
3836 /* ??? Some older ports use a parallel with a stack adjust
3837 and a store for a PUSH_ROUNDING pattern, rather than a
3838 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3839 /* ??? See h8300 and m68k, pushqi1. */
3840 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3842 set = XVECEXP (pat, 0, i);
3843 if (GET_CODE (set) != SET)
3844 continue;
3845 dest = SET_DEST (set);
3846 if (dest == stack_pointer_rtx)
3847 break;
3849 /* We do not expect an auto-inc of the sp in the parallel. */
3850 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3851 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3852 != stack_pointer_rtx);
3854 if (i < 0)
3855 return 0;
3857 else
3858 return 0;
3860 dest = SET_DEST (set);
3862 /* Look for direct modifications of the stack pointer. */
3863 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3865 /* Look for a trivial adjustment, otherwise assume nothing. */
3866 /* Note that the SPU restore_stack_block pattern refers to
3867 the stack pointer in V4SImode. Consider that non-trivial. */
3868 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3869 && GET_CODE (SET_SRC (set)) == PLUS
3870 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3871 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3872 return INTVAL (XEXP (SET_SRC (set), 1));
3873 /* ??? Reload can generate no-op moves, which will be cleaned
3874 up later. Recognize it and continue searching. */
3875 else if (rtx_equal_p (dest, SET_SRC (set)))
3876 return 0;
3877 else
3878 return HOST_WIDE_INT_MIN;
3880 else
3882 rtx mem, addr;
3884 /* Otherwise only think about autoinc patterns. */
3885 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3887 mem = dest;
3888 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3889 != stack_pointer_rtx);
3891 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3892 mem = SET_SRC (set);
3893 else
3894 return 0;
3896 addr = XEXP (mem, 0);
3897 switch (GET_CODE (addr))
3899 case PRE_INC:
3900 case POST_INC:
3901 return GET_MODE_SIZE (GET_MODE (mem));
3902 case PRE_DEC:
3903 case POST_DEC:
3904 return -GET_MODE_SIZE (GET_MODE (mem));
3905 case PRE_MODIFY:
3906 case POST_MODIFY:
3907 addr = XEXP (addr, 1);
3908 gcc_assert (GET_CODE (addr) == PLUS);
3909 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3910 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3911 return INTVAL (XEXP (addr, 1));
3912 default:
3913 gcc_unreachable ();
3919 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3921 int args_size = end_args_size;
3922 bool saw_unknown = false;
3923 rtx_insn *insn;
3925 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3927 HOST_WIDE_INT this_delta;
3929 if (!NONDEBUG_INSN_P (insn))
3930 continue;
3932 this_delta = find_args_size_adjust (insn);
3933 if (this_delta == 0)
3935 if (!CALL_P (insn)
3936 || ACCUMULATE_OUTGOING_ARGS
3937 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3938 continue;
3941 gcc_assert (!saw_unknown);
3942 if (this_delta == HOST_WIDE_INT_MIN)
3943 saw_unknown = true;
3945 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3946 if (STACK_GROWS_DOWNWARD)
3947 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3949 args_size -= this_delta;
3952 return saw_unknown ? INT_MIN : args_size;
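/* A sketch of the resulting annotation (hypothetical deltas), for a
   downward-growing stack and END_ARGS_SIZE == 16:
     push arg0   ; adjusts sp by -8  <- gets REG_ARGS_SIZE 8
     push arg1   ; adjusts sp by -8  <- gets REG_ARGS_SIZE 16
   walking backward from LAST, each insn is tagged with the args size
   in effect after it executes.  */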
3955 #ifdef PUSH_ROUNDING
3956 /* Emit single push insn. */
3958 static void
3959 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3961 rtx dest_addr;
3962 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3963 rtx dest;
3964 enum insn_code icode;
3966 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3967 /* If there is a push pattern, use it.  Otherwise try the old way of
3968 throwing a MEM that represents the push operation to the move expander.  */
3969 icode = optab_handler (push_optab, mode);
3970 if (icode != CODE_FOR_nothing)
3972 struct expand_operand ops[1];
3974 create_input_operand (&ops[0], x, mode);
3975 if (maybe_expand_insn (icode, 1, ops))
3976 return;
3978 if (GET_MODE_SIZE (mode) == rounded_size)
3979 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3980 /* If we are to pad downward, adjust the stack pointer first and
3981 then store X into the stack location using an offset. This is
3982 because emit_move_insn does not know how to pad; it does not have
3983 access to the type.  */
3984 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3986 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3987 HOST_WIDE_INT offset;
3989 emit_move_insn (stack_pointer_rtx,
3990 expand_binop (Pmode,
3991 STACK_GROWS_DOWNWARD ? sub_optab
3992 : add_optab,
3993 stack_pointer_rtx,
3994 gen_int_mode (rounded_size, Pmode),
3995 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3997 offset = (HOST_WIDE_INT) padding_size;
3998 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3999 /* We have already decremented the stack pointer, so get the
4000 previous value. */
4001 offset += (HOST_WIDE_INT) rounded_size;
4003 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4004 /* We have already incremented the stack pointer, so get the
4005 previous value. */
4006 offset -= (HOST_WIDE_INT) rounded_size;
4008 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4009 gen_int_mode (offset, Pmode));
4011 else
4013 if (STACK_GROWS_DOWNWARD)
4014 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4015 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4016 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4017 Pmode));
4018 else
4019 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4020 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4021 gen_int_mode (rounded_size, Pmode));
4023 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4026 dest = gen_rtx_MEM (mode, dest_addr);
4028 if (type != 0)
4030 set_mem_attributes (dest, type, 1);
4032 if (cfun->tail_call_marked)
4033 /* Function incoming arguments may overlap with sibling call
4034 outgoing arguments and we cannot allow reordering of reads
4035 from function arguments with stores to outgoing arguments
4036 of sibling calls. */
4037 set_mem_alias_set (dest, 0);
4039 emit_move_insn (dest, x);
4042 /* Emit and annotate a single push insn. */
4044 static void
4045 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4047 int delta, old_delta = stack_pointer_delta;
4048 rtx_insn *prev = get_last_insn ();
4049 rtx_insn *last;
4051 emit_single_push_insn_1 (mode, x, type);
4053 last = get_last_insn ();
4055 /* Notice the common case where we emitted exactly one insn. */
4056 if (PREV_INSN (last) == prev)
4058 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4059 return;
4062 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4063 gcc_assert (delta == INT_MIN || delta == old_delta);
4065 #endif
4067 /* If reading SIZE bytes from X will end up reading from
4068 Y, return the number of bytes that overlap.  Return -1
4069 if there is no overlap, or -2 if we can't determine it
4070 (for example, when X and Y have different base registers). */
4072 static int
4073 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4075 rtx tmp = plus_constant (Pmode, x, size);
4076 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4078 if (!CONST_INT_P (sub))
4079 return -2;
4081 HOST_WIDE_INT val = INTVAL (sub);
4083 return IN_RANGE (val, 1, size) ? val : -1;
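/* Worked example (hypothetical pseudos): with X == (reg R),
   Y == (plus (reg R) (const_int 8)) and SIZE == 16, SUB simplifies
   to (const_int 8), so 8 bytes of the load from X overlap Y.  If X
   and Y use different base registers, SUB stays symbolic and -2 is
   returned.  */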
4086 /* Generate code to push X onto the stack, assuming it has mode MODE and
4087 type TYPE.
4088 MODE is redundant except when X is a CONST_INT (since they don't
4089 carry mode info).
4090 SIZE is an rtx for the size of data to be copied (in bytes),
4091 needed only if X is BLKmode.
4092 Return true if successful. May return false if asked to push a
4093 partial argument during a sibcall optimization (as specified by
4094 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4095 to not overlap.
4097 ALIGN (in bits) is maximum alignment we can assume.
4099 If PARTIAL and REG are both nonzero, then copy that many of the first
4100 bytes of X into registers starting with REG, and push the rest of X.
4101 The amount of space pushed is decreased by PARTIAL bytes.
4102 REG must be a hard register in this case.
4103 If REG is zero but PARTIAL is not, take all other actions for an
4104 argument partially in registers, but do not actually load any
4105 registers.
4107 EXTRA is the amount in bytes of extra space to leave next to this arg.
4108 This is ignored if an argument block has already been allocated.
4110 On a machine that lacks real push insns, ARGS_ADDR is the address of
4111 the bottom of the argument block for this call. We use indexing off there
4112 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4113 argument block has not been preallocated.
4115 ARGS_SO_FAR is the size of args previously pushed for this call.
4117 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4118 for arguments passed in registers. If nonzero, it will be the number
4119 of bytes required. */
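/* A minimal illustrative call (hypothetical operands): pushing a
   plain word-sized scalar with no partial-register part and no
   preallocated argument block,
     emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, align, 0,
                     NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX,
                     false);
   reduces to a single emit_single_push_insn on PUSH_ARGS targets.  */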
4121 bool
4122 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4123 unsigned int align, int partial, rtx reg, int extra,
4124 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4125 rtx alignment_pad, bool sibcall_p)
4127 rtx xinner;
4128 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4130 /* Decide where to pad the argument: `downward' for below,
4131 `upward' for above, or `none' for don't pad it.
4132 Default is below for small data on big-endian machines; else above. */
4133 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4135 /* Invert direction if stack is post-decrement.
4136 FIXME: why? */
4137 if (STACK_PUSH_CODE == POST_DEC)
4138 if (where_pad != none)
4139 where_pad = (where_pad == downward ? upward : downward);
4141 xinner = x;
4143 int nregs = partial / UNITS_PER_WORD;
4144 rtx *tmp_regs = NULL;
4145 int overlapping = 0;
4147 if (mode == BLKmode
4148 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4150 /* Copy a block into the stack, entirely or partially. */
4152 rtx temp;
4153 int used;
4154 int offset;
4155 int skip;
4157 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4158 used = partial - offset;
4160 if (mode != BLKmode)
4162 /* A value is to be stored in an insufficiently aligned
4163 stack slot; copy via a suitably aligned slot if
4164 necessary. */
4165 size = GEN_INT (GET_MODE_SIZE (mode));
4166 if (!MEM_P (xinner))
4168 temp = assign_temp (type, 1, 1);
4169 emit_move_insn (temp, xinner);
4170 xinner = temp;
4174 gcc_assert (size);
4176 /* USED is now the # of bytes we need not copy to the stack
4177 because registers will take care of them. */
4179 if (partial != 0)
4180 xinner = adjust_address (xinner, BLKmode, used);
4182 /* If the partial register-part of the arg counts in its stack size,
4183 skip the part of stack space corresponding to the registers.
4184 Otherwise, start copying to the beginning of the stack space,
4185 by setting SKIP to 0. */
4186 skip = (reg_parm_stack_space == 0) ? 0 : used;
4188 #ifdef PUSH_ROUNDING
4189 /* Do it with several push insns if that doesn't take lots of insns
4190 and if there is no difficulty with push insns that skip bytes
4191 on the stack for alignment purposes. */
4192 if (args_addr == 0
4193 && PUSH_ARGS
4194 && CONST_INT_P (size)
4195 && skip == 0
4196 && MEM_ALIGN (xinner) >= align
4197 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4198 /* Here we avoid the case of a structure whose weak alignment
4199 forces many pushes of a small amount of data,
4200 and such small pushes do rounding that causes trouble. */
4201 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4202 || align >= BIGGEST_ALIGNMENT
4203 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4204 == (align / BITS_PER_UNIT)))
4205 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4207 /* Push padding now if padding above and stack grows down,
4208 or if padding below and stack grows up.
4209 But if space already allocated, this has already been done. */
4210 if (extra && args_addr == 0
4211 && where_pad != none && where_pad != stack_direction)
4212 anti_adjust_stack (GEN_INT (extra));
4214 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4216 else
4217 #endif /* PUSH_ROUNDING */
4219 rtx target;
4221 /* Otherwise make space on the stack and copy the data
4222 to the address of that space. */
4224 /* Deduct words put into registers from the size we must copy. */
4225 if (partial != 0)
4227 if (CONST_INT_P (size))
4228 size = GEN_INT (INTVAL (size) - used);
4229 else
4230 size = expand_binop (GET_MODE (size), sub_optab, size,
4231 gen_int_mode (used, GET_MODE (size)),
4232 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4235 /* Get the address of the stack space.
4236 In this case, we do not deal with EXTRA separately.
4237 A single stack adjust will do. */
4238 if (! args_addr)
4240 temp = push_block (size, extra, where_pad == downward);
4241 extra = 0;
4243 else if (CONST_INT_P (args_so_far))
4244 temp = memory_address (BLKmode,
4245 plus_constant (Pmode, args_addr,
4246 skip + INTVAL (args_so_far)));
4247 else
4248 temp = memory_address (BLKmode,
4249 plus_constant (Pmode,
4250 gen_rtx_PLUS (Pmode,
4251 args_addr,
4252 args_so_far),
4253 skip));
4255 if (!ACCUMULATE_OUTGOING_ARGS)
4257 /* If the source is referenced relative to the stack pointer,
4258 copy it to another register to stabilize it. We do not need
4259 to do this if we know that we won't be changing sp. */
4261 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4262 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4263 temp = copy_to_reg (temp);
4266 target = gen_rtx_MEM (BLKmode, temp);
4268 /* We do *not* set_mem_attributes here, because incoming arguments
4269 may overlap with sibling call outgoing arguments and we cannot
4270 allow reordering of reads from function arguments with stores
4271 to outgoing arguments of sibling calls. We do, however, want
4272 to record the alignment of the stack slot. */
4273 /* ALIGN may well be better aligned than TYPE, e.g. due to
4274 PARM_BOUNDARY. Assume the caller isn't lying. */
4275 set_mem_align (target, align);
4277 /* If part should go in registers and pushing to that part would
4278 overwrite some of the values that need to go into regs, load the
4279 overlapping values into temporary pseudos to be moved into the hard
4280 regs at the end after the stack pushing has completed.
4281 We cannot load them directly into the hard regs here because
4282 they can be clobbered by the block move expansions.
4283 See PR 65358. */
4285 if (partial > 0 && reg != 0 && mode == BLKmode
4286 && GET_CODE (reg) != PARALLEL)
4288 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4289 if (overlapping > 0)
4291 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4292 overlapping /= UNITS_PER_WORD;
4294 tmp_regs = XALLOCAVEC (rtx, overlapping);
4296 for (int i = 0; i < overlapping; i++)
4297 tmp_regs[i] = gen_reg_rtx (word_mode);
4299 for (int i = 0; i < overlapping; i++)
4300 emit_move_insn (tmp_regs[i],
4301 operand_subword_force (target, i, mode));
4303 else if (overlapping == -1)
4304 overlapping = 0;
4305 /* Could not determine whether there is overlap.
4306 Fail the sibcall. */
4307 else
4309 overlapping = 0;
4310 if (sibcall_p)
4311 return false;
4314 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4317 else if (partial > 0)
4319 /* Scalar partly in registers. */
4321 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4322 int i;
4323 int not_stack;
4324 /* # bytes of start of argument
4325 that we must make space for but need not store. */
4326 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4327 int args_offset = INTVAL (args_so_far);
4328 int skip;
4330 /* Push padding now if padding above and stack grows down,
4331 or if padding below and stack grows up.
4332 But if space already allocated, this has already been done. */
4333 if (extra && args_addr == 0
4334 && where_pad != none && where_pad != stack_direction)
4335 anti_adjust_stack (GEN_INT (extra));
4337 /* If we make space by pushing it, we might as well push
4338 the real data. Otherwise, we can leave OFFSET nonzero
4339 and leave the space uninitialized. */
4340 if (args_addr == 0)
4341 offset = 0;
4343 /* Now NOT_STACK gets the number of words that we don't need to
4344 allocate on the stack. Convert OFFSET to words too. */
4345 not_stack = (partial - offset) / UNITS_PER_WORD;
4346 offset /= UNITS_PER_WORD;
4348 /* If the partial register-part of the arg counts in its stack size,
4349 skip the part of stack space corresponding to the registers.
4350 Otherwise, start copying to the beginning of the stack space,
4351 by setting SKIP to 0. */
4352 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4354 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4355 x = validize_mem (force_const_mem (mode, x));
4357 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4358 SUBREGs of such registers are not allowed. */
4359 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4360 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4361 x = copy_to_reg (x);
4363 /* Loop over all the words allocated on the stack for this arg. */
4364 /* We can do it by words, because any scalar bigger than a word
4365 has a size a multiple of a word. */
4366 for (i = size - 1; i >= not_stack; i--)
4367 if (i >= not_stack + offset)
4368 if (!emit_push_insn (operand_subword_force (x, i, mode),
4369 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4370 0, args_addr,
4371 GEN_INT (args_offset + ((i - not_stack + skip)
4372 * UNITS_PER_WORD)),
4373 reg_parm_stack_space, alignment_pad, sibcall_p))
4374 return false;
4376 else
4378 rtx addr;
4379 rtx dest;
4381 /* Push padding now if padding above and stack grows down,
4382 or if padding below and stack grows up.
4383 But if space already allocated, this has already been done. */
4384 if (extra && args_addr == 0
4385 && where_pad != none && where_pad != stack_direction)
4386 anti_adjust_stack (GEN_INT (extra));
4388 #ifdef PUSH_ROUNDING
4389 if (args_addr == 0 && PUSH_ARGS)
4390 emit_single_push_insn (mode, x, type);
4391 else
4392 #endif
4394 if (CONST_INT_P (args_so_far))
4395 addr
4396 = memory_address (mode,
4397 plus_constant (Pmode, args_addr,
4398 INTVAL (args_so_far)));
4399 else
4400 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4401 args_so_far));
4402 dest = gen_rtx_MEM (mode, addr);
4404 /* We do *not* set_mem_attributes here, because incoming arguments
4405 may overlap with sibling call outgoing arguments and we cannot
4406 allow reordering of reads from function arguments with stores
4407 to outgoing arguments of sibling calls. We do, however, want
4408 to record the alignment of the stack slot. */
4409 /* ALIGN may well be better aligned than TYPE, e.g. due to
4410 PARM_BOUNDARY. Assume the caller isn't lying. */
4411 set_mem_align (dest, align);
4413 emit_move_insn (dest, x);
4417 /* Move the partial arguments into the registers, along with any
4418 overlapping values that we moved into the pseudos in TMP_REGS.  */
4419 if (partial > 0 && reg != 0)
4421 /* Handle calls that pass values in multiple non-contiguous locations.
4422 The Irix 6 ABI has examples of this. */
4423 if (GET_CODE (reg) == PARALLEL)
4424 emit_group_load (reg, x, type, -1);
4425 else
4427 gcc_assert (partial % UNITS_PER_WORD == 0);
4428 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4430 for (int i = 0; i < overlapping; i++)
4431 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4432 + nregs - overlapping + i),
4433 tmp_regs[i]);
4438 if (extra && args_addr == 0 && where_pad == stack_direction)
4439 anti_adjust_stack (GEN_INT (extra));
4441 if (alignment_pad && args_addr == 0)
4442 anti_adjust_stack (alignment_pad);
4444 return true;
4447 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4448 operations. */
4450 static rtx
4451 get_subtarget (rtx x)
4453 return (optimize
4454 || x == 0
4455 /* Only registers can be subtargets. */
4456 || !REG_P (x)
4457 /* Don't use hard regs to avoid extending their life. */
4458 || REGNO (x) < FIRST_PSEUDO_REGISTER
4459 ? 0 : x);
4462 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4463 FIELD is a bitfield. Returns true if the optimization was successful,
4464 and there's nothing else to do. */
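/* Illustrative source-level cases (assuming a 32-bit container and
   little-endian layout):
     struct S { unsigned lo : 28; unsigned hi : 4; } s;
     s.hi += 2;   // topmost field: plain add, the carry falls off the top
     s.lo ^= 5;   // ior/xor with a constant: mask and shift, then one xor
   i.e. stores where no full extract/modify/insert sequence is needed.  */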
4466 static bool
4467 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4468 unsigned HOST_WIDE_INT bitpos,
4469 unsigned HOST_WIDE_INT bitregion_start,
4470 unsigned HOST_WIDE_INT bitregion_end,
4471 machine_mode mode1, rtx str_rtx,
4472 tree to, tree src)
4474 machine_mode str_mode = GET_MODE (str_rtx);
4475 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4476 tree op0, op1;
4477 rtx value, result;
4478 optab binop;
4479 gimple srcstmt;
4480 enum tree_code code;
4482 if (mode1 != VOIDmode
4483 || bitsize >= BITS_PER_WORD
4484 || str_bitsize > BITS_PER_WORD
4485 || TREE_SIDE_EFFECTS (to)
4486 || TREE_THIS_VOLATILE (to))
4487 return false;
4489 STRIP_NOPS (src);
4490 if (TREE_CODE (src) != SSA_NAME)
4491 return false;
4492 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4493 return false;
4495 srcstmt = get_gimple_for_ssa_name (src);
4496 if (!srcstmt
4497 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4498 return false;
4500 code = gimple_assign_rhs_code (srcstmt);
4502 op0 = gimple_assign_rhs1 (srcstmt);
4504 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4505 to find its initialization. Hopefully the initialization will
4506 be from a bitfield load. */
4507 if (TREE_CODE (op0) == SSA_NAME)
4509 gimple op0stmt = get_gimple_for_ssa_name (op0);
4511 /* We want to eventually have OP0 be the same as TO, which
4512 should be a bitfield. */
4513 if (!op0stmt
4514 || !is_gimple_assign (op0stmt)
4515 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4516 return false;
4517 op0 = gimple_assign_rhs1 (op0stmt);
4520 op1 = gimple_assign_rhs2 (srcstmt);
4522 if (!operand_equal_p (to, op0, 0))
4523 return false;
4525 if (MEM_P (str_rtx))
4527 unsigned HOST_WIDE_INT offset1;
4529 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4530 str_mode = word_mode;
4531 str_mode = get_best_mode (bitsize, bitpos,
4532 bitregion_start, bitregion_end,
4533 MEM_ALIGN (str_rtx), str_mode, 0);
4534 if (str_mode == VOIDmode)
4535 return false;
4536 str_bitsize = GET_MODE_BITSIZE (str_mode);
4538 offset1 = bitpos;
4539 bitpos %= str_bitsize;
4540 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4541 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4543 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4544 return false;
4546 /* If the bit field covers the whole REG/MEM, store_field
4547 will likely generate better code. */
4548 if (bitsize >= str_bitsize)
4549 return false;
4551 /* We can't handle fields split across multiple entities. */
4552 if (bitpos + bitsize > str_bitsize)
4553 return false;
4555 if (BYTES_BIG_ENDIAN)
4556 bitpos = str_bitsize - bitpos - bitsize;
4558 switch (code)
4560 case PLUS_EXPR:
4561 case MINUS_EXPR:
4562 /* For now, just optimize the case of the topmost bitfield
4563 where we don't need to do any masking and also
4564 1-bit bitfields where xor can be used.
4565 We might win by one instruction for the other bitfields
4566 too if insv/extv instructions aren't used, so that
4567 can be added later. */
4568 if (bitpos + bitsize != str_bitsize
4569 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4570 break;
4572 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4573 value = convert_modes (str_mode,
4574 TYPE_MODE (TREE_TYPE (op1)), value,
4575 TYPE_UNSIGNED (TREE_TYPE (op1)));
4577 /* We may be accessing data outside the field, which means
4578 we can alias adjacent data. */
4579 if (MEM_P (str_rtx))
4581 str_rtx = shallow_copy_rtx (str_rtx);
4582 set_mem_alias_set (str_rtx, 0);
4583 set_mem_expr (str_rtx, 0);
4586 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4587 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4589 value = expand_and (str_mode, value, const1_rtx, NULL);
4590 binop = xor_optab;
4592 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4593 result = expand_binop (str_mode, binop, str_rtx,
4594 value, str_rtx, 1, OPTAB_WIDEN);
4595 if (result != str_rtx)
4596 emit_move_insn (str_rtx, result);
4597 return true;
4599 case BIT_IOR_EXPR:
4600 case BIT_XOR_EXPR:
4601 if (TREE_CODE (op1) != INTEGER_CST)
4602 break;
4603 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4604 value = convert_modes (str_mode,
4605 TYPE_MODE (TREE_TYPE (op1)), value,
4606 TYPE_UNSIGNED (TREE_TYPE (op1)));
4608 /* We may be accessing data outside the field, which means
4609 we can alias adjacent data. */
4610 if (MEM_P (str_rtx))
4612 str_rtx = shallow_copy_rtx (str_rtx);
4613 set_mem_alias_set (str_rtx, 0);
4614 set_mem_expr (str_rtx, 0);
4617 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4618 if (bitpos + bitsize != str_bitsize)
4620 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4621 str_mode);
4622 value = expand_and (str_mode, value, mask, NULL_RTX);
4624 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4625 result = expand_binop (str_mode, binop, str_rtx,
4626 value, str_rtx, 1, OPTAB_WIDEN);
4627 if (result != str_rtx)
4628 emit_move_insn (str_rtx, result);
4629 return true;
4631 default:
4632 break;
4635 return false;
4638 /* In the C++ memory model, consecutive bit fields in a structure are
4639 considered one memory location.
4641 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4642 returns the bit range of consecutive bits in which this COMPONENT_REF
4643 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4644 and *OFFSET may be adjusted in the process.
4646 If the access does not need to be restricted, 0 is returned in both
4647 *BITSTART and *BITEND. */
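/* Illustrative (C++ memory model): in
     struct S { char a; int b : 3; int c : 5; char d; };
   B and C form a single memory location, so a store to B may rewrite
   C's bits but must not touch A or D; the range returned here covers
   the run of bits holding B and C.  */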
4649 static void
4650 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4651 unsigned HOST_WIDE_INT *bitend,
4652 tree exp,
4653 HOST_WIDE_INT *bitpos,
4654 tree *offset)
4656 HOST_WIDE_INT bitoffset;
4657 tree field, repr;
4659 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4661 field = TREE_OPERAND (exp, 1);
4662 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4663 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4664 need to limit the range we can access. */
4665 if (!repr)
4667 *bitstart = *bitend = 0;
4668 return;
4671 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4672 part of a larger bit field, then the representative does not serve any
4673 useful purpose. This can occur in Ada. */
4674 if (handled_component_p (TREE_OPERAND (exp, 0)))
4676 machine_mode rmode;
4677 HOST_WIDE_INT rbitsize, rbitpos;
4678 tree roffset;
4679 int unsignedp;
4680 int volatilep = 0;
4681 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4682 &roffset, &rmode, &unsignedp, &volatilep, false);
4683 if ((rbitpos % BITS_PER_UNIT) != 0)
4685 *bitstart = *bitend = 0;
4686 return;
4690 /* Compute the adjustment to bitpos from the offset of the field
4691 relative to the representative. DECL_FIELD_OFFSET of field and
4692 repr are the same by construction if they are not constants,
4693 see finish_bitfield_layout. */
4694 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4695 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4696 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4697 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4698 else
4699 bitoffset = 0;
4700 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4701 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4703 /* If the adjustment is larger than bitpos, we would have a negative bit
4704 position for the lower bound and this may wreak havoc later. Adjust
4705 offset and bitpos to make the lower bound non-negative in that case. */
4706 if (bitoffset > *bitpos)
4708 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4709 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4711 *bitpos += adjust;
4712 if (*offset == NULL_TREE)
4713 *offset = size_int (-adjust / BITS_PER_UNIT);
4714 else
4715 *offset
4716 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4717 *bitstart = 0;
4719 else
4720 *bitstart = *bitpos - bitoffset;
4722 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4725 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4726 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4727 DECL_RTL was not set yet, return NORTL. */
4729 static inline bool
4730 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4732 if (TREE_CODE (addr) != ADDR_EXPR)
4733 return false;
4735 tree base = TREE_OPERAND (addr, 0);
4737 if (!DECL_P (base)
4738 || TREE_ADDRESSABLE (base)
4739 || DECL_MODE (base) == BLKmode)
4740 return false;
4742 if (!DECL_RTL_SET_P (base))
4743 return nortl;
4745 return (!MEM_P (DECL_RTL (base)));
4748 /* Returns true if the MEM_REF REF refers to an object that does not
4749 reside in memory and has non-BLKmode. */
4751 static inline bool
4752 mem_ref_refers_to_non_mem_p (tree ref)
4754 tree base = TREE_OPERAND (ref, 0);
4755 return addr_expr_of_non_mem_decl_p_1 (base, false);
4758 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4759 is true, try generating a nontemporal store. */
4761 void
4762 expand_assignment (tree to, tree from, bool nontemporal)
4764 rtx to_rtx = 0;
4765 rtx result;
4766 machine_mode mode;
4767 unsigned int align;
4768 enum insn_code icode;
4770 /* Don't crash if the lhs of the assignment was erroneous. */
4771 if (TREE_CODE (to) == ERROR_MARK)
4773 expand_normal (from);
4774 return;
4777 /* Optimize away no-op moves without side-effects. */
4778 if (operand_equal_p (to, from, 0))
4779 return;
4781 /* Handle misaligned stores. */
4782 mode = TYPE_MODE (TREE_TYPE (to));
4783 if ((TREE_CODE (to) == MEM_REF
4784 || TREE_CODE (to) == TARGET_MEM_REF)
4785 && mode != BLKmode
4786 && !mem_ref_refers_to_non_mem_p (to)
4787 && ((align = get_object_alignment (to))
4788 < GET_MODE_ALIGNMENT (mode))
4789 && (((icode = optab_handler (movmisalign_optab, mode))
4790 != CODE_FOR_nothing)
4791 || SLOW_UNALIGNED_ACCESS (mode, align)))
4793 rtx reg, mem;
4795 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4796 reg = force_not_mem (reg);
4797 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4799 if (icode != CODE_FOR_nothing)
4801 struct expand_operand ops[2];
4803 create_fixed_operand (&ops[0], mem);
4804 create_input_operand (&ops[1], reg, mode);
4805 /* The movmisalign<mode> pattern cannot fail, else the assignment
4806 would silently be omitted. */
4807 expand_insn (icode, 2, ops);
4809 else
4810 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4811 return;
4814 /* Assignment of a structure component needs special treatment
4815 if the structure component's rtx is not simply a MEM.
4816 Assignment of an array element at a constant index, and assignment of
4817 an array element in an unaligned packed structure field, has the same
4818 problem. Same for (partially) storing into a non-memory object. */
4819 if (handled_component_p (to)
4820 || (TREE_CODE (to) == MEM_REF
4821 && mem_ref_refers_to_non_mem_p (to))
4822 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4824 machine_mode mode1;
4825 HOST_WIDE_INT bitsize, bitpos;
4826 unsigned HOST_WIDE_INT bitregion_start = 0;
4827 unsigned HOST_WIDE_INT bitregion_end = 0;
4828 tree offset;
4829 int unsignedp;
4830 int volatilep = 0;
4831 tree tem;
4833 push_temp_slots ();
4834 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4835 &unsignedp, &volatilep, true);
4837 /* Make sure bitpos is not negative, it can wreak havoc later. */
4838 if (bitpos < 0)
4840 gcc_assert (offset == NULL_TREE);
4841 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4842 ? 3 : exact_log2 (BITS_PER_UNIT)));
4843 bitpos &= BITS_PER_UNIT - 1;
4846 if (TREE_CODE (to) == COMPONENT_REF
4847 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4848 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4849 /* The C++ memory model naturally applies to byte-aligned fields.
4850 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4851 BITSIZE are not byte-aligned, there is no need to limit the range
4852 we can access. This can occur with packed structures in Ada. */
4853 else if (bitsize > 0
4854 && bitsize % BITS_PER_UNIT == 0
4855 && bitpos % BITS_PER_UNIT == 0)
4857 bitregion_start = bitpos;
4858 bitregion_end = bitpos + bitsize - 1;
4861 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4863 /* If the field has a mode, we want to access it in the
4864 field's mode, not the computed mode.
4865 If a MEM has VOIDmode (external with incomplete type),
4866 use BLKmode for it instead. */
4867 if (MEM_P (to_rtx))
4869 if (mode1 != VOIDmode)
4870 to_rtx = adjust_address (to_rtx, mode1, 0);
4871 else if (GET_MODE (to_rtx) == VOIDmode)
4872 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4875 if (offset != 0)
4877 machine_mode address_mode;
4878 rtx offset_rtx;
4880 if (!MEM_P (to_rtx))
4882 /* We can get constant negative offsets into arrays with broken
4883 user code. Translate this to a trap instead of ICEing. */
4884 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4885 expand_builtin_trap ();
4886 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4889 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4890 address_mode = get_address_mode (to_rtx);
4891 if (GET_MODE (offset_rtx) != address_mode)
4893 /* We cannot be sure that the RTL in offset_rtx is valid outside
4894 of a memory address context, so force it into a register
4895 before attempting to convert it to the desired mode. */
4896 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4897 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4900 /* If we have an expression in OFFSET_RTX and a non-zero
4901 byte offset in BITPOS, adding the byte offset before the
4902 OFFSET_RTX results in better intermediate code, which makes
4903 later rtl optimization passes perform better.
4905 We prefer intermediate code like this:
4907 r124:DI=r123:DI+0x18
4908 [r124:DI]=r121:DI
4910 ... instead of ...
4912 r124:DI=r123:DI+0x10
4913 [r124:DI+0x8]=r121:DI
4915 This is only done for aligned data values, as these can
4916 be expected to result in single move instructions. */
4917 if (mode1 != VOIDmode
4918 && bitpos != 0
4919 && bitsize > 0
4920 && (bitpos % bitsize) == 0
4921 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4922 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4924 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4925 bitregion_start = 0;
4926 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4927 bitregion_end -= bitpos;
4928 bitpos = 0;
4931 to_rtx = offset_address (to_rtx, offset_rtx,
4932 highest_pow2_factor_for_target (to,
4933 offset));
4936 /* No action is needed if the target is not a memory and the field
4937 lies completely outside that target. This can occur if the source
4938 code contains an out-of-bounds access to a small array. */
4939 if (!MEM_P (to_rtx)
4940 && GET_MODE (to_rtx) != BLKmode
4941 && (unsigned HOST_WIDE_INT) bitpos
4942 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4944 expand_normal (from);
4945 result = NULL;
4947 /* Handle expand_expr of a complex value returning a CONCAT. */
4948 else if (GET_CODE (to_rtx) == CONCAT)
4950 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4951 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4952 && bitpos == 0
4953 && bitsize == mode_bitsize)
4954 result = store_expr (from, to_rtx, false, nontemporal);
4955 else if (bitsize == mode_bitsize / 2
4956 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4957 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4958 nontemporal);
4959 else if (bitpos + bitsize <= mode_bitsize / 2)
4960 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4961 bitregion_start, bitregion_end,
4962 mode1, from,
4963 get_alias_set (to), nontemporal);
4964 else if (bitpos >= mode_bitsize / 2)
4965 result = store_field (XEXP (to_rtx, 1), bitsize,
4966 bitpos - mode_bitsize / 2,
4967 bitregion_start, bitregion_end,
4968 mode1, from,
4969 get_alias_set (to), nontemporal);
4970 else if (bitpos == 0 && bitsize == mode_bitsize)
4972 rtx from_rtx;
4973 result = expand_normal (from);
4974 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4975 TYPE_MODE (TREE_TYPE (from)), 0);
4976 emit_move_insn (XEXP (to_rtx, 0),
4977 read_complex_part (from_rtx, false));
4978 emit_move_insn (XEXP (to_rtx, 1),
4979 read_complex_part (from_rtx, true));
4981 else
4983 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4984 GET_MODE_SIZE (GET_MODE (to_rtx)));
4985 write_complex_part (temp, XEXP (to_rtx, 0), false);
4986 write_complex_part (temp, XEXP (to_rtx, 1), true);
4987 result = store_field (temp, bitsize, bitpos,
4988 bitregion_start, bitregion_end,
4989 mode1, from,
4990 get_alias_set (to), nontemporal);
4991 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4992 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4995 else
4997 if (MEM_P (to_rtx))
4999 /* If the field is at offset zero, we could have been given the
5000 DECL_RTX of the parent struct. Don't munge it. */
5001 to_rtx = shallow_copy_rtx (to_rtx);
5002 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5003 if (volatilep)
5004 MEM_VOLATILE_P (to_rtx) = 1;
5007 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5008 bitregion_start, bitregion_end,
5009 mode1,
5010 to_rtx, to, from))
5011 result = NULL;
5012 else
5013 result = store_field (to_rtx, bitsize, bitpos,
5014 bitregion_start, bitregion_end,
5015 mode1, from,
5016 get_alias_set (to), nontemporal);
5019 if (result)
5020 preserve_temp_slots (result);
5021 pop_temp_slots ();
5022 return;
5025 /* If the rhs is a function call and its value is not an aggregate,
5026 call the function before we start to compute the lhs.
5027 This is needed for correct code for cases such as
5028 val = setjmp (buf) on machines where reference to val
5029 requires loading up part of an address in a separate insn.
5031 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5032 since it might be a promoted variable where the zero- or sign- extension
5033 needs to be done. Handling this in the normal way is safe because no
5034 computation is done before the call. The same is true for SSA names. */
5035 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5036 && COMPLETE_TYPE_P (TREE_TYPE (from))
5037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5038 && ! (((TREE_CODE (to) == VAR_DECL
5039 || TREE_CODE (to) == PARM_DECL
5040 || TREE_CODE (to) == RESULT_DECL)
5041 && REG_P (DECL_RTL (to)))
5042 || TREE_CODE (to) == SSA_NAME))
5044 rtx value;
5045 rtx bounds;
5047 push_temp_slots ();
5048 value = expand_normal (from);
5050 /* Split value and bounds to store them separately. */
5051 chkp_split_slot (value, &value, &bounds);
5053 if (to_rtx == 0)
5054 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5056 /* Handle calls that return values in multiple non-contiguous locations.
5057 The Irix 6 ABI has examples of this. */
5058 if (GET_CODE (to_rtx) == PARALLEL)
5060 if (GET_CODE (value) == PARALLEL)
5061 emit_group_move (to_rtx, value);
5062 else
5063 emit_group_load (to_rtx, value, TREE_TYPE (from),
5064 int_size_in_bytes (TREE_TYPE (from)));
5066 else if (GET_CODE (value) == PARALLEL)
5067 emit_group_store (to_rtx, value, TREE_TYPE (from),
5068 int_size_in_bytes (TREE_TYPE (from)));
5069 else if (GET_MODE (to_rtx) == BLKmode)
5071 /* Handle calls that return BLKmode values in registers. */
5072 if (REG_P (value))
5073 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5074 else
5075 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5077 else
5079 if (POINTER_TYPE_P (TREE_TYPE (to)))
5080 value = convert_memory_address_addr_space
5081 (GET_MODE (to_rtx), value,
5082 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5084 emit_move_insn (to_rtx, value);
5087 /* Store bounds if required. */
5088 if (bounds
5089 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5091 gcc_assert (MEM_P (to_rtx));
5092 chkp_emit_bounds_store (bounds, value, to_rtx);
5095 preserve_temp_slots (to_rtx);
5096 pop_temp_slots ();
5097 return;
5100 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5101 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5103 /* Don't move directly into a return register. */
5104 if (TREE_CODE (to) == RESULT_DECL
5105 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5107 rtx temp;
5109 push_temp_slots ();
5111 /* If the source is itself a return value, it still is in a pseudo at
5112 this point so we can move it back to the return register directly. */
5113 if (REG_P (to_rtx)
5114 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5115 && TREE_CODE (from) != CALL_EXPR)
5116 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5117 else
5118 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5120 /* Handle calls that return values in multiple non-contiguous locations.
5121 The Irix 6 ABI has examples of this. */
5122 if (GET_CODE (to_rtx) == PARALLEL)
5124 if (GET_CODE (temp) == PARALLEL)
5125 emit_group_move (to_rtx, temp);
5126 else
5127 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5128 int_size_in_bytes (TREE_TYPE (from)));
5130 else if (temp)
5131 emit_move_insn (to_rtx, temp);
5133 preserve_temp_slots (to_rtx);
5134 pop_temp_slots ();
5135 return;
5138 /* In case we are returning the contents of an object which overlaps
5139 the place the value is being stored, use a safe function when copying
5140 a value through a pointer into a structure value return block. */
5141 if (TREE_CODE (to) == RESULT_DECL
5142 && TREE_CODE (from) == INDIRECT_REF
5143 && ADDR_SPACE_GENERIC_P
5144 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5145 && refs_may_alias_p (to, from)
5146 && cfun->returns_struct
5147 && !cfun->returns_pcc_struct)
5149 rtx from_rtx, size;
5151 push_temp_slots ();
5152 size = expr_size (from);
5153 from_rtx = expand_normal (from);
5155 emit_library_call (memmove_libfunc, LCT_NORMAL,
5156 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5157 XEXP (from_rtx, 0), Pmode,
5158 convert_to_mode (TYPE_MODE (sizetype),
5159 size, TYPE_UNSIGNED (sizetype)),
5160 TYPE_MODE (sizetype));
5162 preserve_temp_slots (to_rtx);
5163 pop_temp_slots ();
5164 return;
5167 /* Compute FROM and store the value in the rtx we got. */
5169 push_temp_slots ();
5170 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5171 preserve_temp_slots (result);
5172 pop_temp_slots ();
5173 return;
5176 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5177 succeeded, false otherwise. */
5179 bool
5180 emit_storent_insn (rtx to, rtx from)
5182 struct expand_operand ops[2];
5183 machine_mode mode = GET_MODE (to);
5184 enum insn_code code = optab_handler (storent_optab, mode);
5186 if (code == CODE_FOR_nothing)
5187 return false;
5189 create_fixed_operand (&ops[0], to);
5190 create_input_operand (&ops[1], from, mode);
5191 return maybe_expand_insn (code, 2, ops);
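/* On targets that define storent<mode> (e.g. the movnt family on
   x86) this emits a cache-bypassing nontemporal store; otherwise it
   reports failure and the caller falls back to an ordinary move.  */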
5194 /* Generate code for computing expression EXP,
5195 and storing the value into TARGET.
5197 If the mode is BLKmode then we may return TARGET itself.
5198 It turns out that in BLKmode it doesn't cause a problem,
5199 because C has no operators that could combine two different
5200 assignments into the same BLKmode object with different values
5201 with no sequence point. Will other languages need this to
5202 be more thorough?
5204 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5205 stack, and block moves may need to be treated specially.
5207 If NONTEMPORAL is true, try using a nontemporal store instruction.
5209 If BTARGET is not NULL then computed bounds of EXP are
5210 associated with BTARGET. */
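/* Illustrative of the BLKmode remark above: in C, something like
     s = f ();  s = g ();
   always has a sequence point between the two aggregate stores, so
   returning TARGET itself for the first one cannot expose a value
   that the second store has partially overwritten.  */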
5213 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5214 bool nontemporal, tree btarget)
5216 rtx temp;
5217 rtx alt_rtl = NULL_RTX;
5218 location_t loc = curr_insn_location ();
5220 if (VOID_TYPE_P (TREE_TYPE (exp)))
5222 /* C++ can generate ?: expressions with a throw expression in one
5223 branch and an rvalue in the other. Here, we resolve attempts to
5224 store the throw expression's nonexistent result. */
5225 gcc_assert (!call_param_p);
5226 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5227 return NULL_RTX;
5229 if (TREE_CODE (exp) == COMPOUND_EXPR)
5231 /* Perform first part of compound expression, then assign from second
5232 part. */
5233 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5234 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5235 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5236 call_param_p, nontemporal, btarget);
5238 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5240 /* For conditional expression, get safe form of the target. Then
5241 test the condition, doing the appropriate assignment on either
5242 side. This avoids the creation of unnecessary temporaries.
5243 For non-BLKmode, it is more efficient not to do this. */
5245 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5247 do_pending_stack_adjust ();
5248 NO_DEFER_POP;
5249 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5250 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5251 nontemporal, btarget);
5252 emit_jump_insn (targetm.gen_jump (lab2));
5253 emit_barrier ();
5254 emit_label (lab1);
5255 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5256 nontemporal, btarget);
5257 emit_label (lab2);
5258 OK_DEFER_POP;
5260 return NULL_RTX;
5262 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5263 /* If this is a scalar in a register that is stored in a wider mode
5264 than the declared mode, compute the result into its declared mode
5265 and then convert to the wider mode. Our value is the computed
5266 expression. */
5268 rtx inner_target = 0;
5270 /* We can do the conversion inside EXP, which will often result
5271 in some optimizations. Do the conversion in two steps: first
5272 change the signedness, if needed, then do the extension.  But don't
5273 do this if the type of EXP is a subtype of something else
5274 since then the conversion might involve more than just
5275 converting modes. */
5276 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5277 && TREE_TYPE (TREE_TYPE (exp)) == 0
5278 && GET_MODE_PRECISION (GET_MODE (target))
5279 == TYPE_PRECISION (TREE_TYPE (exp)))
5281 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5282 TYPE_UNSIGNED (TREE_TYPE (exp))))
5284 /* Some types, e.g. Fortran's logical*4, won't have a signed
5285 version, so use the mode instead. */
5286 tree ntype
5287 = (signed_or_unsigned_type_for
5288 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5289 if (ntype == NULL)
5290 ntype = lang_hooks.types.type_for_mode
5291 (TYPE_MODE (TREE_TYPE (exp)),
5292 SUBREG_PROMOTED_SIGN (target));
5294 exp = fold_convert_loc (loc, ntype, exp);
5297 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5298 (GET_MODE (SUBREG_REG (target)),
5299 SUBREG_PROMOTED_SIGN (target)),
5300 exp);
5302 inner_target = SUBREG_REG (target);
5305 temp = expand_expr (exp, inner_target, VOIDmode,
5306 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5308 /* Handle bounds returned by call. */
5309 if (TREE_CODE (exp) == CALL_EXPR)
5311 rtx bounds;
5312 chkp_split_slot (temp, &temp, &bounds);
5313 if (bounds && btarget)
5315 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5316 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5317 chkp_set_rtl_bounds (btarget, tmp);
5321 /* If TEMP is a VOIDmode constant, use convert_modes to make
5322 sure that we properly convert it. */
5323 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5325 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5326 temp, SUBREG_PROMOTED_SIGN (target));
5327 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5328 GET_MODE (target), temp,
5329 SUBREG_PROMOTED_SIGN (target));
5332 convert_move (SUBREG_REG (target), temp,
5333 SUBREG_PROMOTED_SIGN (target));
5335 return NULL_RTX;
5337 else if ((TREE_CODE (exp) == STRING_CST
5338 || (TREE_CODE (exp) == MEM_REF
5339 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5340 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5341 == STRING_CST
5342 && integer_zerop (TREE_OPERAND (exp, 1))))
5343 && !nontemporal && !call_param_p
5344 && MEM_P (target))
5346 /* Optimize initialization of an array with a STRING_CST. */
5347 HOST_WIDE_INT exp_len, str_copy_len;
5348 rtx dest_mem;
5349 tree str = TREE_CODE (exp) == STRING_CST
5350 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5352 exp_len = int_expr_size (exp);
5353 if (exp_len <= 0)
5354 goto normal_expr;
5356 if (TREE_STRING_LENGTH (str) <= 0)
5357 goto normal_expr;
5359 str_copy_len = strlen (TREE_STRING_POINTER (str));
5360 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5361 goto normal_expr;
5363 str_copy_len = TREE_STRING_LENGTH (str);
5364 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5365 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5367 str_copy_len += STORE_MAX_PIECES - 1;
5368 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5370 str_copy_len = MIN (str_copy_len, exp_len);
5371 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5372 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5373 MEM_ALIGN (target), false))
5374 goto normal_expr;
5376 dest_mem = target;
5378 dest_mem = store_by_pieces (dest_mem,
5379 str_copy_len, builtin_strncpy_read_str,
5380 CONST_CAST (char *,
5381 TREE_STRING_POINTER (str)),
5382 MEM_ALIGN (target), false,
5383 exp_len > str_copy_len ? 1 : 0);
5384 if (exp_len > str_copy_len)
5385 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5386 GEN_INT (exp_len - str_copy_len),
5387 BLOCK_OP_NORMAL);
5388 return NULL_RTX;
5390 else
5392 rtx tmp_target;
5394 normal_expr:
5395 /* If we want to use a nontemporal store, force the value to
5396 register first. */
5397 tmp_target = nontemporal ? NULL_RTX : target;
5398 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5399 (call_param_p
5400 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5401 &alt_rtl, false);
5403 /* Handle bounds returned by call. */
5404 if (TREE_CODE (exp) == CALL_EXPR)
5406 rtx bounds;
5407 chkp_split_slot (temp, &temp, &bounds);
5408 if (bounds && btarget)
5410 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5411 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5412 chkp_set_rtl_bounds (btarget, tmp);
5417 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5418 the same as that of TARGET, adjust the constant. This is needed, for
5419 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5420 only a word-sized value. */
5421 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5422 && TREE_CODE (exp) != ERROR_MARK
5423 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5424 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5425 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5427 /* If value was not generated in the target, store it there.
5428 Convert the value to TARGET's type first if necessary and emit the
5429 pending increments that were queued when expanding EXP.
5430 Note that we cannot emit the whole queue blindly because this will
5431 effectively disable the POST_INC optimization later.
5433 If TEMP and TARGET compare equal according to rtx_equal_p, but
5434 one or both of them are volatile memory refs, we have to distinguish
5435 two cases:
5436 - expand_expr has used TARGET. In this case, we must not generate
5437 another copy. This can be detected by TARGET being equal according
5438 to == .
5439 - expand_expr has not used TARGET - that means that the source just
5440 happens to have the same RTX form. Since temp will have been created
5441 by expand_expr, it will compare unequal according to == .
5442 We must generate a copy in this case, to reach the correct number
5443 of volatile memory references. */
5445 if ((! rtx_equal_p (temp, target)
5446 || (temp != target && (side_effects_p (temp)
5447 || side_effects_p (target))))
5448 && TREE_CODE (exp) != ERROR_MARK
5449 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5450 but TARGET is not valid memory reference, TEMP will differ
5451 from TARGET although it is really the same location. */
5452 && !(alt_rtl
5453 && rtx_equal_p (alt_rtl, target)
5454 && !side_effects_p (alt_rtl)
5455 && !side_effects_p (target))
5456 /* If there's nothing to copy, don't bother. Don't call
5457 expr_size unless necessary, because some front-ends (C++)
5458 expr_size-hook must not be given objects that are not
5459 supposed to be bit-copied or bit-initialized. */
5460 && expr_size (exp) != const0_rtx)
5462 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5464 if (GET_MODE (target) == BLKmode)
5466 /* Handle calls that return BLKmode values in registers. */
5467 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5468 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5469 else
5470 store_bit_field (target,
5471 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5472 0, 0, 0, GET_MODE (temp), temp);
5474 else
5475 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5478 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5480 /* Handle copying a string constant into an array. The string
5481 constant may be shorter than the array. So copy just the string's
5482 actual length, and clear the rest. First get the size of the data
5483 type of the string, which is actually the size of the target. */
5484 rtx size = expr_size (exp);
5486 if (CONST_INT_P (size)
5487 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5488 emit_block_move (target, temp, size,
5489 (call_param_p
5490 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5491 else
5493 machine_mode pointer_mode
5494 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5495 machine_mode address_mode = get_address_mode (target);
5497 /* Compute the size of the data to copy from the string. */
5498 tree copy_size
5499 = size_binop_loc (loc, MIN_EXPR,
5500 make_tree (sizetype, size),
5501 size_int (TREE_STRING_LENGTH (exp)));
5502 rtx copy_size_rtx
5503 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5504 (call_param_p
5505 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5506 rtx_code_label *label = 0;
5508 /* Copy that much. */
5509 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5510 TYPE_UNSIGNED (sizetype));
5511 emit_block_move (target, temp, copy_size_rtx,
5512 (call_param_p
5513 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5515 /* Figure out how much is left in TARGET that we have to clear.
5516 Do all calculations in pointer_mode. */
5517 if (CONST_INT_P (copy_size_rtx))
5519 size = plus_constant (address_mode, size,
5520 -INTVAL (copy_size_rtx));
5521 target = adjust_address (target, BLKmode,
5522 INTVAL (copy_size_rtx));
5524 else
5526 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5527 copy_size_rtx, NULL_RTX, 0,
5528 OPTAB_LIB_WIDEN);
5530 if (GET_MODE (copy_size_rtx) != address_mode)
5531 copy_size_rtx = convert_to_mode (address_mode,
5532 copy_size_rtx,
5533 TYPE_UNSIGNED (sizetype));
5535 target = offset_address (target, copy_size_rtx,
5536 highest_pow2_factor (copy_size));
5537 label = gen_label_rtx ();
5538 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5539 GET_MODE (size), 0, label);
5542 if (size != const0_rtx)
5543 clear_storage (target, size, BLOCK_OP_NORMAL);
5545 if (label)
5546 emit_label (label);
5549 /* Handle calls that return values in multiple non-contiguous locations.
5550 The Irix 6 ABI has examples of this. */
5551 else if (GET_CODE (target) == PARALLEL)
5553 if (GET_CODE (temp) == PARALLEL)
5554 emit_group_move (target, temp);
5555 else
5556 emit_group_load (target, temp, TREE_TYPE (exp),
5557 int_size_in_bytes (TREE_TYPE (exp)));
5559 else if (GET_CODE (temp) == PARALLEL)
5560 emit_group_store (target, temp, TREE_TYPE (exp),
5561 int_size_in_bytes (TREE_TYPE (exp)));
5562 else if (GET_MODE (temp) == BLKmode)
5563 emit_block_move (target, temp, expr_size (exp),
5564 (call_param_p
5565 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5566 /* If we emit a nontemporal store, there is nothing else to do. */
5567 else if (nontemporal && emit_storent_insn (target, temp))
5569 else
5571 temp = force_operand (temp, target);
5572 if (temp != target)
5573 emit_move_insn (target, temp);
5577 return NULL_RTX;
5580 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5582 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5584 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5587 /* Return true if field F of structure TYPE is a flexible array. */
5589 static bool
5590 flexible_array_member_p (const_tree f, const_tree type)
5592 const_tree tf;
5594 tf = TREE_TYPE (f);
5595 return (DECL_CHAIN (f) == NULL
5596 && TREE_CODE (tf) == ARRAY_TYPE
5597 && TYPE_DOMAIN (tf)
5598 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5599 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5600 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5601 && int_size_in_bytes (type) >= 0);
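/* For example (hypothetical type): given

     struct msg { int len; char data[]; };

   the field DATA satisfies every test above: it is the last field,
   its type is an array whose domain has a zero lower bound and no
   upper bound, and the enclosing struct still has a known size.  */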
5604 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5605 must have in order for it to completely initialize a value of type TYPE.
5606 Return -1 if the number isn't known.
5608 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5610 static HOST_WIDE_INT
5611 count_type_elements (const_tree type, bool for_ctor_p)
5613 switch (TREE_CODE (type))
5615 case ARRAY_TYPE:
5617 tree nelts;
5619 nelts = array_type_nelts (type);
5620 if (nelts && tree_fits_uhwi_p (nelts))
5622 unsigned HOST_WIDE_INT n;
5624 n = tree_to_uhwi (nelts) + 1;
5625 if (n == 0 || for_ctor_p)
5626 return n;
5627 else
5628 return n * count_type_elements (TREE_TYPE (type), false);
5630 return for_ctor_p ? -1 : 1;
5633 case RECORD_TYPE:
5635 unsigned HOST_WIDE_INT n;
5636 tree f;
5638 n = 0;
5639 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5640 if (TREE_CODE (f) == FIELD_DECL)
5642 if (!for_ctor_p)
5643 n += count_type_elements (TREE_TYPE (f), false);
5644 else if (!flexible_array_member_p (f, type))
5645 /* Don't count flexible arrays, which are not supposed
5646 to be initialized. */
5647 n += 1;
5650 return n;
5653 case UNION_TYPE:
5654 case QUAL_UNION_TYPE:
5656 tree f;
5657 HOST_WIDE_INT n, m;
5659 gcc_assert (!for_ctor_p);
5660 /* Estimate the number of scalars in each field and pick the
5661 maximum. Other estimates would do instead; the idea is simply
5662 to make sure that the estimate is not sensitive to the ordering
5663 of the fields. */
5664 n = 1;
5665 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5666 if (TREE_CODE (f) == FIELD_DECL)
5668 m = count_type_elements (TREE_TYPE (f), false);
5669 /* If the field doesn't span the whole union, add an extra
5670 scalar for the rest. */
5671 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5672 TYPE_SIZE (type)) != 1)
5673 m++;
5674 if (n < m)
5675 n = m;
5677 return n;
5680 case COMPLEX_TYPE:
5681 return 2;
5683 case VECTOR_TYPE:
5684 return TYPE_VECTOR_SUBPARTS (type);
5686 case INTEGER_TYPE:
5687 case REAL_TYPE:
5688 case FIXED_POINT_TYPE:
5689 case ENUMERAL_TYPE:
5690 case BOOLEAN_TYPE:
5691 case POINTER_TYPE:
5692 case OFFSET_TYPE:
5693 case REFERENCE_TYPE:
5694 case NULLPTR_TYPE:
5695 return 1;
5697 case ERROR_MARK:
5698 return 0;
5700 case VOID_TYPE:
5701 case METHOD_TYPE:
5702 case FUNCTION_TYPE:
5703 case LANG_TYPE:
5704 default:
5705 gcc_unreachable ();
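/* For example (hypothetical type): for

     struct s { int a; float b[3]; };

   count_type_elements (s, true) should return 2, the number of
   top-level elements a constructor must provide, while
   count_type_elements (s, false) should return 4, the estimated
   number of scalars (1 for A plus 3 for the elements of B).  */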
5709 /* Helper for categorize_ctor_elements. Identical interface. */
5711 static bool
5712 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5713 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5715 unsigned HOST_WIDE_INT idx;
5716 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5717 tree value, purpose, elt_type;
5719 /* Whether CTOR is a valid constant initializer, in accordance with what
5720 initializer_constant_valid_p does. If inferred from the constructor
5721 elements, true until proven otherwise. */
5722 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5723 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5725 nz_elts = 0;
5726 init_elts = 0;
5727 num_fields = 0;
5728 elt_type = NULL_TREE;
5730 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5732 HOST_WIDE_INT mult = 1;
5734 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5736 tree lo_index = TREE_OPERAND (purpose, 0);
5737 tree hi_index = TREE_OPERAND (purpose, 1);
5739 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5740 mult = (tree_to_uhwi (hi_index)
5741 - tree_to_uhwi (lo_index) + 1);
5743 num_fields += mult;
5744 elt_type = TREE_TYPE (value);
5746 switch (TREE_CODE (value))
5748 case CONSTRUCTOR:
5750 HOST_WIDE_INT nz = 0, ic = 0;
5752 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5753 p_complete);
5755 nz_elts += mult * nz;
5756 init_elts += mult * ic;
5758 if (const_from_elts_p && const_p)
5759 const_p = const_elt_p;
5761 break;
5763 case INTEGER_CST:
5764 case REAL_CST:
5765 case FIXED_CST:
5766 if (!initializer_zerop (value))
5767 nz_elts += mult;
5768 init_elts += mult;
5769 break;
5771 case STRING_CST:
5772 nz_elts += mult * TREE_STRING_LENGTH (value);
5773 init_elts += mult * TREE_STRING_LENGTH (value);
5774 break;
5776 case COMPLEX_CST:
5777 if (!initializer_zerop (TREE_REALPART (value)))
5778 nz_elts += mult;
5779 if (!initializer_zerop (TREE_IMAGPART (value)))
5780 nz_elts += mult;
5781 init_elts += mult;
5782 break;
5784 case VECTOR_CST:
5786 unsigned i;
5787 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5789 tree v = VECTOR_CST_ELT (value, i);
5790 if (!initializer_zerop (v))
5791 nz_elts += mult;
5792 init_elts += mult;
5795 break;
5797 default:
5799 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5800 nz_elts += mult * tc;
5801 init_elts += mult * tc;
5803 if (const_from_elts_p && const_p)
5804 const_p = initializer_constant_valid_p (value, elt_type)
5805 != NULL_TREE;
5807 break;
5811 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5812 num_fields, elt_type))
5813 *p_complete = false;
5815 *p_nz_elts += nz_elts;
5816 *p_init_elts += init_elts;
5818 return const_p;
5821 /* Examine CTOR to discover:
5822 * how many scalar fields are set to nonzero values,
5823 and place it in *P_NZ_ELTS;
5824 * how many scalar fields in total are in CTOR,
5825 and place it in *P_INIT_ELTS;
5826 * whether the constructor is complete -- in the sense that every
5827 meaningful byte is explicitly given a value --
5828 and place it in *P_COMPLETE.
5830 Return whether or not CTOR is a valid static constant initializer, the same
5831 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5833 bool
5834 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5835 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5837 *p_nz_elts = 0;
5838 *p_init_elts = 0;
5839 *p_complete = true;
5841 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
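/* For example (hypothetical initializer): for

     int a[4] = { 1, 0, 2 };

   categorize_ctor_elements should set *P_NZ_ELTS to 2 (the two
   nonzero scalars), *P_INIT_ELTS to 3 (three elements are given),
   and *P_COMPLETE to false, because a complete constructor for
   int[4] needs 4 elements; it returns true since every element is
   a valid constant initializer.  */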
5844 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5845 of which had type LAST_TYPE. Each element was itself a complete
5846 initializer, in the sense that every meaningful byte was explicitly
5847 given a value. Return true if the same is true for the constructor
5848 as a whole. */
5850 bool
5851 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5852 const_tree last_type)
5854 if (TREE_CODE (type) == UNION_TYPE
5855 || TREE_CODE (type) == QUAL_UNION_TYPE)
5857 if (num_elts == 0)
5858 return false;
5860 gcc_assert (num_elts == 1 && last_type);
5862 /* ??? We could look at each element of the union, and find the
5863 largest element. Which would avoid comparing the size of the
5864 initialized element against any tail padding in the union.
5865 Doesn't seem worth the effort... */
5866 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5869 return count_type_elements (type, true) == num_elts;
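/* For example (hypothetical union, assuming 32-bit int and 16-bit
   short): for

     union u { int i; short s; };

   an initializer `{ .i = 5 }' is complete because the initialized
   element has the same size as the union, whereas `{ .s = 1 }' is
   not, since S covers only part of the union's bytes.  */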
5872 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5874 static int
5875 mostly_zeros_p (const_tree exp)
5877 if (TREE_CODE (exp) == CONSTRUCTOR)
5879 HOST_WIDE_INT nz_elts, init_elts;
5880 bool complete_p;
5882 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5883 return !complete_p || nz_elts < init_elts / 4;
5886 return initializer_zerop (exp);
5889 /* Return 1 if EXP contains all zeros. */
5891 static int
5892 all_zeros_p (const_tree exp)
5894 if (TREE_CODE (exp) == CONSTRUCTOR)
5896 HOST_WIDE_INT nz_elts, init_elts;
5897 bool complete_p;
5899 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5900 return nz_elts == 0;
5903 return initializer_zerop (exp);
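/* For example (hypothetical initializers): for

     int a[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   the constructor is complete with nz_elts == 1 and init_elts == 8,
   so mostly_zeros_p returns 1 (1 < 8/4) while all_zeros_p returns 0;
   for `int b[8] = { 0 };' both should return 1.  */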
5906 /* Helper function for store_constructor.
5907 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5908 CLEARED is as for store_constructor.
5909 ALIAS_SET is the alias set to use for any stores.
5911 This provides a recursive shortcut back to store_constructor when it isn't
5912 necessary to go through store_field. This is so that we can pass through
5913 the cleared field to let store_constructor know that we may not have to
5914 clear a substructure if the outer structure has already been cleared. */
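/* For example (hypothetical nested initializer): when expanding

     struct outer { struct inner { int x, y; } in; } o = { { 0, 0 } };

   once the outer aggregate has been cleared, an inner all-zero
   constructor is skipped outright, and a partially nonzero one is
   passed CLEARED so it need not clear its own storage again.  */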
5916 static void
5917 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5918 HOST_WIDE_INT bitpos, machine_mode mode,
5919 tree exp, int cleared, alias_set_type alias_set)
5921 if (TREE_CODE (exp) == CONSTRUCTOR
5922 /* We can only call store_constructor recursively if the size and
5923 bit position are on a byte boundary. */
5924 && bitpos % BITS_PER_UNIT == 0
5925 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5926 /* If we have a nonzero bitpos for a register target, then we just
5927 let store_field do the bitfield handling. This is unlikely to
5928 generate unnecessary clear instructions anyway. */
5929 && (bitpos == 0 || MEM_P (target)))
5931 if (MEM_P (target))
5932 target
5933 = adjust_address (target,
5934 GET_MODE (target) == BLKmode
5935 || 0 != (bitpos
5936 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5937 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5940 /* Update the alias set, if required. */
5941 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5942 && MEM_ALIAS_SET (target) != 0)
5944 target = copy_rtx (target);
5945 set_mem_alias_set (target, alias_set);
5948 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5950 else
5951 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5955 /* Returns the number of FIELD_DECLs in TYPE. */
5957 static int
5958 fields_length (const_tree type)
5960 tree t = TYPE_FIELDS (type);
5961 int count = 0;
5963 for (; t; t = DECL_CHAIN (t))
5964 if (TREE_CODE (t) == FIELD_DECL)
5965 ++count;
5967 return count;
5971 /* Store the value of constructor EXP into the rtx TARGET.
5972 TARGET is either a REG or a MEM; we know it cannot conflict, since
5973 safe_from_p has been called.
5974 CLEARED is true if TARGET is known to have been zero'd.
5975 SIZE is the number of bytes of TARGET we are allowed to modify: this
5976 may not be the same as the size of EXP if we are assigning to a field
5977 which has been packed to exclude padding bits. */
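/* For example (hypothetical initializer): for a memory TARGET and

     struct s { int a, b, c, d; } v = { 7 };

   the constructor supplies fewer elements than the struct has
   fields, so the RECORD_TYPE case below should clear the whole
   object first and then store only the nonzero field A.  */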
5979 static void
5980 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5982 tree type = TREE_TYPE (exp);
5983 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5985 switch (TREE_CODE (type))
5987 case RECORD_TYPE:
5988 case UNION_TYPE:
5989 case QUAL_UNION_TYPE:
5991 unsigned HOST_WIDE_INT idx;
5992 tree field, value;
5994 /* If size is zero or the target is already cleared, do nothing. */
5995 if (size == 0 || cleared)
5996 cleared = 1;
5997 /* We either clear the aggregate or indicate the value is dead. */
5998 else if ((TREE_CODE (type) == UNION_TYPE
5999 || TREE_CODE (type) == QUAL_UNION_TYPE)
6000 && ! CONSTRUCTOR_ELTS (exp))
6001 /* If the constructor is empty, clear the union. */
6003 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6004 cleared = 1;
6007 /* If we are building a static constructor into a register,
6008 set the initial value as zero so we can fold the value into
6009 a constant. But if more than one register is involved,
6010 this probably loses. */
6011 else if (REG_P (target) && TREE_STATIC (exp)
6012 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6014 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6015 cleared = 1;
6018 /* If the constructor has fewer fields than the structure or
6019 if we are initializing the structure to mostly zeros, clear
6020 the whole structure first. Don't do this if TARGET is a
6021 register whose mode size isn't equal to SIZE since
6022 clear_storage can't handle this case. */
6023 else if (size > 0
6024 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6025 != fields_length (type))
6026 || mostly_zeros_p (exp))
6027 && (!REG_P (target)
6028 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6029 == size)))
6031 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6032 cleared = 1;
6035 if (REG_P (target) && !cleared)
6036 emit_clobber (target);
6038 /* Store each element of the constructor into the
6039 corresponding field of TARGET. */
6040 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6042 machine_mode mode;
6043 HOST_WIDE_INT bitsize;
6044 HOST_WIDE_INT bitpos = 0;
6045 tree offset;
6046 rtx to_rtx = target;
6048 /* Just ignore missing fields. We cleared the whole
6049 structure, above, if any fields are missing. */
6050 if (field == 0)
6051 continue;
6053 if (cleared && initializer_zerop (value))
6054 continue;
6056 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6057 bitsize = tree_to_uhwi (DECL_SIZE (field));
6058 else
6059 bitsize = -1;
6061 mode = DECL_MODE (field);
6062 if (DECL_BIT_FIELD (field))
6063 mode = VOIDmode;
6065 offset = DECL_FIELD_OFFSET (field);
6066 if (tree_fits_shwi_p (offset)
6067 && tree_fits_shwi_p (bit_position (field)))
6069 bitpos = int_bit_position (field);
6070 offset = 0;
6072 else
6073 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6075 if (offset)
6077 machine_mode address_mode;
6078 rtx offset_rtx;
6080 offset
6081 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6082 make_tree (TREE_TYPE (exp),
6083 target));
6085 offset_rtx = expand_normal (offset);
6086 gcc_assert (MEM_P (to_rtx));
6088 address_mode = get_address_mode (to_rtx);
6089 if (GET_MODE (offset_rtx) != address_mode)
6090 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6092 to_rtx = offset_address (to_rtx, offset_rtx,
6093 highest_pow2_factor (offset));
6096 /* If this initializes a field that is smaller than a
6097 word, at the start of a word, try to widen it to a full
6098 word. This special case allows us to output C++ member
6099 function initializations in a form that the optimizers
6100 can understand. */
6101 if (WORD_REGISTER_OPERATIONS
6102 && REG_P (target)
6103 && bitsize < BITS_PER_WORD
6104 && bitpos % BITS_PER_WORD == 0
6105 && GET_MODE_CLASS (mode) == MODE_INT
6106 && TREE_CODE (value) == INTEGER_CST
6107 && exp_size >= 0
6108 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6110 tree type = TREE_TYPE (value);
6112 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6114 type = lang_hooks.types.type_for_mode
6115 (word_mode, TYPE_UNSIGNED (type));
6116 value = fold_convert (type, value);
6119 if (BYTES_BIG_ENDIAN)
6120 value
6121 = fold_build2 (LSHIFT_EXPR, type, value,
6122 build_int_cst (type,
6123 BITS_PER_WORD - bitsize));
6124 bitsize = BITS_PER_WORD;
6125 mode = word_mode;
6128 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6129 && DECL_NONADDRESSABLE_P (field))
6131 to_rtx = copy_rtx (to_rtx);
6132 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6135 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6136 value, cleared,
6137 get_alias_set (TREE_TYPE (field)));
6139 break;
6141 case ARRAY_TYPE:
6143 tree value, index;
6144 unsigned HOST_WIDE_INT i;
6145 int need_to_clear;
6146 tree domain;
6147 tree elttype = TREE_TYPE (type);
6148 int const_bounds_p;
6149 HOST_WIDE_INT minelt = 0;
6150 HOST_WIDE_INT maxelt = 0;
6152 domain = TYPE_DOMAIN (type);
6153 const_bounds_p = (TYPE_MIN_VALUE (domain)
6154 && TYPE_MAX_VALUE (domain)
6155 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6156 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6158 /* If we have constant bounds for the range of the type, get them. */
6159 if (const_bounds_p)
6161 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6162 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6165 /* If the constructor has fewer elements than the array, clear
6166 the whole array first. Similarly if this is a static
6167 constructor of a non-BLKmode object. */
6168 if (cleared)
6169 need_to_clear = 0;
6170 else if (REG_P (target) && TREE_STATIC (exp))
6171 need_to_clear = 1;
6172 else
6174 unsigned HOST_WIDE_INT idx;
6175 tree index, value;
6176 HOST_WIDE_INT count = 0, zero_count = 0;
6177 need_to_clear = ! const_bounds_p;
6179 /* This loop is a more accurate version of the loop in
6180 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6181 is also needed to check for missing elements. */
6182 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6184 HOST_WIDE_INT this_node_count;
6186 if (need_to_clear)
6187 break;
6189 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6191 tree lo_index = TREE_OPERAND (index, 0);
6192 tree hi_index = TREE_OPERAND (index, 1);
6194 if (! tree_fits_uhwi_p (lo_index)
6195 || ! tree_fits_uhwi_p (hi_index))
6197 need_to_clear = 1;
6198 break;
6201 this_node_count = (tree_to_uhwi (hi_index)
6202 - tree_to_uhwi (lo_index) + 1);
6204 else
6205 this_node_count = 1;
6207 count += this_node_count;
6208 if (mostly_zeros_p (value))
6209 zero_count += this_node_count;
6212 /* Clear the entire array first if there are any missing
6213 elements, or if the incidence of zero elements is >=
6214 75%. */
6215 if (! need_to_clear
6216 && (count < maxelt - minelt + 1
6217 || 4 * zero_count >= 3 * count))
6218 need_to_clear = 1;
6221 if (need_to_clear && size > 0)
6223 if (REG_P (target))
6224 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6225 else
6226 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6227 cleared = 1;
6230 if (!cleared && REG_P (target))
6231 /* Inform later passes that the old value is dead. */
6232 emit_clobber (target);
6234 /* Store each element of the constructor into the
6235 corresponding element of TARGET, determined by counting the
6236 elements. */
6237 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6239 machine_mode mode;
6240 HOST_WIDE_INT bitsize;
6241 HOST_WIDE_INT bitpos;
6242 rtx xtarget = target;
6244 if (cleared && initializer_zerop (value))
6245 continue;
6247 mode = TYPE_MODE (elttype);
6248 if (mode == BLKmode)
6249 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6250 ? tree_to_uhwi (TYPE_SIZE (elttype))
6251 : -1);
6252 else
6253 bitsize = GET_MODE_BITSIZE (mode);
6255 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6257 tree lo_index = TREE_OPERAND (index, 0);
6258 tree hi_index = TREE_OPERAND (index, 1);
6259 rtx index_r, pos_rtx;
6260 HOST_WIDE_INT lo, hi, count;
6261 tree position;
6263 /* If the range is constant and "small", unroll the loop. */
6264 if (const_bounds_p
6265 && tree_fits_shwi_p (lo_index)
6266 && tree_fits_shwi_p (hi_index)
6267 && (lo = tree_to_shwi (lo_index),
6268 hi = tree_to_shwi (hi_index),
6269 count = hi - lo + 1,
6270 (!MEM_P (target)
6271 || count <= 2
6272 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6273 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6274 <= 40 * 8)))))
6276 lo -= minelt; hi -= minelt;
6277 for (; lo <= hi; lo++)
6279 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6281 if (MEM_P (target)
6282 && !MEM_KEEP_ALIAS_SET_P (target)
6283 && TREE_CODE (type) == ARRAY_TYPE
6284 && TYPE_NONALIASED_COMPONENT (type))
6286 target = copy_rtx (target);
6287 MEM_KEEP_ALIAS_SET_P (target) = 1;
6290 store_constructor_field
6291 (target, bitsize, bitpos, mode, value, cleared,
6292 get_alias_set (elttype));
6295 else
6297 rtx_code_label *loop_start = gen_label_rtx ();
6298 rtx_code_label *loop_end = gen_label_rtx ();
6299 tree exit_cond;
6301 expand_normal (hi_index);
6303 index = build_decl (EXPR_LOCATION (exp),
6304 VAR_DECL, NULL_TREE, domain);
6305 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6306 SET_DECL_RTL (index, index_r);
6307 store_expr (lo_index, index_r, 0, false);
6309 /* Build the head of the loop. */
6310 do_pending_stack_adjust ();
6311 emit_label (loop_start);
6313 /* Assign value to element index. */
6314 position =
6315 fold_convert (ssizetype,
6316 fold_build2 (MINUS_EXPR,
6317 TREE_TYPE (index),
6318 index,
6319 TYPE_MIN_VALUE (domain)));
6321 position =
6322 size_binop (MULT_EXPR, position,
6323 fold_convert (ssizetype,
6324 TYPE_SIZE_UNIT (elttype)));
6326 pos_rtx = expand_normal (position);
6327 xtarget = offset_address (target, pos_rtx,
6328 highest_pow2_factor (position));
6329 xtarget = adjust_address (xtarget, mode, 0);
6330 if (TREE_CODE (value) == CONSTRUCTOR)
6331 store_constructor (value, xtarget, cleared,
6332 bitsize / BITS_PER_UNIT);
6333 else
6334 store_expr (value, xtarget, 0, false);
6336 /* Generate a conditional jump to exit the loop. */
6337 exit_cond = build2 (LT_EXPR, integer_type_node,
6338 index, hi_index);
6339 jumpif (exit_cond, loop_end, -1);
6341 /* Update the loop counter, and jump to the head of
6342 the loop. */
6343 expand_assignment (index,
6344 build2 (PLUS_EXPR, TREE_TYPE (index),
6345 index, integer_one_node),
6346 false);
6348 emit_jump (loop_start);
6350 /* Build the end of the loop. */
6351 emit_label (loop_end);
6354 else if ((index != 0 && ! tree_fits_shwi_p (index))
6355 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6357 tree position;
6359 if (index == 0)
6360 index = ssize_int (1);
6362 if (minelt)
6363 index = fold_convert (ssizetype,
6364 fold_build2 (MINUS_EXPR,
6365 TREE_TYPE (index),
6366 index,
6367 TYPE_MIN_VALUE (domain)));
6369 position =
6370 size_binop (MULT_EXPR, index,
6371 fold_convert (ssizetype,
6372 TYPE_SIZE_UNIT (elttype)));
6373 xtarget = offset_address (target,
6374 expand_normal (position),
6375 highest_pow2_factor (position));
6376 xtarget = adjust_address (xtarget, mode, 0);
6377 store_expr (value, xtarget, 0, false);
6379 else
6381 if (index != 0)
6382 bitpos = ((tree_to_shwi (index) - minelt)
6383 * tree_to_uhwi (TYPE_SIZE (elttype)));
6384 else
6385 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6387 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6388 && TREE_CODE (type) == ARRAY_TYPE
6389 && TYPE_NONALIASED_COMPONENT (type))
6391 target = copy_rtx (target);
6392 MEM_KEEP_ALIAS_SET_P (target) = 1;
6394 store_constructor_field (target, bitsize, bitpos, mode, value,
6395 cleared, get_alias_set (elttype));
6398 break;
6401 case VECTOR_TYPE:
6403 unsigned HOST_WIDE_INT idx;
6404 constructor_elt *ce;
6405 int i;
6406 int need_to_clear;
6407 int icode = CODE_FOR_nothing;
6408 tree elttype = TREE_TYPE (type);
6409 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6410 machine_mode eltmode = TYPE_MODE (elttype);
6411 HOST_WIDE_INT bitsize;
6412 HOST_WIDE_INT bitpos;
6413 rtvec vector = NULL;
6414 unsigned n_elts;
6415 alias_set_type alias;
6417 gcc_assert (eltmode != BLKmode);
6419 n_elts = TYPE_VECTOR_SUBPARTS (type);
6420 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6422 machine_mode mode = GET_MODE (target);
6424 icode = (int) optab_handler (vec_init_optab, mode);
6425 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6426 if (icode != CODE_FOR_nothing)
6428 tree value;
6430 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6431 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6433 icode = CODE_FOR_nothing;
6434 break;
6437 if (icode != CODE_FOR_nothing)
6439 unsigned int i;
6441 vector = rtvec_alloc (n_elts);
6442 for (i = 0; i < n_elts; i++)
6443 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6447 /* If the constructor has fewer elements than the vector,
6448 clear the whole vector first. Similarly if this is a static
6449 constructor of a non-BLKmode object. */
6450 if (cleared)
6451 need_to_clear = 0;
6452 else if (REG_P (target) && TREE_STATIC (exp))
6453 need_to_clear = 1;
6454 else
6456 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6457 tree value;
6459 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6461 int n_elts_here = tree_to_uhwi
6462 (int_const_binop (TRUNC_DIV_EXPR,
6463 TYPE_SIZE (TREE_TYPE (value)),
6464 TYPE_SIZE (elttype)));
6466 count += n_elts_here;
6467 if (mostly_zeros_p (value))
6468 zero_count += n_elts_here;
6471 /* Clear the entire vector first if there are any missing elements,
6472 or if the incidence of zero elements is >= 75%. */
6473 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6476 if (need_to_clear && size > 0 && !vector)
6478 if (REG_P (target))
6479 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6480 else
6481 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6482 cleared = 1;
6485 /* Inform later passes that the old value is dead. */
6486 if (!cleared && !vector && REG_P (target))
6487 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6489 if (MEM_P (target))
6490 alias = MEM_ALIAS_SET (target);
6491 else
6492 alias = get_alias_set (elttype);
6494 /* Store each element of the constructor into the corresponding
6495 element of TARGET, determined by counting the elements. */
6496 for (idx = 0, i = 0;
6497 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6498 idx++, i += bitsize / elt_size)
6500 HOST_WIDE_INT eltpos;
6501 tree value = ce->value;
6503 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6504 if (cleared && initializer_zerop (value))
6505 continue;
6507 if (ce->index)
6508 eltpos = tree_to_uhwi (ce->index);
6509 else
6510 eltpos = i;
6512 if (vector)
6514 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6515 elements. */
6516 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6517 RTVEC_ELT (vector, eltpos)
6518 = expand_normal (value);
6520 else
6522 machine_mode value_mode =
6523 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6524 ? TYPE_MODE (TREE_TYPE (value))
6525 : eltmode;
6526 bitpos = eltpos * elt_size;
6527 store_constructor_field (target, bitsize, bitpos, value_mode,
6528 value, cleared, alias);
6532 if (vector)
6533 emit_insn (GEN_FCN (icode)
6534 (target,
6535 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6536 break;
6539 default:
6540 gcc_unreachable ();
6544 /* Store the value of EXP (an expression tree)
6545 into a subfield of TARGET which has mode MODE and occupies
6546 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6547 If MODE is VOIDmode, it means that we are storing into a bit-field.
6549 BITREGION_START is bitpos of the first bitfield in this region.
6550 BITREGION_END is the bitpos of the ending bitfield in this region.
6551 These two fields are 0 if the C++ memory model does not apply,
6552 or we are not interested in keeping track of bitfield regions.
6554 Always return const0_rtx unless we have something particular to
6555 return.
6557 ALIAS_SET is the alias set for the destination. This value will
6558 (in general) be different from that for TARGET, since TARGET is a
6559 reference to the containing structure.
6561 If NONTEMPORAL is true, try generating a nontemporal store. */
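/* For example (hypothetical bit-field assignment): for

     struct s { unsigned lo : 3, hi : 5; } x;
     x.hi = v;

   store_field is reached with roughly BITSIZE == 5, BITPOS == 3 and
   MODE == VOIDmode, so it takes the bit-field path below and ends
   up in store_bit_field rather than emitting an ordinary memory
   store.  */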
6563 static rtx
6564 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6565 unsigned HOST_WIDE_INT bitregion_start,
6566 unsigned HOST_WIDE_INT bitregion_end,
6567 machine_mode mode, tree exp,
6568 alias_set_type alias_set, bool nontemporal)
6570 if (TREE_CODE (exp) == ERROR_MARK)
6571 return const0_rtx;
6573 /* If we have nothing to store, do nothing unless the expression has
6574 side-effects. */
6575 if (bitsize == 0)
6576 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6578 if (GET_CODE (target) == CONCAT)
6580 /* We're storing into a struct containing a single __complex. */
6582 gcc_assert (!bitpos);
6583 return store_expr (exp, target, 0, nontemporal);
6586 /* If the structure is in a register or if the component
6587 is a bit field, we cannot use addressing to access it.
6588 Use bit-field techniques or SUBREG to store in it. */
6590 if (mode == VOIDmode
6591 || (mode != BLKmode && ! direct_store[(int) mode]
6592 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6593 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6594 || REG_P (target)
6595 || GET_CODE (target) == SUBREG
6596 /* If the field isn't aligned enough to store as an ordinary memref,
6597 store it as a bit field. */
6598 || (mode != BLKmode
6599 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6600 || bitpos % GET_MODE_ALIGNMENT (mode))
6601 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6602 || (bitpos % BITS_PER_UNIT != 0)))
6603 || (bitsize >= 0 && mode != BLKmode
6604 && GET_MODE_BITSIZE (mode) > bitsize)
6605 /* If the RHS and field are a constant size and the size of the
6606 RHS isn't the same size as the bitfield, we must use bitfield
6607 operations. */
6608 || (bitsize >= 0
6609 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6610 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6611 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6612 decl we must use bitfield operations. */
6613 || (bitsize >= 0
6614 && TREE_CODE (exp) == MEM_REF
6615 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6616 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6617 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6618 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6620 rtx temp;
6621 gimple nop_def;
6623 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6624 implies a mask operation. If the precision is the same size as
6625 the field we're storing into, that mask is redundant. This is
6626 particularly common with bit field assignments generated by the
6627 C front end. */
6628 nop_def = get_def_for_expr (exp, NOP_EXPR);
6629 if (nop_def)
6631 tree type = TREE_TYPE (exp);
6632 if (INTEGRAL_TYPE_P (type)
6633 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6634 && bitsize == TYPE_PRECISION (type))
6636 tree op = gimple_assign_rhs1 (nop_def);
6637 type = TREE_TYPE (op);
6638 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6639 exp = op;
6643 temp = expand_normal (exp);
6645 /* If BITSIZE is narrower than the size of the type of EXP
6646 we will be narrowing TEMP. Normally, what's wanted are the
6647 low-order bits. However, if EXP's type is a record and this is
6648 a big-endian machine, we want the upper BITSIZE bits. */
6649 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6650 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6651 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6652 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6653 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6654 NULL_RTX, 1);
6656 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6657 if (mode != VOIDmode && mode != BLKmode
6658 && mode != TYPE_MODE (TREE_TYPE (exp)))
6659 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6661 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6662 are both BLKmode, both must be in memory and BITPOS must be aligned
6663 on a byte boundary. If so, we simply do a block copy. Likewise for
6664 a BLKmode-like TARGET. */
6665 if (GET_CODE (temp) != PARALLEL
6666 && GET_MODE (temp) == BLKmode
6667 && (GET_MODE (target) == BLKmode
6668 || (MEM_P (target)
6669 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6670 && (bitpos % BITS_PER_UNIT) == 0
6671 && (bitsize % BITS_PER_UNIT) == 0)))
6673 gcc_assert (MEM_P (target) && MEM_P (temp)
6674 && (bitpos % BITS_PER_UNIT) == 0);
6676 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6677 emit_block_move (target, temp,
6678 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6679 / BITS_PER_UNIT),
6680 BLOCK_OP_NORMAL);
6682 return const0_rtx;
6685 /* Handle calls that return values in multiple non-contiguous locations.
6686 The Irix 6 ABI has examples of this. */
6687 if (GET_CODE (temp) == PARALLEL)
6689 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6690 rtx temp_target;
6691 if (mode == BLKmode || mode == VOIDmode)
6692 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6693 temp_target = gen_reg_rtx (mode);
6694 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6695 temp = temp_target;
6697 else if (mode == BLKmode)
6699 /* Handle calls that return BLKmode values in registers. */
6700 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6702 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6703 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6704 temp = temp_target;
6706 else
6708 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6709 rtx temp_target;
6710 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6711 temp_target = gen_reg_rtx (mode);
6712 temp_target
6713 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6714 temp_target, mode, mode);
6715 temp = temp_target;
6719 /* Store the value in the bitfield. */
6720 store_bit_field (target, bitsize, bitpos,
6721 bitregion_start, bitregion_end,
6722 mode, temp);
6724 return const0_rtx;
6726 else
6728 /* Now build a reference to just the desired component. */
6729 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6731 if (to_rtx == target)
6732 to_rtx = copy_rtx (to_rtx);
6734 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6735 set_mem_alias_set (to_rtx, alias_set);
6737 return store_expr (exp, to_rtx, 0, nontemporal);
6741 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6742 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6743 codes and find the ultimate containing object, which we return.
6745 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6746 bit position, and *PUNSIGNEDP to the signedness of the field.
6747 If the position of the field is variable, we store a tree
6748 giving the variable offset (in units) in *POFFSET.
6749 This offset is in addition to the bit position.
6750 If the position is not variable, we store 0 in *POFFSET.
6752 If any of the extraction expressions is volatile,
6753 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6755 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6756 Otherwise, it is a mode that can be used to access the field.
6758 If the field describes a variable-sized object, *PMODE is set to
6759 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6760 this case, but the address of the object can be found.
6762 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6763 look through nodes that serve as markers of a greater alignment than
6764 the one that can be deduced from the expression. These nodes make it
6765 possible for front-ends to prevent temporaries from being created by
6766 the middle-end on alignment considerations. For that purpose, the
6767 normal operating mode at high-level is to always pass FALSE so that
6768 the ultimate containing object is really returned; moreover, the
6769 associated predicate handled_component_p will always return TRUE
6770 on these nodes, thus indicating that they are essentially handled
6771 by get_inner_reference. TRUE should only be passed when the caller
6772 is scanning the expression in order to build another representation
6773 and specifically knows how to handle these nodes; as such, this is
6774 the normal operating mode in the RTL expanders. */
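/* For example (hypothetical reference, assuming 32-bit int and no
   padding): for

     struct s { int x; int y[10]; } s;
     ... s.y[3] ...

   get_inner_reference should return the decl for S with
   *PBITSIZE == 32, *PBITPOS == 32 + 3*32 == 128 (the constant index
   is folded into the bit position), *POFFSET == NULL_TREE, and
   *PMODE the mode of int (SImode on typical targets).  */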
6776 tree
6777 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6778 HOST_WIDE_INT *pbitpos, tree *poffset,
6779 machine_mode *pmode, int *punsignedp,
6780 int *pvolatilep, bool keep_aligning)
6782 tree size_tree = 0;
6783 machine_mode mode = VOIDmode;
6784 bool blkmode_bitfield = false;
6785 tree offset = size_zero_node;
6786 offset_int bit_offset = 0;
6788 /* First get the mode, signedness, and size. We do this from just the
6789 outermost expression. */
6790 *pbitsize = -1;
6791 if (TREE_CODE (exp) == COMPONENT_REF)
6793 tree field = TREE_OPERAND (exp, 1);
6794 size_tree = DECL_SIZE (field);
6795 if (flag_strict_volatile_bitfields > 0
6796 && TREE_THIS_VOLATILE (exp)
6797 && DECL_BIT_FIELD_TYPE (field)
6798 && DECL_MODE (field) != BLKmode)
6799 /* Volatile bitfields should be accessed in the mode of the
6800 field's type, not the mode computed based on the bit
6801 size. */
6802 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6803 else if (!DECL_BIT_FIELD (field))
6804 mode = DECL_MODE (field);
6805 else if (DECL_MODE (field) == BLKmode)
6806 blkmode_bitfield = true;
6808 *punsignedp = DECL_UNSIGNED (field);
6810 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6812 size_tree = TREE_OPERAND (exp, 1);
6813 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6814 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6816 /* For vector types, with the correct size of access, use the mode of
6817 the inner type. */
6818 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6819 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6820 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6821 mode = TYPE_MODE (TREE_TYPE (exp));
6823 else
6825 mode = TYPE_MODE (TREE_TYPE (exp));
6826 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6828 if (mode == BLKmode)
6829 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6830 else
6831 *pbitsize = GET_MODE_BITSIZE (mode);
6834 if (size_tree != 0)
6836 if (! tree_fits_uhwi_p (size_tree))
6837 mode = BLKmode, *pbitsize = -1;
6838 else
6839 *pbitsize = tree_to_uhwi (size_tree);
6842 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6843 and find the ultimate containing object. */
6844 while (1)
6846 switch (TREE_CODE (exp))
6848 case BIT_FIELD_REF:
6849 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6850 break;
6852 case COMPONENT_REF:
6854 tree field = TREE_OPERAND (exp, 1);
6855 tree this_offset = component_ref_field_offset (exp);
6857 /* If this field hasn't been filled in yet, don't go past it.
6858 This should only happen when folding expressions made during
6859 type construction. */
6860 if (this_offset == 0)
6861 break;
6863 offset = size_binop (PLUS_EXPR, offset, this_offset);
6864 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6866 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6868 break;
6870 case ARRAY_REF:
6871 case ARRAY_RANGE_REF:
6873 tree index = TREE_OPERAND (exp, 1);
6874 tree low_bound = array_ref_low_bound (exp);
6875 tree unit_size = array_ref_element_size (exp);
6877 /* We assume all arrays have sizes that are a multiple of a byte.
6878 First subtract the lower bound, if any, in the type of the
6879 index, then convert to sizetype and multiply by the size of
6880 the array element. */
6881 if (! integer_zerop (low_bound))
6882 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6883 index, low_bound);
6885 offset = size_binop (PLUS_EXPR, offset,
6886 size_binop (MULT_EXPR,
6887 fold_convert (sizetype, index),
6888 unit_size));
6890 break;
6892 case REALPART_EXPR:
6893 break;
6895 case IMAGPART_EXPR:
6896 bit_offset += *pbitsize;
6897 break;
6899 case VIEW_CONVERT_EXPR:
6900 if (keep_aligning && STRICT_ALIGNMENT
6901 && (TYPE_ALIGN (TREE_TYPE (exp))
6902 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6903 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6904 < BIGGEST_ALIGNMENT)
6905 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6906 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6907 goto done;
6908 break;
6910 case MEM_REF:
6911 /* Hand back the decl for MEM[&decl, off]. */
6912 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6914 tree off = TREE_OPERAND (exp, 1);
6915 if (!integer_zerop (off))
6917 offset_int boff, coff = mem_ref_offset (exp);
6918 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6919 bit_offset += boff;
6921 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6923 goto done;
6925 default:
6926 goto done;
6929 /* If any reference in the chain is volatile, the effect is volatile. */
6930 if (TREE_THIS_VOLATILE (exp))
6931 *pvolatilep = 1;
6933 exp = TREE_OPERAND (exp, 0);
6935 done:
6937 /* If OFFSET is constant, see if we can return the whole thing as a
6938 constant bit position. Make sure to handle overflow during
6939 this conversion. */
6940 if (TREE_CODE (offset) == INTEGER_CST)
6942 offset_int tem = wi::sext (wi::to_offset (offset),
6943 TYPE_PRECISION (sizetype));
6944 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6945 tem += bit_offset;
6946 if (wi::fits_shwi_p (tem))
6948 *pbitpos = tem.to_shwi ();
6949 *poffset = offset = NULL_TREE;
6953 /* Otherwise, split it up. */
6954 if (offset)
6956 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6957 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6959 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6960 offset_int tem = bit_offset.and_not (mask);
6961 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6962 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6963 bit_offset -= tem;
6964 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6965 offset = size_binop (PLUS_EXPR, offset,
6966 wide_int_to_tree (sizetype, tem));
6969 *pbitpos = bit_offset.to_shwi ();
6970 *poffset = offset;
6973 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6974 if (mode == VOIDmode
6975 && blkmode_bitfield
6976 && (*pbitpos % BITS_PER_UNIT) == 0
6977 && (*pbitsize % BITS_PER_UNIT) == 0)
6978 *pmode = BLKmode;
6979 else
6980 *pmode = mode;
6982 return exp;
6985 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6987 static unsigned HOST_WIDE_INT
6988 target_align (const_tree target)
6990 /* We might have a chain of nested references with intermediate misaligning
6991 bitfield components, so we need to recurse to find out. */
6993 unsigned HOST_WIDE_INT this_align, outer_align;
6995 switch (TREE_CODE (target))
6997 case BIT_FIELD_REF:
6998 return 1;
7000 case COMPONENT_REF:
7001 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7002 outer_align = target_align (TREE_OPERAND (target, 0));
7003 return MIN (this_align, outer_align);
7005 case ARRAY_REF:
7006 case ARRAY_RANGE_REF:
7007 this_align = TYPE_ALIGN (TREE_TYPE (target));
7008 outer_align = target_align (TREE_OPERAND (target, 0));
7009 return MIN (this_align, outer_align);
7011 CASE_CONVERT:
7012 case NON_LVALUE_EXPR:
7013 case VIEW_CONVERT_EXPR:
7014 this_align = TYPE_ALIGN (TREE_TYPE (target));
7015 outer_align = target_align (TREE_OPERAND (target, 0));
7016 return MAX (this_align, outer_align);
7018 default:
7019 return TYPE_ALIGN (TREE_TYPE (target));
7024 /* Given an rtx VALUE that may contain additions and multiplications, return
7025 an equivalent value that just refers to a register, memory, or constant.
7026 This is done by generating instructions to perform the arithmetic and
7027 returning a pseudo-register containing the value.
7029 The returned value may be a REG, SUBREG, MEM or constant. */
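/* For example (hypothetical rtx): given

     (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits an addition and returns a pseudo holding the
   sum; a MINUS whose second operand is a CONST_INT is first
   canonicalized into a PLUS of the negated constant, as the code
   below shows.  */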
7032 force_operand (rtx value, rtx target)
7034 rtx op1, op2;
7035 /* Use subtarget as the target for operand 0 of a binary operation. */
7036 rtx subtarget = get_subtarget (target);
7037 enum rtx_code code = GET_CODE (value);
7039 /* Check for subreg applied to an expression produced by loop optimizer. */
7040 if (code == SUBREG
7041 && !REG_P (SUBREG_REG (value))
7042 && !MEM_P (SUBREG_REG (value)))
7044 value
7045 = simplify_gen_subreg (GET_MODE (value),
7046 force_reg (GET_MODE (SUBREG_REG (value)),
7047 force_operand (SUBREG_REG (value),
7048 NULL_RTX)),
7049 GET_MODE (SUBREG_REG (value)),
7050 SUBREG_BYTE (value));
7051 code = GET_CODE (value);
7054 /* Check for a PIC address load. */
7055 if ((code == PLUS || code == MINUS)
7056 && XEXP (value, 0) == pic_offset_table_rtx
7057 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7058 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7059 || GET_CODE (XEXP (value, 1)) == CONST))
7061 if (!subtarget)
7062 subtarget = gen_reg_rtx (GET_MODE (value));
7063 emit_move_insn (subtarget, value);
7064 return subtarget;
7067 if (ARITHMETIC_P (value))
7069 op2 = XEXP (value, 1);
7070 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7071 subtarget = 0;
7072 if (code == MINUS && CONST_INT_P (op2))
7074 code = PLUS;
7075 op2 = negate_rtx (GET_MODE (value), op2);
7078 /* Check for an addition with OP2 a constant integer and our first
7079 operand a PLUS of a virtual register and something else. In that
7080 case, we want to emit the sum of the virtual register and the
7081 constant first and then add the other value. This allows virtual
7082 register instantiation to simply modify the constant rather than
7083 creating another one around this addition. */
7084 if (code == PLUS && CONST_INT_P (op2)
7085 && GET_CODE (XEXP (value, 0)) == PLUS
7086 && REG_P (XEXP (XEXP (value, 0), 0))
7087 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7088 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7090 rtx temp = expand_simple_binop (GET_MODE (value), code,
7091 XEXP (XEXP (value, 0), 0), op2,
7092 subtarget, 0, OPTAB_LIB_WIDEN);
7093 return expand_simple_binop (GET_MODE (value), code, temp,
7094 force_operand (XEXP (XEXP (value,
7095 0), 1), 0),
7096 target, 0, OPTAB_LIB_WIDEN);
7099 op1 = force_operand (XEXP (value, 0), subtarget);
7100 op2 = force_operand (op2, NULL_RTX);
7101 switch (code)
7103 case MULT:
7104 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7105 case DIV:
7106 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7107 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7108 target, 1, OPTAB_LIB_WIDEN);
7109 else
7110 return expand_divmod (0,
7111 FLOAT_MODE_P (GET_MODE (value))
7112 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7113 GET_MODE (value), op1, op2, target, 0);
7114 case MOD:
7115 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7116 target, 0);
7117 case UDIV:
7118 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7119 target, 1);
7120 case UMOD:
7121 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7122 target, 1);
7123 case ASHIFTRT:
7124 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7125 target, 0, OPTAB_LIB_WIDEN);
7126 default:
7127 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7128 target, 1, OPTAB_LIB_WIDEN);
7131 if (UNARY_P (value))
7133 if (!target)
7134 target = gen_reg_rtx (GET_MODE (value));
7135 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7136 switch (code)
7138 case ZERO_EXTEND:
7139 case SIGN_EXTEND:
7140 case TRUNCATE:
7141 case FLOAT_EXTEND:
7142 case FLOAT_TRUNCATE:
7143 convert_move (target, op1, code == ZERO_EXTEND);
7144 return target;
7146 case FIX:
7147 case UNSIGNED_FIX:
7148 expand_fix (target, op1, code == UNSIGNED_FIX);
7149 return target;
7151 case FLOAT:
7152 case UNSIGNED_FLOAT:
7153 expand_float (target, op1, code == UNSIGNED_FLOAT);
7154 return target;
7156 default:
7157 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7161 #ifdef INSN_SCHEDULING
7162 /* On machines that have insn scheduling, we want all memory references to be
7163 explicit, so we need to deal with such paradoxical SUBREGs. */
7164 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7165 value
7166 = simplify_gen_subreg (GET_MODE (value),
7167 force_reg (GET_MODE (SUBREG_REG (value)),
7168 force_operand (SUBREG_REG (value),
7169 NULL_RTX)),
7170 GET_MODE (SUBREG_REG (value)),
7171 SUBREG_BYTE (value));
7172 #endif
7174 return value;
7177 /* Subroutine of expand_expr: return nonzero iff there is no way that
7178 EXP can reference X, which is being modified. TOP_P is nonzero if this
7179 call is going to be used to determine whether we need a temporary
7180 for EXP, as opposed to a recursive call to this function.
7182 It is always safe for this routine to return zero since it merely
7183 searches for optimization opportunities. */
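/* For example (hypothetical assignment): when expanding

     x = foo () + 1;

   with X living in memory, the CALL_EXPR case below assumes the
   call clobbers all of memory, so safe_from_p (x, rhs, 1) returns 0
   and the expander should evaluate the right-hand side into a
   temporary before storing into X.  */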
7186 safe_from_p (const_rtx x, tree exp, int top_p)
7188 rtx exp_rtl = 0;
7189 int i, nops;
7191 if (x == 0
7192 /* If EXP has varying size, we MUST use a target since we currently
7193 have no way of allocating temporaries of variable size
7194 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7195 So we assume here that something at a higher level has prevented a
7196 clash. This is somewhat bogus, but the best we can do. Only
7197 do this when X is BLKmode and when we are at the top level. */
7198 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7200 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7201 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7202 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7203 != INTEGER_CST)
7204 && GET_MODE (x) == BLKmode)
7205 /* If X is in the outgoing argument area, it is always safe. */
7206 || (MEM_P (x)
7207 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7208 || (GET_CODE (XEXP (x, 0)) == PLUS
7209 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7210 return 1;
7212 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7213 find the underlying pseudo. */
7214 if (GET_CODE (x) == SUBREG)
7216 x = SUBREG_REG (x);
7217 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7218 return 0;
7221 /* Now look at our tree code and possibly recurse. */
7222 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7224 case tcc_declaration:
7225 exp_rtl = DECL_RTL_IF_SET (exp);
7226 break;
7228 case tcc_constant:
7229 return 1;
7231 case tcc_exceptional:
7232 if (TREE_CODE (exp) == TREE_LIST)
7234 while (1)
7236 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7237 return 0;
7238 exp = TREE_CHAIN (exp);
7239 if (!exp)
7240 return 1;
7241 if (TREE_CODE (exp) != TREE_LIST)
7242 return safe_from_p (x, exp, 0);
7245 else if (TREE_CODE (exp) == CONSTRUCTOR)
7247 constructor_elt *ce;
7248 unsigned HOST_WIDE_INT idx;
7250 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7251 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7252 || !safe_from_p (x, ce->value, 0))
7253 return 0;
7254 return 1;
7256 else if (TREE_CODE (exp) == ERROR_MARK)
7257 return 1; /* An already-visited SAVE_EXPR? */
7258 else
7259 return 0;
7261 case tcc_statement:
7262 /* The only case we look at here is the DECL_INITIAL inside a
7263 DECL_EXPR. */
7264 return (TREE_CODE (exp) != DECL_EXPR
7265 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7266 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7267 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7269 case tcc_binary:
7270 case tcc_comparison:
7271 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7272 return 0;
7273 /* Fall through. */
7275 case tcc_unary:
7276 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7278 case tcc_expression:
7279 case tcc_reference:
7280 case tcc_vl_exp:
7281 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7282 the expression. If it is set, we conflict iff we are that rtx or
7283 both are in memory. Otherwise, we check all operands of the
7284 expression recursively. */
7286 switch (TREE_CODE (exp))
7288 case ADDR_EXPR:
7289 /* If the operand is static or we are static, we can't conflict.
7290 Likewise if we don't conflict with the operand at all. */
7291 if (staticp (TREE_OPERAND (exp, 0))
7292 || TREE_STATIC (exp)
7293 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7294 return 1;
7296 /* Otherwise, the only way this can conflict is if we are taking
7297 the address of a DECL whose address is part of X, which is
7298 very rare. */
7299 exp = TREE_OPERAND (exp, 0);
7300 if (DECL_P (exp))
7302 if (!DECL_RTL_SET_P (exp)
7303 || !MEM_P (DECL_RTL (exp)))
7304 return 0;
7305 else
7306 exp_rtl = XEXP (DECL_RTL (exp), 0);
7308 break;
7310 case MEM_REF:
7311 if (MEM_P (x)
7312 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7313 get_alias_set (exp)))
7314 return 0;
7315 break;
7317 case CALL_EXPR:
7318 /* Assume that the call will clobber all hard registers and
7319 all of memory. */
7320 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7321 || MEM_P (x))
7322 return 0;
7323 break;
7325 case WITH_CLEANUP_EXPR:
7326 case CLEANUP_POINT_EXPR:
7327 /* Lowered by gimplify.c. */
7328 gcc_unreachable ();
7330 case SAVE_EXPR:
7331 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7333 default:
7334 break;
7337 /* If we have an rtx, we do not need to scan our operands. */
7338 if (exp_rtl)
7339 break;
7341 nops = TREE_OPERAND_LENGTH (exp);
7342 for (i = 0; i < nops; i++)
7343 if (TREE_OPERAND (exp, i) != 0
7344 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7345 return 0;
7347 break;
7349 case tcc_type:
7350 /* Should never get a type here. */
7351 gcc_unreachable ();
7354 /* If we have an rtl, find any enclosed object. Then see if we conflict
7355 with it. */
7356 if (exp_rtl)
7358 if (GET_CODE (exp_rtl) == SUBREG)
7360 exp_rtl = SUBREG_REG (exp_rtl);
7361 if (REG_P (exp_rtl)
7362 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7363 return 0;
7366 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
7367 both are memory and they conflict. */
7368 return ! (rtx_equal_p (x, exp_rtl)
7369 || (MEM_P (x) && MEM_P (exp_rtl)
7370 && true_dependence (exp_rtl, VOIDmode, x)));
7373 /* If we reach here, it is safe. */
7374 return 1;
7378 /* Return the highest power of two that EXP is known to be a multiple of.
7379 This is used in updating alignment of MEMs in array references. */
7381 unsigned HOST_WIDE_INT
7382 highest_pow2_factor (const_tree exp)
7384 unsigned HOST_WIDE_INT ret;
7385 int trailing_zeros = tree_ctz (exp);
7386 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7387 return BIGGEST_ALIGNMENT;
7388 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7389 if (ret > BIGGEST_ALIGNMENT)
7390 return BIGGEST_ALIGNMENT;
7391 return ret;
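/* For example (hypothetical expressions): for `i * 12' with I
   unknown, tree_ctz reports 2 trailing zero bits, so the result is
   4; for the constant 48 it reports 4, giving 16. The result is
   always capped at BIGGEST_ALIGNMENT.  */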
7394 /* Similar, except that the alignment requirements of TARGET are
7395 taken into account. Assume it is at least as aligned as its
7396 type, unless it is a COMPONENT_REF in which case the layout of
7397 the structure gives the alignment. */
7399 static unsigned HOST_WIDE_INT
7400 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7402 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7403 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7405 return MAX (factor, talign);
7408 /* Convert the tree comparison code TCODE to the rtl one where the
7409 signedness is UNSIGNEDP. */
7411 static enum rtx_code
7412 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7414 enum rtx_code code;
7415 switch (tcode)
7417 case EQ_EXPR:
7418 code = EQ;
7419 break;
7420 case NE_EXPR:
7421 code = NE;
7422 break;
7423 case LT_EXPR:
7424 code = unsignedp ? LTU : LT;
7425 break;
7426 case LE_EXPR:
7427 code = unsignedp ? LEU : LE;
7428 break;
7429 case GT_EXPR:
7430 code = unsignedp ? GTU : GT;
7431 break;
7432 case GE_EXPR:
7433 code = unsignedp ? GEU : GE;
7434 break;
7435 case UNORDERED_EXPR:
7436 code = UNORDERED;
7437 break;
7438 case ORDERED_EXPR:
7439 code = ORDERED;
7440 break;
7441 case UNLT_EXPR:
7442 code = UNLT;
7443 break;
7444 case UNLE_EXPR:
7445 code = UNLE;
7446 break;
7447 case UNGT_EXPR:
7448 code = UNGT;
7449 break;
7450 case UNGE_EXPR:
7451 code = UNGE;
7452 break;
7453 case UNEQ_EXPR:
7454 code = UNEQ;
7455 break;
7456 case LTGT_EXPR:
7457 code = LTGT;
7458 break;
7460 default:
7461 gcc_unreachable ();
7463 return code;
7466 /* Subroutine of expand_expr. Expand the two operands of a binary
7467 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7468 The value may be stored in TARGET if TARGET is nonzero. The
7469 MODIFIER argument is as documented by expand_expr. */
7471 void
7472 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7473 enum expand_modifier modifier)
7475 if (! safe_from_p (target, exp1, 1))
7476 target = 0;
7477 if (operand_equal_p (exp0, exp1, 0))
7479 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7480 *op1 = copy_rtx (*op0);
7482 else
7484 /* If we need to preserve evaluation order, copy exp0 into its own
7485 temporary variable so that it can't be clobbered by exp1. */
7486 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7487 exp0 = save_expr (exp0);
7488 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7489 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
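/* Usage sketch (editorial, mirroring the binop: label further down in
   expand_expr_real_2): a typical caller expands a binary operation as

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   where the safe_from_p check above clears TARGET first whenever
   storing EXP0 there could clobber values that EXP1 still needs.  */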
7494 /* Return a MEM that contains constant EXP. DEFER is as for
7495 output_constant_def and MODIFIER is as for expand_expr. */
7497 static rtx
7498 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7500 rtx mem;
7502 mem = output_constant_def (exp, defer);
7503 if (modifier != EXPAND_INITIALIZER)
7504 mem = use_anchored_address (mem);
7505 return mem;
7508 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7509 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7511 static rtx
7512 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7513 enum expand_modifier modifier, addr_space_t as)
7515 rtx result, subtarget;
7516 tree inner, offset;
7517 HOST_WIDE_INT bitsize, bitpos;
7518 int volatilep, unsignedp;
7519 machine_mode mode1;
7521 /* If we are taking the address of a constant and are at the top level,
7522 we have to use output_constant_def since we can't call force_const_mem
7523 at top level. */
7524 /* ??? This should be considered a front-end bug. We should not be
7525 generating ADDR_EXPR of something that isn't an LVALUE. The only
7526 exception here is STRING_CST. */
7527 if (CONSTANT_CLASS_P (exp))
7529 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7530 if (modifier < EXPAND_SUM)
7531 result = force_operand (result, target);
7532 return result;
7535 /* Everything must be something allowed by is_gimple_addressable. */
7536 switch (TREE_CODE (exp))
7538 case INDIRECT_REF:
7539 /* This case will happen via recursion for &a->b. */
7540 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7542 case MEM_REF:
7544 tree tem = TREE_OPERAND (exp, 0);
7545 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7546 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7547 return expand_expr (tem, target, tmode, modifier);
7550 case CONST_DECL:
7551 /* Expand the initializer like constants above. */
7552 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7553 0, modifier), 0);
7554 if (modifier < EXPAND_SUM)
7555 result = force_operand (result, target);
7556 return result;
7558 case REALPART_EXPR:
7559 /* The real part of the complex number is always first, therefore
7560 the address is the same as the address of the parent object. */
7561 offset = 0;
7562 bitpos = 0;
7563 inner = TREE_OPERAND (exp, 0);
7564 break;
7566 case IMAGPART_EXPR:
7567 /* The imaginary part of the complex number is always second.
7568 The expression is therefore always offset by the size of the
7569 scalar type. */
7570 offset = 0;
7571 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7572 inner = TREE_OPERAND (exp, 0);
7573 break;
7575 case COMPOUND_LITERAL_EXPR:
7576 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7577 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7578 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7579 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7580 the initializers aren't gimplified. */
7581 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7582 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7583 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7584 target, tmode, modifier, as);
7585 /* FALLTHRU */
7586 default:
7587 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7588 expand_expr, as that can have various side effects; LABEL_DECLs for
7589 example, may not have their DECL_RTL set yet. Expand the rtl of
7590 CONSTRUCTORs too, which should yield a memory reference for the
7591 constructor's contents. Assume language specific tree nodes can
7592 be expanded in some interesting way. */
7593 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7594 if (DECL_P (exp)
7595 || TREE_CODE (exp) == CONSTRUCTOR
7596 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7598 result = expand_expr (exp, target, tmode,
7599 modifier == EXPAND_INITIALIZER
7600 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7602 /* If the DECL isn't in memory, then the DECL wasn't properly
7603 marked TREE_ADDRESSABLE, which will be either a front-end
7604 or a tree optimizer bug. */
7606 if (TREE_ADDRESSABLE (exp)
7607 && ! MEM_P (result)
7608 && ! targetm.calls.allocate_stack_slots_for_args ())
7610 error ("local frame unavailable (naked function?)");
7611 return result;
7613 else
7614 gcc_assert (MEM_P (result));
7615 result = XEXP (result, 0);
7617 /* ??? Is this needed anymore? */
7618 if (DECL_P (exp))
7619 TREE_USED (exp) = 1;
7621 if (modifier != EXPAND_INITIALIZER
7622 && modifier != EXPAND_CONST_ADDRESS
7623 && modifier != EXPAND_SUM)
7624 result = force_operand (result, target);
7625 return result;
7628 /* Pass FALSE as the last argument to get_inner_reference although
7629 we are expanding to RTL. The rationale is that we know how to
7630 handle "aligning nodes" here: we can just bypass them because
7631 they won't change the final object whose address will be returned
7632 (they actually exist only for that purpose). */
7633 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7634 &mode1, &unsignedp, &volatilep, false);
7635 break;
7638 /* We must have made progress. */
7639 gcc_assert (inner != exp);
7641 subtarget = offset || bitpos ? NULL_RTX : target;
7642 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7643 inner alignment, force the inner to be sufficiently aligned. */
7644 if (CONSTANT_CLASS_P (inner)
7645 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7647 inner = copy_node (inner);
7648 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7649 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7650 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7652 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7654 if (offset)
7656 rtx tmp;
7658 if (modifier != EXPAND_NORMAL)
7659 result = force_operand (result, NULL);
7660 tmp = expand_expr (offset, NULL_RTX, tmode,
7661 modifier == EXPAND_INITIALIZER
7662 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7664 /* expand_expr is allowed to return an object in a mode other
7665 than TMODE. If it did, we need to convert. */
7666 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7667 tmp = convert_modes (tmode, GET_MODE (tmp),
7668 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7669 result = convert_memory_address_addr_space (tmode, result, as);
7670 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7672 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7673 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7674 else
7676 subtarget = bitpos ? NULL_RTX : target;
7677 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7678 1, OPTAB_LIB_WIDEN);
7682 if (bitpos)
7684 /* Someone beforehand should have rejected taking the address
7685 of such an object. */
7686 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7688 result = convert_memory_address_addr_space (tmode, result, as);
7689 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7690 if (modifier < EXPAND_SUM)
7691 result = force_operand (result, target);
7694 return result;
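/* Worked example (editorial): for &s.f where field F starts 48 bits
   into S, get_inner_reference returns S as INNER with BITPOS == 48 and
   no variable OFFSET, so the code above reduces to

     result = plus_constant (tmode, result, 48 / BITS_PER_UNIT);

   i.e. the address of S plus 6 bytes.  */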
7697 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7698 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7700 static rtx
7701 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7702 enum expand_modifier modifier)
7704 addr_space_t as = ADDR_SPACE_GENERIC;
7705 machine_mode address_mode = Pmode;
7706 machine_mode pointer_mode = ptr_mode;
7707 machine_mode rmode;
7708 rtx result;
7710 /* Target mode of VOIDmode says "whatever's natural". */
7711 if (tmode == VOIDmode)
7712 tmode = TYPE_MODE (TREE_TYPE (exp));
7714 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7716 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7717 address_mode = targetm.addr_space.address_mode (as);
7718 pointer_mode = targetm.addr_space.pointer_mode (as);
7721 /* We can get called with some Weird Things if the user does silliness
7722 like "(short) &a". In that case, convert_memory_address won't do
7723 the right thing, so ignore the given target mode. */
7724 if (tmode != address_mode && tmode != pointer_mode)
7725 tmode = address_mode;
7727 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7728 tmode, modifier, as);
7730 /* Despite expand_expr's claim that TMODE may be ignored when not
7731 strictly convenient, things break if we don't honor it. Note
7732 that combined with the above, we only do this for pointer modes. */
7733 rmode = GET_MODE (result);
7734 if (rmode == VOIDmode)
7735 rmode = tmode;
7736 if (rmode != tmode)
7737 result = convert_memory_address_addr_space (tmode, result, as);
7739 return result;
7742 /* Generate code for computing CONSTRUCTOR EXP.
7743 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7744 is TRUE, then instead of creating a temporary variable in memory,
7745 NULL is returned and the caller needs to handle it differently. */
7747 static rtx
7748 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7749 bool avoid_temp_mem)
7751 tree type = TREE_TYPE (exp);
7752 machine_mode mode = TYPE_MODE (type);
7754 /* Try to avoid creating a temporary at all. This is possible
7755 if all of the initializer is zero.
7756 FIXME: try to handle all [0..255] initializers we can handle
7757 with memset. */
7758 if (TREE_STATIC (exp)
7759 && !TREE_ADDRESSABLE (exp)
7760 && target != 0 && mode == BLKmode
7761 && all_zeros_p (exp))
7763 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7764 return target;
7767 /* All elts simple constants => refer to a constant in memory. But
7768 if this is a non-BLKmode mode, let it store a field at a time
7769 since that should make a CONST_INT, CONST_WIDE_INT or
7770 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7771 use, it is best to store directly into the target unless the type
7772 is large enough that memcpy will be used. If we are making an
7773 initializer and all operands are constant, put it in memory as
7774 well.
7776 FIXME: Avoid trying to fill vector constructors piecemeal.
7777 Output them with output_constant_def below unless we're sure
7778 they're zeros. This should go away when vector initializers
7779 are treated like VECTOR_CST instead of arrays. */
7780 if ((TREE_STATIC (exp)
7781 && ((mode == BLKmode
7782 && ! (target != 0 && safe_from_p (target, exp, 1)))
7783 || TREE_ADDRESSABLE (exp)
7784 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7785 && (! can_move_by_pieces
7786 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7787 TYPE_ALIGN (type)))
7788 && ! mostly_zeros_p (exp))))
7789 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7790 && TREE_CONSTANT (exp)))
7792 rtx constructor;
7794 if (avoid_temp_mem)
7795 return NULL_RTX;
7797 constructor = expand_expr_constant (exp, 1, modifier);
7799 if (modifier != EXPAND_CONST_ADDRESS
7800 && modifier != EXPAND_INITIALIZER
7801 && modifier != EXPAND_SUM)
7802 constructor = validize_mem (constructor);
7804 return constructor;
7807 /* Handle calls that pass values in multiple non-contiguous
7808 locations. The Irix 6 ABI has examples of this. */
7809 if (target == 0 || ! safe_from_p (target, exp, 1)
7810 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7812 if (avoid_temp_mem)
7813 return NULL_RTX;
7815 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7818 store_constructor (exp, target, 0, int_expr_size (exp));
7819 return target;
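/* Example (editorial): an all-zero constant initializer such as

     struct S s = { 0, 0, 0 };

   satisfies all_zeros_p, so with a BLKmode target the function takes
   the clear_storage path above (typically a block clear or memset)
   instead of storing the constructor field by field.  */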
7823 /* expand_expr: generate code for computing expression EXP.
7824 An rtx for the computed value is returned. The value is never null.
7825 In the case of a void EXP, const0_rtx is returned.
7827 The value may be stored in TARGET if TARGET is nonzero.
7828 TARGET is just a suggestion; callers must assume that
7829 the rtx returned may not be the same as TARGET.
7831 If TARGET is CONST0_RTX, it means that the value will be ignored.
7833 If TMODE is not VOIDmode, it suggests generating the
7834 result in mode TMODE. But this is done only when convenient.
7835 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7836 TMODE is just a suggestion; callers must assume that
7837 the rtx returned may not have mode TMODE.
7839 Note that TARGET may have neither TMODE nor MODE. In that case, it
7840 probably will not be used.
7842 If MODIFIER is EXPAND_SUM then when EXP is an addition
7843 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7844 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7845 products as above, or REG or MEM, or constant.
7846 Ordinarily in such cases we would output mul or add instructions
7847 and then return a pseudo reg containing the sum.
7849 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7850 it also marks a label as absolutely required (it can't be dead).
7851 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7852 This is used for outputting expressions used in initializers.
7854 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7855 with a constant address even if that address is not normally legitimate.
7856 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7858 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7859 a call parameter. Such targets require special care as we haven't yet
7860 marked TARGET so that it's safe from being trashed by libcalls. We
7861 don't want to use TARGET for anything but the final result;
7862 intermediate values must go elsewhere. Additionally, calls to
7863 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7865 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7866 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7867 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7868 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7869 recursively.
7871 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7872 In this case, we don't adjust a returned MEM rtx that wouldn't be
7873 sufficiently aligned for its mode; instead, it's up to the caller
7874 to deal with it afterwards. This is used to make sure that unaligned
7875 base objects for which out-of-bounds accesses are supported, for
7876 example record types with trailing arrays, aren't realigned behind
7877 the back of the caller.
7878 The normal operating mode is to pass FALSE for this parameter. */
7880 rtx
7881 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7882 enum expand_modifier modifier, rtx *alt_rtl,
7883 bool inner_reference_p)
7885 rtx ret;
7887 /* Handle ERROR_MARK before anybody tries to access its type. */
7888 if (TREE_CODE (exp) == ERROR_MARK
7889 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7891 ret = CONST0_RTX (tmode);
7892 return ret ? ret : const0_rtx;
7895 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7896 inner_reference_p);
7897 return ret;
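/* Example (editorial): under EXPAND_SUM, expanding &arr[i] for 4-byte
   elements may legitimately return the un-forced form

     (plus (mult (reg i) (const_int 4)) (symbol_ref "arr"))

   instead of loading the sum into a pseudo, exactly as the EXPAND_SUM
   paragraph above describes.  */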
7900 /* Try to expand the conditional expression which is represented by
7901 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7902 return the RTL register which represents the result. Otherwise return
7903 NULL_RTX. */
7905 static rtx
7906 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7907 tree treeop1 ATTRIBUTE_UNUSED,
7908 tree treeop2 ATTRIBUTE_UNUSED)
7910 rtx insn;
7911 rtx op00, op01, op1, op2;
7912 enum rtx_code comparison_code;
7913 machine_mode comparison_mode;
7914 gimple srcstmt;
7915 rtx temp;
7916 tree type = TREE_TYPE (treeop1);
7917 int unsignedp = TYPE_UNSIGNED (type);
7918 machine_mode mode = TYPE_MODE (type);
7919 machine_mode orig_mode = mode;
7921 /* If we cannot do a conditional move on the mode, try doing it
7922 with the promoted mode. */
7923 if (!can_conditionally_move_p (mode))
7925 mode = promote_mode (type, mode, &unsignedp);
7926 if (!can_conditionally_move_p (mode))
7927 return NULL_RTX;
7928 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7930 else
7931 temp = assign_temp (type, 0, 1);
7933 start_sequence ();
7934 expand_operands (treeop1, treeop2,
7935 temp, &op1, &op2, EXPAND_NORMAL);
7937 if (TREE_CODE (treeop0) == SSA_NAME
7938 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7940 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7941 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7942 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7943 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7944 comparison_mode = TYPE_MODE (type);
7945 unsignedp = TYPE_UNSIGNED (type);
7946 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7948 else if (COMPARISON_CLASS_P (treeop0))
7950 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7951 enum tree_code cmpcode = TREE_CODE (treeop0);
7952 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7953 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7954 unsignedp = TYPE_UNSIGNED (type);
7955 comparison_mode = TYPE_MODE (type);
7956 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7958 else
7960 op00 = expand_normal (treeop0);
7961 op01 = const0_rtx;
7962 comparison_code = NE;
7963 comparison_mode = GET_MODE (op00);
7964 if (comparison_mode == VOIDmode)
7965 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7968 if (GET_MODE (op1) != mode)
7969 op1 = gen_lowpart (mode, op1);
7971 if (GET_MODE (op2) != mode)
7972 op2 = gen_lowpart (mode, op2);
7974 /* Try to emit the conditional move. */
7975 insn = emit_conditional_move (temp, comparison_code,
7976 op00, op01, comparison_mode,
7977 op1, op2, mode,
7978 unsignedp);
7980 /* If we could do the conditional move, emit the sequence,
7981 and return. */
7982 if (insn)
7984 rtx_insn *seq = get_insns ();
7985 end_sequence ();
7986 emit_insn (seq);
7987 return convert_modes (orig_mode, mode, temp, 0);
7990 /* Otherwise discard the sequence and fall back to code with
7991 branches. */
7992 end_sequence ();
7993 return NULL_RTX;
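/* Sketch (editorial): for x = a < b ? c : d on a target with a usable
   conditional move, the sequence built above is roughly

     temp = d;
     temp = (a < b) ? c : temp;    (via emit_conditional_move)

   and is emitted only when emit_conditional_move succeeds; otherwise
   the whole sequence is discarded and branchy code is used instead.  */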
7996 rtx
7997 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
7998 enum expand_modifier modifier)
8000 rtx op0, op1, op2, temp;
8001 rtx_code_label *lab;
8002 tree type;
8003 int unsignedp;
8004 machine_mode mode;
8005 enum tree_code code = ops->code;
8006 optab this_optab;
8007 rtx subtarget, original_target;
8008 int ignore;
8009 bool reduce_bit_field;
8010 location_t loc = ops->location;
8011 tree treeop0, treeop1, treeop2;
8012 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8013 ? reduce_to_bit_field_precision ((expr), \
8014 target, \
8015 type) \
8016 : (expr))
8018 type = ops->type;
8019 mode = TYPE_MODE (type);
8020 unsignedp = TYPE_UNSIGNED (type);
8022 treeop0 = ops->op0;
8023 treeop1 = ops->op1;
8024 treeop2 = ops->op2;
8026 /* We should be called only on simple (binary or unary) expressions,
8027 exactly those that are valid in gimple expressions that aren't
8028 GIMPLE_SINGLE_RHS (or invalid). */
8029 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8030 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8031 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8033 ignore = (target == const0_rtx
8034 || ((CONVERT_EXPR_CODE_P (code)
8035 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8036 && TREE_CODE (type) == VOID_TYPE));
8038 /* We should be called only if we need the result. */
8039 gcc_assert (!ignore);
8041 /* An operation in what may be a bit-field type needs the
8042 result to be reduced to the precision of the bit-field type,
8043 which is narrower than that of the type's mode. */
8044 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8045 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
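/* Example (editorial): a bit-field type with TYPE_PRECISION 3 carried
   in SImode (precision 32) sets reduce_bit_field, so every arithmetic
   result below is narrowed through REDUCE_BIT_FIELD, e.g. by masking
   an unsigned result with (1 << 3) - 1 == 7.  */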
8047 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8048 target = 0;
8050 /* Use subtarget as the target for operand 0 of a binary operation. */
8051 subtarget = get_subtarget (target);
8052 original_target = target;
8054 switch (code)
8056 case NON_LVALUE_EXPR:
8057 case PAREN_EXPR:
8058 CASE_CONVERT:
8059 if (treeop0 == error_mark_node)
8060 return const0_rtx;
8062 if (TREE_CODE (type) == UNION_TYPE)
8064 tree valtype = TREE_TYPE (treeop0);
8066 /* If both input and output are BLKmode, this conversion isn't doing
8067 anything except possibly changing memory attribute. */
8068 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8070 rtx result = expand_expr (treeop0, target, tmode,
8071 modifier);
8073 result = copy_rtx (result);
8074 set_mem_attributes (result, type, 0);
8075 return result;
8078 if (target == 0)
8080 if (TYPE_MODE (type) != BLKmode)
8081 target = gen_reg_rtx (TYPE_MODE (type));
8082 else
8083 target = assign_temp (type, 1, 1);
8086 if (MEM_P (target))
8087 /* Store data into beginning of memory target. */
8088 store_expr (treeop0,
8089 adjust_address (target, TYPE_MODE (valtype), 0),
8090 modifier == EXPAND_STACK_PARM,
8091 false);
8093 else
8095 gcc_assert (REG_P (target));
8097 /* Store this field into a union of the proper type. */
8098 store_field (target,
8099 MIN ((int_size_in_bytes (TREE_TYPE
8100 (treeop0))
8101 * BITS_PER_UNIT),
8102 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8103 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8106 /* Return the entire union. */
8107 return target;
8110 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8112 op0 = expand_expr (treeop0, target, VOIDmode,
8113 modifier);
8115 /* If the signedness of the conversion differs and OP0 is
8116 a promoted SUBREG, clear that indication since we now
8117 have to do the proper extension. */
8118 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8119 && GET_CODE (op0) == SUBREG)
8120 SUBREG_PROMOTED_VAR_P (op0) = 0;
8122 return REDUCE_BIT_FIELD (op0);
8125 op0 = expand_expr (treeop0, NULL_RTX, mode,
8126 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8127 if (GET_MODE (op0) == mode)
8130 /* If OP0 is a constant, just convert it into the proper mode. */
8131 else if (CONSTANT_P (op0))
8133 tree inner_type = TREE_TYPE (treeop0);
8134 machine_mode inner_mode = GET_MODE (op0);
8136 if (inner_mode == VOIDmode)
8137 inner_mode = TYPE_MODE (inner_type);
8139 if (modifier == EXPAND_INITIALIZER)
8140 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8141 subreg_lowpart_offset (mode,
8142 inner_mode));
8143 else
8144 op0 = convert_modes (mode, inner_mode, op0,
8145 TYPE_UNSIGNED (inner_type));
8148 else if (modifier == EXPAND_INITIALIZER)
8149 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8151 else if (target == 0)
8152 op0 = convert_to_mode (mode, op0,
8153 TYPE_UNSIGNED (TREE_TYPE
8154 (treeop0)));
8155 else
8157 convert_move (target, op0,
8158 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8159 op0 = target;
8162 return REDUCE_BIT_FIELD (op0);
8164 case ADDR_SPACE_CONVERT_EXPR:
8166 tree treeop0_type = TREE_TYPE (treeop0);
8167 addr_space_t as_to;
8168 addr_space_t as_from;
8170 gcc_assert (POINTER_TYPE_P (type));
8171 gcc_assert (POINTER_TYPE_P (treeop0_type));
8173 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8174 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8176 /* Conversions between pointers to the same address space should
8177 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8178 gcc_assert (as_to != as_from);
8180 /* Ask target code to handle conversion between pointers
8181 to overlapping address spaces. */
8182 if (targetm.addr_space.subset_p (as_to, as_from)
8183 || targetm.addr_space.subset_p (as_from, as_to))
8185 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8186 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8187 gcc_assert (op0);
8188 return op0;
8191 /* For disjoint address spaces, converting anything but
8192 a null pointer invokes undefined behaviour. We simply
8193 always return a null pointer here. */
8194 return CONST0_RTX (mode);
8197 case POINTER_PLUS_EXPR:
8198 /* Even though the sizetype mode and the pointer's mode can be different
8199 expand is able to handle this correctly and get the correct result out
8200 of the PLUS_EXPR code. */
8201 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8202 if sizetype precision is smaller than pointer precision. */
8203 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8204 treeop1 = fold_convert_loc (loc, type,
8205 fold_convert_loc (loc, ssizetype,
8206 treeop1));
8207 /* If sizetype precision is larger than pointer precision, truncate the
8208 offset to have matching modes. */
8209 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8210 treeop1 = fold_convert_loc (loc, type, treeop1);
8212 case PLUS_EXPR:
8213 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8214 something else, make sure we add the register to the constant and
8215 then to the other thing. This case can occur during strength
8216 reduction and doing it this way will produce better code if the
8217 frame pointer or argument pointer is eliminated.
8219 fold-const.c will ensure that the constant is always in the inner
8220 PLUS_EXPR, so the only case we need to do anything about is if
8221 sp, ap, or fp is our second argument, in which case we must swap
8222 the innermost first argument and our second argument. */
8224 if (TREE_CODE (treeop0) == PLUS_EXPR
8225 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8226 && TREE_CODE (treeop1) == VAR_DECL
8227 && (DECL_RTL (treeop1) == frame_pointer_rtx
8228 || DECL_RTL (treeop1) == stack_pointer_rtx
8229 || DECL_RTL (treeop1) == arg_pointer_rtx))
8231 gcc_unreachable ();
8234 /* If the result is to be ptr_mode and we are adding an integer to
8235 something, we might be forming a constant. So try to use
8236 plus_constant. If it produces a sum and we can't accept it,
8237 use force_operand. This allows P = &ARR[const] to generate
8238 efficient code on machines where a SYMBOL_REF is not a valid
8239 address.
8241 If this is an EXPAND_SUM call, always return the sum. */
8242 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8243 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8245 if (modifier == EXPAND_STACK_PARM)
8246 target = 0;
8247 if (TREE_CODE (treeop0) == INTEGER_CST
8248 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8249 && TREE_CONSTANT (treeop1))
8251 rtx constant_part;
8252 HOST_WIDE_INT wc;
8253 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8255 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8256 EXPAND_SUM);
8257 /* Use wi::shwi to ensure that the constant is
8258 truncated according to the mode of OP1, then sign extended
8259 to a HOST_WIDE_INT. Using the constant directly can result
8260 in non-canonical RTL in a 64x32 cross compile. */
8261 wc = TREE_INT_CST_LOW (treeop0);
8262 constant_part =
8263 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8264 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8265 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8266 op1 = force_operand (op1, target);
8267 return REDUCE_BIT_FIELD (op1);
8270 else if (TREE_CODE (treeop1) == INTEGER_CST
8271 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8272 && TREE_CONSTANT (treeop0))
8274 rtx constant_part;
8275 HOST_WIDE_INT wc;
8276 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8278 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8279 (modifier == EXPAND_INITIALIZER
8280 ? EXPAND_INITIALIZER : EXPAND_SUM));
8281 if (! CONSTANT_P (op0))
8283 op1 = expand_expr (treeop1, NULL_RTX,
8284 VOIDmode, modifier);
8285 /* Return a PLUS if modifier says it's OK. */
8286 if (modifier == EXPAND_SUM
8287 || modifier == EXPAND_INITIALIZER)
8288 return simplify_gen_binary (PLUS, mode, op0, op1);
8289 goto binop2;
8291 /* Use wi::shwi to ensure that the constant is
8292 truncated according to the mode of OP1, then sign extended
8293 to a HOST_WIDE_INT. Using the constant directly can result
8294 in non-canonical RTL in a 64x32 cross compile. */
8295 wc = TREE_INT_CST_LOW (treeop1);
8296 constant_part
8297 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8298 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8299 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8300 op0 = force_operand (op0, target);
8301 return REDUCE_BIT_FIELD (op0);
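/* Example (editorial): with a 64-bit HOST_WIDE_INT and a 32-bit target,
   an SImode INTEGER_CST of 0x80000000 must pass through

     wi::shwi (wc, wmode)

   so it is truncated to 32 bits and then sign-extended to the
   HOST_WIDE_INT value -0x80000000; using the raw constant would yield
   non-canonical RTL, as the comments above note.  */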
8305 /* Use TER to expand pointer addition of a negated value
8306 as pointer subtraction. */
8307 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8308 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8309 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8310 && TREE_CODE (treeop1) == SSA_NAME
8311 && TYPE_MODE (TREE_TYPE (treeop0))
8312 == TYPE_MODE (TREE_TYPE (treeop1)))
8314 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8315 if (def)
8317 treeop1 = gimple_assign_rhs1 (def);
8318 code = MINUS_EXPR;
8319 goto do_minus;
8323 /* No sense saving up arithmetic to be done
8324 if it's all in the wrong mode to form part of an address.
8325 And force_operand won't know whether to sign-extend or
8326 zero-extend. */
8327 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8328 || mode != ptr_mode)
8330 expand_operands (treeop0, treeop1,
8331 subtarget, &op0, &op1, EXPAND_NORMAL);
8332 if (op0 == const0_rtx)
8333 return op1;
8334 if (op1 == const0_rtx)
8335 return op0;
8336 goto binop2;
8339 expand_operands (treeop0, treeop1,
8340 subtarget, &op0, &op1, modifier);
8341 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8343 case MINUS_EXPR:
8344 do_minus:
8345 /* For initializers, we are allowed to return a MINUS of two
8346 symbolic constants. Here we handle all cases when both operands
8347 are constant. */
8348 /* Handle difference of two symbolic constants,
8349 for the sake of an initializer. */
8350 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8351 && really_constant_p (treeop0)
8352 && really_constant_p (treeop1))
8354 expand_operands (treeop0, treeop1,
8355 NULL_RTX, &op0, &op1, modifier);
8357 /* If the last operand is a CONST_INT, use plus_constant of
8358 the negated constant. Else make the MINUS. */
8359 if (CONST_INT_P (op1))
8360 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8361 -INTVAL (op1)));
8362 else
8363 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8366 /* No sense saving up arithmetic to be done
8367 if it's all in the wrong mode to form part of an address.
8368 And force_operand won't know whether to sign-extend or
8369 zero-extend. */
8370 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8371 || mode != ptr_mode)
8372 goto binop;
8374 expand_operands (treeop0, treeop1,
8375 subtarget, &op0, &op1, modifier);
8377 /* Convert A - const to A + (-const). */
8378 if (CONST_INT_P (op1))
8380 op1 = negate_rtx (mode, op1);
8381 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8384 goto binop2;
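/* Example (editorial): x - 5 is rewritten here as x + (-5), e.g.

     (plus:SI (reg x) (const_int -5))

   so later passes can fold the negated constant into addressing modes
   or merge it with neighbouring additions.  */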
8386 case WIDEN_MULT_PLUS_EXPR:
8387 case WIDEN_MULT_MINUS_EXPR:
8388 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8389 op2 = expand_normal (treeop2);
8390 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8391 target, unsignedp);
8392 return target;
8394 case WIDEN_MULT_EXPR:
8395 /* If first operand is constant, swap them.
8396 Thus the following special case checks need only
8397 check the second operand. */
8398 if (TREE_CODE (treeop0) == INTEGER_CST)
8399 std::swap (treeop0, treeop1);
8401 /* First, check if we have a multiplication of one signed and one
8402 unsigned operand. */
8403 if (TREE_CODE (treeop1) != INTEGER_CST
8404 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8405 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8407 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8408 this_optab = usmul_widen_optab;
8409 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8410 != CODE_FOR_nothing)
8412 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8413 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8414 EXPAND_NORMAL);
8415 else
8416 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8417 EXPAND_NORMAL);
8418 /* op0 and op1 might still be constant, despite the above
8419 != INTEGER_CST check. Handle it. */
8420 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8422 op0 = convert_modes (innermode, mode, op0, true);
8423 op1 = convert_modes (innermode, mode, op1, false);
8424 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8425 target, unsignedp));
8427 goto binop3;
8430 /* Check for a multiplication with matching signedness. */
8431 else if ((TREE_CODE (treeop1) == INTEGER_CST
8432 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8433 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8434 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8436 tree op0type = TREE_TYPE (treeop0);
8437 machine_mode innermode = TYPE_MODE (op0type);
8438 bool zextend_p = TYPE_UNSIGNED (op0type);
8439 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8440 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8442 if (TREE_CODE (treeop0) != INTEGER_CST)
8444 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8445 != CODE_FOR_nothing)
8447 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8448 EXPAND_NORMAL);
8449 /* op0 and op1 might still be constant, despite the above
8450 != INTEGER_CST check. Handle it. */
8451 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8453 widen_mult_const:
8454 op0 = convert_modes (innermode, mode, op0, zextend_p);
8455 op1
8456 = convert_modes (innermode, mode, op1,
8457 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8458 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8459 target,
8460 unsignedp));
8462 temp = expand_widening_mult (mode, op0, op1, target,
8463 unsignedp, this_optab);
8464 return REDUCE_BIT_FIELD (temp);
8466 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8467 != CODE_FOR_nothing
8468 && innermode == word_mode)
8470 rtx htem, hipart;
8471 op0 = expand_normal (treeop0);
8472 if (TREE_CODE (treeop1) == INTEGER_CST)
8473 op1 = convert_modes (innermode, mode,
8474 expand_normal (treeop1),
8475 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8476 else
8477 op1 = expand_normal (treeop1);
8478 /* op0 and op1 might still be constant, despite the above
8479 != INTEGER_CST check. Handle it. */
8480 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8481 goto widen_mult_const;
8482 temp = expand_binop (mode, other_optab, op0, op1, target,
8483 unsignedp, OPTAB_LIB_WIDEN);
8484 hipart = gen_highpart (innermode, temp);
8485 htem = expand_mult_highpart_adjust (innermode, hipart,
8486 op0, op1, hipart,
8487 zextend_p);
8488 if (htem != hipart)
8489 emit_move_insn (hipart, htem);
8490 return REDUCE_BIT_FIELD (temp);
8494 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8495 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8496 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8497 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8499 case FMA_EXPR:
8501 optab opt = fma_optab;
8502 gimple def0, def2;
8504 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8505 call. */
8506 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8508 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8509 tree call_expr;
8511 gcc_assert (fn != NULL_TREE);
8512 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8513 return expand_builtin (call_expr, target, subtarget, mode, false);
8516 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8517 /* The multiplication is commutative - look at its 2nd operand
8518 if the first isn't fed by a negate. */
8519 if (!def0)
8521 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8522 /* Swap operands if the 2nd operand is fed by a negate. */
8523 if (def0)
8524 std::swap (treeop0, treeop1);
8526 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8528 op0 = op2 = NULL;
8530 if (def0 && def2
8531 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8533 opt = fnms_optab;
8534 op0 = expand_normal (gimple_assign_rhs1 (def0));
8535 op2 = expand_normal (gimple_assign_rhs1 (def2));
8537 else if (def0
8538 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8540 opt = fnma_optab;
8541 op0 = expand_normal (gimple_assign_rhs1 (def0));
8543 else if (def2
8544 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8546 opt = fms_optab;
8547 op2 = expand_normal (gimple_assign_rhs1 (def2));
8550 if (op0 == NULL)
8551 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8552 if (op2 == NULL)
8553 op2 = expand_normal (treeop2);
8554 op1 = expand_normal (treeop1);
8556 return expand_ternary_op (TYPE_MODE (type), opt,
8557 op0, op1, op2, target, 0);
8560 case MULT_EXPR:
8561 /* If this is a fixed-point operation, then we cannot use the code
8562 below because "expand_mult" doesn't support sat/no-sat fixed-point
8563 multiplications. */
8564 if (ALL_FIXED_POINT_MODE_P (mode))
8565 goto binop;
8567 /* If first operand is constant, swap them.
8568 Thus the following special case checks need only
8569 check the second operand. */
8570 if (TREE_CODE (treeop0) == INTEGER_CST)
8571 std::swap (treeop0, treeop1);
8573 /* Attempt to return something suitable for generating an
8574 indexed address, for machines that support that. */
8576 if (modifier == EXPAND_SUM && mode == ptr_mode
8577 && tree_fits_shwi_p (treeop1))
8579 tree exp1 = treeop1;
8581 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8582 EXPAND_SUM);
8584 if (!REG_P (op0))
8585 op0 = force_operand (op0, NULL_RTX);
8586 if (!REG_P (op0))
8587 op0 = copy_to_mode_reg (mode, op0);
8589 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8590 gen_int_mode (tree_to_shwi (exp1),
8591 TYPE_MODE (TREE_TYPE (exp1)))));
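/* Example (editorial): under EXPAND_SUM with MODE == ptr_mode, i * 4
   comes back as

     (mult (reg i) (const_int 4))

   which the caller can fold into an indexed address such as
   (plus (reg base) (mult (reg i) (const_int 4))) on machines with
   scaled index addressing.  */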
8594 if (modifier == EXPAND_STACK_PARM)
8595 target = 0;
8597 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8598 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8600 case TRUNC_DIV_EXPR:
8601 case FLOOR_DIV_EXPR:
8602 case CEIL_DIV_EXPR:
8603 case ROUND_DIV_EXPR:
8604 case EXACT_DIV_EXPR:
8605 /* If this is a fixed-point operation, then we cannot use the code
8606 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8607 divisions. */
8608 if (ALL_FIXED_POINT_MODE_P (mode))
8609 goto binop;
8611 if (modifier == EXPAND_STACK_PARM)
8612 target = 0;
8613 /* Possible optimization: compute the dividend with EXPAND_SUM;
8614 then, if the divisor is constant, optimize the case where
8615 some terms of the dividend have coefficients divisible by it. */
8616 expand_operands (treeop0, treeop1,
8617 subtarget, &op0, &op1, EXPAND_NORMAL);
8618 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8620 case RDIV_EXPR:
8621 goto binop;
8623 case MULT_HIGHPART_EXPR:
8624 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8625 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8626 gcc_assert (temp);
8627 return temp;
8629 case TRUNC_MOD_EXPR:
8630 case FLOOR_MOD_EXPR:
8631 case CEIL_MOD_EXPR:
8632 case ROUND_MOD_EXPR:
8633 if (modifier == EXPAND_STACK_PARM)
8634 target = 0;
8635 expand_operands (treeop0, treeop1,
8636 subtarget, &op0, &op1, EXPAND_NORMAL);
8637 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8639 case FIXED_CONVERT_EXPR:
8640 op0 = expand_normal (treeop0);
8641 if (target == 0 || modifier == EXPAND_STACK_PARM)
8642 target = gen_reg_rtx (mode);
8644 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8645 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8646 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8647 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8648 else
8649 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8650 return target;
8652 case FIX_TRUNC_EXPR:
8653 op0 = expand_normal (treeop0);
8654 if (target == 0 || modifier == EXPAND_STACK_PARM)
8655 target = gen_reg_rtx (mode);
8656 expand_fix (target, op0, unsignedp);
8657 return target;
8659 case FLOAT_EXPR:
8660 op0 = expand_normal (treeop0);
8661 if (target == 0 || modifier == EXPAND_STACK_PARM)
8662 target = gen_reg_rtx (mode);
8663 /* expand_float can't figure out what to do if FROM has VOIDmode.
8664 So give it the correct mode. With -O, cse will optimize this. */
8665 if (GET_MODE (op0) == VOIDmode)
8666 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8667 op0);
8668 expand_float (target, op0,
8669 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8670 return target;
8672 case NEGATE_EXPR:
8673 op0 = expand_expr (treeop0, subtarget,
8674 VOIDmode, EXPAND_NORMAL);
8675 if (modifier == EXPAND_STACK_PARM)
8676 target = 0;
8677 temp = expand_unop (mode,
8678 optab_for_tree_code (NEGATE_EXPR, type,
8679 optab_default),
8680 op0, target, 0);
8681 gcc_assert (temp);
8682 return REDUCE_BIT_FIELD (temp);
8684 case ABS_EXPR:
8685 op0 = expand_expr (treeop0, subtarget,
8686 VOIDmode, EXPAND_NORMAL);
8687 if (modifier == EXPAND_STACK_PARM)
8688 target = 0;
8690 /* ABS_EXPR is not valid for complex arguments. */
8691 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8692 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8694 /* Unsigned abs is simply the operand. Testing here means we don't
8695 risk generating incorrect code below. */
8696 if (TYPE_UNSIGNED (type))
8697 return op0;
8699 return expand_abs (mode, op0, target, unsignedp,
8700 safe_from_p (target, treeop0, 1));
8702 case MAX_EXPR:
8703 case MIN_EXPR:
8704 target = original_target;
8705 if (target == 0
8706 || modifier == EXPAND_STACK_PARM
8707 || (MEM_P (target) && MEM_VOLATILE_P (target))
8708 || GET_MODE (target) != mode
8709 || (REG_P (target)
8710 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8711 target = gen_reg_rtx (mode);
8712 expand_operands (treeop0, treeop1,
8713 target, &op0, &op1, EXPAND_NORMAL);
8715 /* First try to do it with a special MIN or MAX instruction.
8716 If that does not win, use a conditional jump to select the proper
8717 value. */
8718 this_optab = optab_for_tree_code (code, type, optab_default);
8719 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8720 OPTAB_WIDEN);
8721 if (temp != 0)
8722 return temp;
8724 /* At this point, a MEM target is no longer useful; we will get better
8725 code without it. */
8727 if (! REG_P (target))
8728 target = gen_reg_rtx (mode);
8730 /* If op1 was placed in target, swap op0 and op1. */
8731 if (target != op0 && target == op1)
8732 std::swap (op0, op1);
8734 /* We generate better code and avoid problems with op1 mentioning
8735 target by forcing op1 into a pseudo if it isn't a constant. */
8736 if (! CONSTANT_P (op1))
8737 op1 = force_reg (mode, op1);
8740 enum rtx_code comparison_code;
8741 rtx cmpop1 = op1;
8743 if (code == MAX_EXPR)
8744 comparison_code = unsignedp ? GEU : GE;
8745 else
8746 comparison_code = unsignedp ? LEU : LE;
8748 /* Canonicalize to comparisons against 0. */
8749 if (op1 == const1_rtx)
8751 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8752 or (a != 0 ? a : 1) for unsigned.
8753 For MIN we are safe converting (a <= 1 ? a : 1)
8754 into (a <= 0 ? a : 1) */
8755 cmpop1 = const0_rtx;
8756 if (code == MAX_EXPR)
8757 comparison_code = unsignedp ? NE : GT;
8759 if (op1 == constm1_rtx && !unsignedp)
8761 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8762 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8763 cmpop1 = const0_rtx;
8764 if (code == MIN_EXPR)
8765 comparison_code = LT;
8768 /* Use a conditional move if possible. */
8769 if (can_conditionally_move_p (mode))
8771 rtx insn;
8773 start_sequence ();
8775 /* Try to emit the conditional move. */
8776 insn = emit_conditional_move (target, comparison_code,
8777 op0, cmpop1, mode,
8778 op0, op1, mode,
8779 unsignedp);
8781 /* If we could do the conditional move, emit the sequence,
8782 and return. */
8783 if (insn)
8785 rtx_insn *seq = get_insns ();
8786 end_sequence ();
8787 emit_insn (seq);
8788 return target;
8791 /* Otherwise discard the sequence and fall back to code with
8792 branches. */
8793 end_sequence ();
8796 if (target != op0)
8797 emit_move_insn (target, op0);
8799 lab = gen_label_rtx ();
8800 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8801 unsignedp, mode, NULL_RTX, NULL, lab,
8802 -1);
8804 emit_move_insn (target, op1);
8805 emit_label (lab);
8806 return target;
8808 case BIT_NOT_EXPR:
8809 op0 = expand_expr (treeop0, subtarget,
8810 VOIDmode, EXPAND_NORMAL);
8811 if (modifier == EXPAND_STACK_PARM)
8812 target = 0;
8813 /* In case we have to reduce the result to bitfield precision
8814 for unsigned bitfield expand this as XOR with a proper constant
8815 instead. */
8816 if (reduce_bit_field && TYPE_UNSIGNED (type))
8818 wide_int mask = wi::mask (TYPE_PRECISION (type),
8819 false, GET_MODE_PRECISION (mode));
8821 temp = expand_binop (mode, xor_optab, op0,
8822 immed_wide_int_const (mask, mode),
8823 target, 1, OPTAB_LIB_WIDEN);
8825 else
8826 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8827 gcc_assert (temp);
8828 return temp;
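/* Example (editorial): for an unsigned 3-bit field, wi::mask builds
   0b111, so ~x expands as

     x ^ 7

   which both complements the low bits and leaves the result already
   reduced to the field's precision, avoiding a separate masking.  */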
8830 /* ??? Can optimize bitwise operations with one arg constant.
8831 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8832 and (a bitwise1 b) bitwise2 b (etc)
8833 but that is probably not worthwhile. */
8835 case BIT_AND_EXPR:
8836 case BIT_IOR_EXPR:
8837 case BIT_XOR_EXPR:
8838 goto binop;
8840 case LROTATE_EXPR:
8841 case RROTATE_EXPR:
8842 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8843 || (GET_MODE_PRECISION (TYPE_MODE (type))
8844 == TYPE_PRECISION (type)));
8845 /* fall through */
8847 case LSHIFT_EXPR:
8848 case RSHIFT_EXPR:
8849 /* If this is a fixed-point operation, then we cannot use the code
8850 below because "expand_shift" doesn't support sat/no-sat fixed-point
8851 shifts. */
8852 if (ALL_FIXED_POINT_MODE_P (mode))
8853 goto binop;
8855 if (! safe_from_p (subtarget, treeop1, 1))
8856 subtarget = 0;
8857 if (modifier == EXPAND_STACK_PARM)
8858 target = 0;
8859 op0 = expand_expr (treeop0, subtarget,
8860 VOIDmode, EXPAND_NORMAL);
8861 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8862 unsignedp);
8863 if (code == LSHIFT_EXPR)
8864 temp = REDUCE_BIT_FIELD (temp);
8865 return temp;
8867 /* Could determine the answer when only additive constants differ. Also,
8868 the addition of one can be handled by changing the condition. */
8869 case LT_EXPR:
8870 case LE_EXPR:
8871 case GT_EXPR:
8872 case GE_EXPR:
8873 case EQ_EXPR:
8874 case NE_EXPR:
8875 case UNORDERED_EXPR:
8876 case ORDERED_EXPR:
8877 case UNLT_EXPR:
8878 case UNLE_EXPR:
8879 case UNGT_EXPR:
8880 case UNGE_EXPR:
8881 case UNEQ_EXPR:
8882 case LTGT_EXPR:
8884 temp = do_store_flag (ops,
8885 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8886 tmode != VOIDmode ? tmode : mode);
8887 if (temp)
8888 return temp;
8890 /* Use a compare and a jump for BLKmode comparisons, or for function
8891 type comparisons if have_canonicalize_funcptr_for_compare. */
8893 if ((target == 0
8894 || modifier == EXPAND_STACK_PARM
8895 || ! safe_from_p (target, treeop0, 1)
8896 || ! safe_from_p (target, treeop1, 1)
8897 /* Make sure we don't have a hard reg (such as function's return
8898 value) live across basic blocks, if not optimizing. */
8899 || (!optimize && REG_P (target)
8900 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8901 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8903 emit_move_insn (target, const0_rtx);
8905 rtx_code_label *lab1 = gen_label_rtx ();
8906 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
8908 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8909 emit_move_insn (target, constm1_rtx);
8910 else
8911 emit_move_insn (target, const1_rtx);
8913 emit_label (lab1);
8914 return target;
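/* Sketch (editorial): when do_store_flag cannot help, the fallback
   above materializes the flag value with a branch:

     target = 0;
     if (!(op0 CMP op1)) goto lab1;
     target = 1;                    (or -1 for a signed 1-bit type)
   lab1:

   matching the emit_move_insn / jumpifnot_1 / emit_label calls.  */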
8916 case COMPLEX_EXPR:
8917 /* Get the rtx code of the operands. */
8918 op0 = expand_normal (treeop0);
8919 op1 = expand_normal (treeop1);
8921 if (!target)
8922 target = gen_reg_rtx (TYPE_MODE (type));
8923 else
8924 /* If target overlaps with op1, then either we need to force
8925 op1 into a pseudo (if target also overlaps with op0),
8926 or write the complex parts in reverse order. */
8927 switch (GET_CODE (target))
8929 case CONCAT:
8930 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8932 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8934 complex_expr_force_op1:
8935 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8936 emit_move_insn (temp, op1);
8937 op1 = temp;
8938 break;
8940 complex_expr_swap_order:
8941 /* Move the imaginary (op1) and real (op0) parts to their
8942 location. */
8943 write_complex_part (target, op1, true);
8944 write_complex_part (target, op0, false);
8946 return target;
8948 break;
8949 case MEM:
8950 temp = adjust_address_nv (target,
8951 GET_MODE_INNER (GET_MODE (target)), 0);
8952 if (reg_overlap_mentioned_p (temp, op1))
8954 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8955 temp = adjust_address_nv (target, imode,
8956 GET_MODE_SIZE (imode));
8957 if (reg_overlap_mentioned_p (temp, op0))
8958 goto complex_expr_force_op1;
8959 goto complex_expr_swap_order;
8961 break;
8962 default:
8963 if (reg_overlap_mentioned_p (target, op1))
8965 if (reg_overlap_mentioned_p (target, op0))
8966 goto complex_expr_force_op1;
8967 goto complex_expr_swap_order;
8969 break;
8972 /* Move the real (op0) and imaginary (op1) parts to their location. */
8973 write_complex_part (target, op0, false);
8974 write_complex_part (target, op1, true);
8976 return target;
8978 case WIDEN_SUM_EXPR:
8980 tree oprnd0 = treeop0;
8981 tree oprnd1 = treeop1;
8983 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8984 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8985 target, unsignedp);
8986 return target;
8989 case REDUC_MAX_EXPR:
8990 case REDUC_MIN_EXPR:
8991 case REDUC_PLUS_EXPR:
8993 op0 = expand_normal (treeop0);
8994 this_optab = optab_for_tree_code (code, type, optab_default);
8995 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
8997 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
8999 struct expand_operand ops[2];
9000 enum insn_code icode = optab_handler (this_optab, vec_mode);
9002 create_output_operand (&ops[0], target, mode);
9003 create_input_operand (&ops[1], op0, vec_mode);
9004 if (maybe_expand_insn (icode, 2, ops))
9006 target = ops[0].value;
9007 if (GET_MODE (target) != mode)
9008 return gen_lowpart (tmode, target);
9009 return target;
9012 /* Fall back to optab with vector result, and then extract scalar. */
9013 this_optab = scalar_reduc_to_vector (this_optab, type);
9014 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9015 gcc_assert (temp);
9016 /* The tree code produces a scalar result, but (somewhat by convention)
9017 the optab produces a vector with the result in element 0 if
9018 little-endian, or element N-1 if big-endian. So pull the scalar
9019 result out of that element. */
9020 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9021 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9022 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9023 target, mode, mode);
9024 gcc_assert (temp);
9025 return temp;
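/* Example (editorial): for a V4SImode reduction, GET_MODE_NUNITS is 4
   and the inner bitsize 32, so extract_bit_field pulls the scalar from
   bit offset 96 (element 3) on a big-endian target and from offset 0
   on a little-endian one.  */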
9028 case VEC_UNPACK_HI_EXPR:
9029 case VEC_UNPACK_LO_EXPR:
9031 op0 = expand_normal (treeop0);
9032 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9033 target, unsignedp);
9034 gcc_assert (temp);
9035 return temp;
9038 case VEC_UNPACK_FLOAT_HI_EXPR:
9039 case VEC_UNPACK_FLOAT_LO_EXPR:
9041 op0 = expand_normal (treeop0);
9042 /* The signedness is determined from input operand. */
9043 temp = expand_widen_pattern_expr
9044 (ops, op0, NULL_RTX, NULL_RTX,
9045 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9047 gcc_assert (temp);
9048 return temp;
9051 case VEC_WIDEN_MULT_HI_EXPR:
9052 case VEC_WIDEN_MULT_LO_EXPR:
9053 case VEC_WIDEN_MULT_EVEN_EXPR:
9054 case VEC_WIDEN_MULT_ODD_EXPR:
9055 case VEC_WIDEN_LSHIFT_HI_EXPR:
9056 case VEC_WIDEN_LSHIFT_LO_EXPR:
9057 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9058 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9059 target, unsignedp);
9060 gcc_assert (target);
9061 return target;
9063 case VEC_PACK_TRUNC_EXPR:
9064 case VEC_PACK_SAT_EXPR:
9065 case VEC_PACK_FIX_TRUNC_EXPR:
9066 mode = TYPE_MODE (TREE_TYPE (treeop0));
9067 goto binop;
9069 case VEC_PERM_EXPR:
9070 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9071 op2 = expand_normal (treeop2);
9073 /* Careful here: if the target doesn't support integral vector modes,
9074 a constant selection vector could wind up smooshed into a normal
9075 integral constant. */
9076 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9078 tree sel_type = TREE_TYPE (treeop2);
9079 machine_mode vmode
9080 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9081 TYPE_VECTOR_SUBPARTS (sel_type));
9082 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9083 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9084 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9086 else
9087 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9089 temp = expand_vec_perm (mode, op0, op1, op2, target);
9090 gcc_assert (temp);
9091 return temp;
9093 case DOT_PROD_EXPR:
9095 tree oprnd0 = treeop0;
9096 tree oprnd1 = treeop1;
9097 tree oprnd2 = treeop2;
9098 rtx op2;
9100 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9101 op2 = expand_normal (oprnd2);
9102 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9103 target, unsignedp);
9104 return target;
9107 case SAD_EXPR:
9109 tree oprnd0 = treeop0;
9110 tree oprnd1 = treeop1;
9111 tree oprnd2 = treeop2;
9112 rtx op2;
9114 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9115 op2 = expand_normal (oprnd2);
9116 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9117 target, unsignedp);
9118 return target;
9121 case REALIGN_LOAD_EXPR:
9123 tree oprnd0 = treeop0;
9124 tree oprnd1 = treeop1;
9125 tree oprnd2 = treeop2;
9126 rtx op2;
9128 this_optab = optab_for_tree_code (code, type, optab_default);
9129 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9130 op2 = expand_normal (oprnd2);
9131 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9132 target, unsignedp);
9133 gcc_assert (temp);
9134 return temp;
9137 case COND_EXPR:
9139 /* A COND_EXPR with its type being VOID_TYPE represents a
9140 conditional jump and is handled in
9141 expand_gimple_cond_expr. */
9142 gcc_assert (!VOID_TYPE_P (type));
9144 /* Note that COND_EXPRs whose type is a structure or union
9145 are required to be constructed to contain assignments of
9146 a temporary variable, so that we can evaluate them here
9147 for side effect only. If type is void, we must do likewise. */
9149 gcc_assert (!TREE_ADDRESSABLE (type)
9150 && !ignore
9151 && TREE_TYPE (treeop1) != void_type_node
9152 && TREE_TYPE (treeop2) != void_type_node);
9154 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9155 if (temp)
9156 return temp;
9158 /* If we are not to produce a result, we have no target. Otherwise,
9159 if a target was specified use it; it will not be used as an
9160 intermediate target unless it is safe. If no target, use a
9161 temporary. */
9163 if (modifier != EXPAND_STACK_PARM
9164 && original_target
9165 && safe_from_p (original_target, treeop0, 1)
9166 && GET_MODE (original_target) == mode
9167 && !MEM_P (original_target))
9168 temp = original_target;
9169 else
9170 temp = assign_temp (type, 0, 1);
9172 do_pending_stack_adjust ();
9173 NO_DEFER_POP;
9174 rtx_code_label *lab0 = gen_label_rtx ();
9175 rtx_code_label *lab1 = gen_label_rtx ();
9176 jumpifnot (treeop0, lab0, -1);
9177 store_expr (treeop1, temp,
9178 modifier == EXPAND_STACK_PARM,
9179 false);
9181 emit_jump_insn (targetm.gen_jump (lab1));
9182 emit_barrier ();
9183 emit_label (lab0);
9184 store_expr (treeop2, temp,
9185 modifier == EXPAND_STACK_PARM,
9186 false);
9188 emit_label (lab1);
9189 OK_DEFER_POP;
9190 return temp;
9193 case VEC_COND_EXPR:
9194 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9195 return target;
9197 default:
9198 gcc_unreachable ();
9201 /* Here to do an ordinary binary operator. */
9202 binop:
9203 expand_operands (treeop0, treeop1,
9204 subtarget, &op0, &op1, EXPAND_NORMAL);
9205 binop2:
9206 this_optab = optab_for_tree_code (code, type, optab_default);
9207 binop3:
9208 if (modifier == EXPAND_STACK_PARM)
9209 target = 0;
9210 temp = expand_binop (mode, this_optab, op0, op1, target,
9211 unsignedp, OPTAB_LIB_WIDEN);
9212 gcc_assert (temp);
9213 /* Bitwise operations do not need bitfield reduction, as we expect their
9214 operands to be properly truncated. */
9215 if (code == BIT_XOR_EXPR
9216 || code == BIT_AND_EXPR
9217 || code == BIT_IOR_EXPR)
9218 return temp;
9219 return REDUCE_BIT_FIELD (temp);
9221 #undef REDUCE_BIT_FIELD
9224 /* Return TRUE if the statement STMT is suitable for replacement.
9225 Never consider memory loads as replaceable, because they never lead
9226 to constant expressions. */
9228 static bool
9229 stmt_is_replaceable_p (gimple stmt)
9231 if (ssa_is_replaceable_p (stmt))
9233 /* Don't move around loads. */
9234 if (!gimple_assign_single_p (stmt)
9235 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9236 return true;
9238 return false;
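/* The main recursive worker for expand_expr: generate RTL for EXP, using
TARGET and TMODE as hints for the desired result, under the expansion
context described by MODIFIER.  */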
9242 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9243 enum expand_modifier modifier, rtx *alt_rtl,
9244 bool inner_reference_p)
9246 rtx op0, op1, temp, decl_rtl;
9247 tree type;
9248 int unsignedp;
9249 machine_mode mode;
9250 enum tree_code code = TREE_CODE (exp);
9251 rtx subtarget, original_target;
9252 int ignore;
9253 tree context;
9254 bool reduce_bit_field;
9255 location_t loc = EXPR_LOCATION (exp);
9256 struct separate_ops ops;
9257 tree treeop0, treeop1, treeop2;
9258 tree ssa_name = NULL_TREE;
9259 gimple g;
9261 type = TREE_TYPE (exp);
9262 mode = TYPE_MODE (type);
9263 unsignedp = TYPE_UNSIGNED (type);
9265 treeop0 = treeop1 = treeop2 = NULL_TREE;
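/* Collect up to three operands; the switch cases below fall through on
purpose, so an expression with N operands fills treeop0..treeop(N-1).  */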
9266 if (!VL_EXP_CLASS_P (exp))
9267 switch (TREE_CODE_LENGTH (code))
9269 default:
9270 case 3: treeop2 = TREE_OPERAND (exp, 2);
9271 case 2: treeop1 = TREE_OPERAND (exp, 1);
9272 case 1: treeop0 = TREE_OPERAND (exp, 0);
9273 case 0: break;
9275 ops.code = code;
9276 ops.type = type;
9277 ops.op0 = treeop0;
9278 ops.op1 = treeop1;
9279 ops.op2 = treeop2;
9280 ops.location = loc;
9282 ignore = (target == const0_rtx
9283 || ((CONVERT_EXPR_CODE_P (code)
9284 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9285 && TREE_CODE (type) == VOID_TYPE));
9287 /* An operation in what may be a bit-field type needs the
9288 result to be reduced to the precision of the bit-field type,
9289 which is narrower than that of the type's mode. */
9290 reduce_bit_field = (!ignore
9291 && INTEGRAL_TYPE_P (type)
9292 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9294 /* If we are going to ignore this result, we need only do something
9295 if there is a side-effect somewhere in the expression. If there
9296 is, short-circuit the most common cases here. Note that we must
9297 not call expand_expr with anything but const0_rtx in case this
9298 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9300 if (ignore)
9302 if (! TREE_SIDE_EFFECTS (exp))
9303 return const0_rtx;
9305 /* Ensure we reference a volatile object even if value is ignored, but
9306 don't do this if all we are doing is taking its address. */
9307 if (TREE_THIS_VOLATILE (exp)
9308 && TREE_CODE (exp) != FUNCTION_DECL
9309 && mode != VOIDmode && mode != BLKmode
9310 && modifier != EXPAND_CONST_ADDRESS)
9312 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9313 if (MEM_P (temp))
9314 copy_to_reg (temp);
9315 return const0_rtx;
9318 if (TREE_CODE_CLASS (code) == tcc_unary
9319 || code == BIT_FIELD_REF
9320 || code == COMPONENT_REF
9321 || code == INDIRECT_REF)
9322 return expand_expr (treeop0, const0_rtx, VOIDmode,
9323 modifier);
9325 else if (TREE_CODE_CLASS (code) == tcc_binary
9326 || TREE_CODE_CLASS (code) == tcc_comparison
9327 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9329 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9330 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9331 return const0_rtx;
9334 target = 0;
9337 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9338 target = 0;
9340 /* Use subtarget as the target for operand 0 of a binary operation. */
9341 subtarget = get_subtarget (target);
9342 original_target = target;
9344 switch (code)
9346 case LABEL_DECL:
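/* Taking the address of a label: wrap the LABEL_REF in a MEM with
FUNCTION_MODE, and mark references to labels of enclosing functions
as non-local.  */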
9348 tree function = decl_function_context (exp);
9350 temp = label_rtx (exp);
9351 temp = gen_rtx_LABEL_REF (Pmode, temp);
9353 if (function != current_function_decl
9354 && function != 0)
9355 LABEL_REF_NONLOCAL_P (temp) = 1;
9357 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9358 return temp;
9361 case SSA_NAME:
9362 /* ??? ivopts calls the expander without any preparation from
9363 out-of-ssa, so fake instructions as if this were an access to the
9364 base variable. This unnecessarily allocates a pseudo; see whether we
9365 can reuse it if partition base vars have it set already. */
9366 if (!currently_expanding_to_rtl)
9368 tree var = SSA_NAME_VAR (exp);
9369 if (var && DECL_RTL_SET_P (var))
9370 return DECL_RTL (var);
9371 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9372 LAST_VIRTUAL_REGISTER + 1);
9375 g = get_gimple_for_ssa_name (exp);
9376 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9377 if (g == NULL
9378 && modifier == EXPAND_INITIALIZER
9379 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9380 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9381 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9382 g = SSA_NAME_DEF_STMT (exp);
9383 if (g)
9385 rtx r;
9386 ops.code = gimple_assign_rhs_code (g);
9387 switch (get_gimple_rhs_class (ops.code))
9389 case GIMPLE_TERNARY_RHS:
9390 ops.op2 = gimple_assign_rhs3 (g);
9391 /* Fallthru */
9392 case GIMPLE_BINARY_RHS:
9393 ops.op1 = gimple_assign_rhs2 (g);
9395 /* Try to expand conditional compare. */
9396 if (targetm.gen_ccmp_first)
9398 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9399 r = expand_ccmp_expr (g);
9400 if (r)
9401 break;
9403 /* Fallthru */
9404 case GIMPLE_UNARY_RHS:
9405 ops.op0 = gimple_assign_rhs1 (g);
9406 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9407 ops.location = gimple_location (g);
9408 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9409 break;
9410 case GIMPLE_SINGLE_RHS:
9412 location_t saved_loc = curr_insn_location ();
9413 set_curr_insn_location (gimple_location (g));
9414 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9415 tmode, modifier, NULL, inner_reference_p);
9416 set_curr_insn_location (saved_loc);
9417 break;
9419 default:
9420 gcc_unreachable ();
9422 if (REG_P (r) && !REG_EXPR (r))
9423 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9424 return r;
9427 ssa_name = exp;
9428 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9429 exp = SSA_NAME_VAR (ssa_name);
9430 goto expand_decl_rtl;
9432 case PARM_DECL:
9433 case VAR_DECL:
9434 /* If a static var's type was incomplete when the decl was written,
9435 but the type is complete now, lay out the decl now. */
9436 if (DECL_SIZE (exp) == 0
9437 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9438 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9439 layout_decl (exp, 0);
9441 /* ... fall through ... */
9443 case FUNCTION_DECL:
9444 case RESULT_DECL:
9445 decl_rtl = DECL_RTL (exp);
9446 expand_decl_rtl:
9447 gcc_assert (decl_rtl);
9448 decl_rtl = copy_rtx (decl_rtl);
9449 /* Record writes to register variables. */
9450 if (modifier == EXPAND_WRITE
9451 && REG_P (decl_rtl)
9452 && HARD_REGISTER_P (decl_rtl))
9453 add_to_hard_reg_set (&crtl->asm_clobbers,
9454 GET_MODE (decl_rtl), REGNO (decl_rtl));
9456 /* Ensure the variable is marked as used even if it doesn't go through
9457 a parser. If it hasn't been used yet, write out an external
9458 definition. */
9459 TREE_USED (exp) = 1;
9461 /* Show we haven't gotten RTL for this yet. */
9462 temp = 0;
9464 /* Variables inherited from containing functions should have
9465 been lowered by this point. */
9466 context = decl_function_context (exp);
9467 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9468 || context == current_function_decl
9469 || TREE_STATIC (exp)
9470 || DECL_EXTERNAL (exp)
9471 /* ??? C++ creates functions that are not TREE_STATIC. */
9472 || TREE_CODE (exp) == FUNCTION_DECL);
9474 /* This is the case of an array whose size is to be determined
9475 from its initializer, while the initializer is still being parsed.
9476 ??? We aren't parsing while expanding anymore. */
9478 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9479 temp = validize_mem (decl_rtl);
9481 /* If DECL_RTL is memory, we are in the normal case and the
9482 address is not valid, get the address into a register. */
9484 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9486 if (alt_rtl)
9487 *alt_rtl = decl_rtl;
9488 decl_rtl = use_anchored_address (decl_rtl);
9489 if (modifier != EXPAND_CONST_ADDRESS
9490 && modifier != EXPAND_SUM
9491 && !memory_address_addr_space_p (DECL_MODE (exp),
9492 XEXP (decl_rtl, 0),
9493 MEM_ADDR_SPACE (decl_rtl)))
9494 temp = replace_equiv_address (decl_rtl,
9495 copy_rtx (XEXP (decl_rtl, 0)));
9498 /* If we got something, return it. But first, set the alignment
9499 if the address is a register. */
9500 if (temp != 0)
9502 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9503 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9505 return temp;
9508 /* If the mode of DECL_RTL does not match that of the decl,
9509 there are two cases: we are dealing with a BLKmode value
9510 that is returned in a register, or we are dealing with
9511 a promoted value. In the latter case, return a SUBREG
9512 of the wanted mode, but mark it so that we know that it
9513 was already extended. */
9514 if (REG_P (decl_rtl)
9515 && DECL_MODE (exp) != BLKmode
9516 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9518 machine_mode pmode;
9520 /* Get the signedness to be used for this variable. Ensure we get
9521 the same mode we got when the variable was declared. */
9522 if (code == SSA_NAME
9523 && (g = SSA_NAME_DEF_STMT (ssa_name))
9524 && gimple_code (g) == GIMPLE_CALL
9525 && !gimple_call_internal_p (g))
9526 pmode = promote_function_mode (type, mode, &unsignedp,
9527 gimple_call_fntype (g), 2);
9529 else
9530 pmode = promote_decl_mode (exp, &unsignedp);
9531 gcc_assert (GET_MODE (decl_rtl) == pmode);
9533 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9534 SUBREG_PROMOTED_VAR_P (temp) = 1;
9535 SUBREG_PROMOTED_SET (temp, unsignedp);
9536 return temp;
9539 return decl_rtl;
9541 case INTEGER_CST:
9542 /* Given that TYPE_PRECISION (type) is not always equal to
9543 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9544 the former to the latter according to the signedness of the
9545 type. */
9546 temp = immed_wide_int_const (wide_int::from
9547 (exp,
9548 GET_MODE_PRECISION (TYPE_MODE (type)),
9549 TYPE_SIGN (type)),
9550 TYPE_MODE (type));
9551 return temp;
9553 case VECTOR_CST:
9555 tree tmp = NULL_TREE;
9556 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9557 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9558 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9559 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9560 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9561 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9562 return const_vector_from_tree (exp);
9563 if (GET_MODE_CLASS (mode) == MODE_INT)
9565 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9566 if (type_for_mode)
9567 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9569 if (!tmp)
9571 vec<constructor_elt, va_gc> *v;
9572 unsigned i;
9573 vec_alloc (v, VECTOR_CST_NELTS (exp));
9574 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9575 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9576 tmp = build_constructor (type, v);
9578 return expand_expr (tmp, ignore ? const0_rtx : target,
9579 tmode, modifier);
9582 case CONST_DECL:
9583 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9585 case REAL_CST:
9586 /* If optimized, generate immediate CONST_DOUBLE
9587 which will be turned into memory by reload if necessary.
9589 We used to force a register so that loop.c could see it. But
9590 this does not allow gen_* patterns to perform optimizations with
9591 the constants. It also produces two insns in cases like "x = 1.0;".
9592 On most machines, floating-point constants are not permitted in
9593 many insns, so we'd end up copying it to a register in any case.
9595 Now, we do the copying in expand_binop, if appropriate. */
9596 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9597 TYPE_MODE (TREE_TYPE (exp)));
9599 case FIXED_CST:
9600 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9601 TYPE_MODE (TREE_TYPE (exp)));
9603 case COMPLEX_CST:
9604 /* Handle evaluating a complex constant in a CONCAT target. */
9605 if (original_target && GET_CODE (original_target) == CONCAT)
9607 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9608 rtx rtarg, itarg;
9610 rtarg = XEXP (original_target, 0);
9611 itarg = XEXP (original_target, 1);
9613 /* Move the real and imaginary parts separately. */
9614 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9615 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9617 if (op0 != rtarg)
9618 emit_move_insn (rtarg, op0);
9619 if (op1 != itarg)
9620 emit_move_insn (itarg, op1);
9622 return original_target;
9625 /* ... fall through ... */
9627 case STRING_CST:
9628 temp = expand_expr_constant (exp, 1, modifier);
9630 /* temp contains a constant address.
9631 On RISC machines where a constant address isn't valid,
9632 make some insns to get that address into a register. */
9633 if (modifier != EXPAND_CONST_ADDRESS
9634 && modifier != EXPAND_INITIALIZER
9635 && modifier != EXPAND_SUM
9636 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9637 MEM_ADDR_SPACE (temp)))
9638 return replace_equiv_address (temp,
9639 copy_rtx (XEXP (temp, 0)));
9640 return temp;
9642 case SAVE_EXPR:
9644 tree val = treeop0;
9645 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9646 inner_reference_p);
9648 if (!SAVE_EXPR_RESOLVED_P (exp))
9650 /* We can indeed still hit this case, typically via builtin
9651 expanders calling save_expr immediately before expanding
9652 something. Assume this means that we only have to deal
9653 with non-BLKmode values. */
9654 gcc_assert (GET_MODE (ret) != BLKmode);
9656 val = build_decl (curr_insn_location (),
9657 VAR_DECL, NULL, TREE_TYPE (exp));
9658 DECL_ARTIFICIAL (val) = 1;
9659 DECL_IGNORED_P (val) = 1;
9660 treeop0 = val;
9661 TREE_OPERAND (exp, 0) = treeop0;
9662 SAVE_EXPR_RESOLVED_P (exp) = 1;
9664 if (!CONSTANT_P (ret))
9665 ret = copy_to_reg (ret);
9666 SET_DECL_RTL (val, ret);
9669 return ret;
9673 case CONSTRUCTOR:
9674 /* If we don't need the result, just ensure we evaluate any
9675 subexpressions. */
9676 if (ignore)
9678 unsigned HOST_WIDE_INT idx;
9679 tree value;
9681 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9682 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9684 return const0_rtx;
9687 return expand_constructor (exp, target, modifier, false);
9689 case TARGET_MEM_REF:
9691 addr_space_t as
9692 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9693 enum insn_code icode;
9694 unsigned int align;
9696 op0 = addr_for_mem_ref (exp, as, true);
9697 op0 = memory_address_addr_space (mode, op0, as);
9698 temp = gen_rtx_MEM (mode, op0);
9699 set_mem_attributes (temp, exp, 0);
9700 set_mem_addr_space (temp, as);
9701 align = get_object_alignment (exp);
9702 if (modifier != EXPAND_WRITE
9703 && modifier != EXPAND_MEMORY
9704 && mode != BLKmode
9705 && align < GET_MODE_ALIGNMENT (mode)
9706 /* If the target does not have special handling for unaligned
9707 loads of this mode then it can use regular moves for them. */
9708 && ((icode = optab_handler (movmisalign_optab, mode))
9709 != CODE_FOR_nothing))
9711 struct expand_operand ops[2];
9713 /* We've already validated the memory, and we're creating a
9714 new pseudo destination. The predicates really can't fail,
9715 nor can the generator. */
9716 create_output_operand (&ops[0], NULL_RTX, mode);
9717 create_fixed_operand (&ops[1], temp);
9718 expand_insn (icode, 2, ops);
9719 temp = ops[0].value;
9721 return temp;
9724 case MEM_REF:
9726 addr_space_t as
9727 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9728 machine_mode address_mode;
9729 tree base = TREE_OPERAND (exp, 0);
9730 gimple def_stmt;
9731 enum insn_code icode;
9732 unsigned align;
9733 /* Handle expansion of non-aliased memory with non-BLKmode. That
9734 might end up in a register. */
9735 if (mem_ref_refers_to_non_mem_p (exp))
9737 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9738 base = TREE_OPERAND (base, 0);
9739 if (offset == 0
9740 && tree_fits_uhwi_p (TYPE_SIZE (type))
9741 && (GET_MODE_BITSIZE (DECL_MODE (base))
9742 == tree_to_uhwi (TYPE_SIZE (type))))
9743 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9744 target, tmode, modifier);
9745 if (TYPE_MODE (type) == BLKmode)
9747 temp = assign_stack_temp (DECL_MODE (base),
9748 GET_MODE_SIZE (DECL_MODE (base)));
9749 store_expr (base, temp, 0, false);
9750 temp = adjust_address (temp, BLKmode, offset);
9751 set_mem_size (temp, int_size_in_bytes (type));
9752 return temp;
9754 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9755 bitsize_int (offset * BITS_PER_UNIT));
9756 return expand_expr (exp, target, tmode, modifier);
9758 address_mode = targetm.addr_space.address_mode (as);
9759 base = TREE_OPERAND (exp, 0);
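/* If the base pointer was formed by masking off low bits with a
BIT_AND, rebuild that masking into the address tree; presumably this
makes the alignment the mask guarantees visible to
get_object_alignment below.  */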
9760 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9762 tree mask = gimple_assign_rhs2 (def_stmt);
9763 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9764 gimple_assign_rhs1 (def_stmt), mask);
9765 TREE_OPERAND (exp, 0) = base;
9767 align = get_object_alignment (exp);
9768 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9769 op0 = memory_address_addr_space (mode, op0, as);
9770 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9772 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9773 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9774 op0 = memory_address_addr_space (mode, op0, as);
9776 temp = gen_rtx_MEM (mode, op0);
9777 set_mem_attributes (temp, exp, 0);
9778 set_mem_addr_space (temp, as);
9779 if (TREE_THIS_VOLATILE (exp))
9780 MEM_VOLATILE_P (temp) = 1;
9781 if (modifier != EXPAND_WRITE
9782 && modifier != EXPAND_MEMORY
9783 && !inner_reference_p
9784 && mode != BLKmode
9785 && align < GET_MODE_ALIGNMENT (mode))
9787 if ((icode = optab_handler (movmisalign_optab, mode))
9788 != CODE_FOR_nothing)
9790 struct expand_operand ops[2];
9792 /* We've already validated the memory, and we're creating a
9793 new pseudo destination. The predicates really can't fail,
9794 nor can the generator. */
9795 create_output_operand (&ops[0], NULL_RTX, mode);
9796 create_fixed_operand (&ops[1], temp);
9797 expand_insn (icode, 2, ops);
9798 temp = ops[0].value;
9800 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9801 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9802 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9803 (modifier == EXPAND_STACK_PARM
9804 ? NULL_RTX : target),
9805 mode, mode);
9807 return temp;
9810 case ARRAY_REF:
9813 tree array = treeop0;
9814 tree index = treeop1;
9815 tree init;
9817 /* Fold an expression like: "foo"[2].
9818 This is not done in fold so it won't happen inside &.
9819 Don't fold if this is for wide characters since it's too
9820 difficult to do correctly and this is a very rare case. */
9822 if (modifier != EXPAND_CONST_ADDRESS
9823 && modifier != EXPAND_INITIALIZER
9824 && modifier != EXPAND_MEMORY)
9826 tree t = fold_read_from_constant_string (exp);
9828 if (t)
9829 return expand_expr (t, target, tmode, modifier);
9832 /* If this is a constant index into a constant array,
9833 just get the value from the array. Handle both the cases when
9834 we have an explicit constructor and when our operand is a variable
9835 that was declared const. */
9837 if (modifier != EXPAND_CONST_ADDRESS
9838 && modifier != EXPAND_INITIALIZER
9839 && modifier != EXPAND_MEMORY
9840 && TREE_CODE (array) == CONSTRUCTOR
9841 && ! TREE_SIDE_EFFECTS (array)
9842 && TREE_CODE (index) == INTEGER_CST)
9844 unsigned HOST_WIDE_INT ix;
9845 tree field, value;
9847 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9848 field, value)
9849 if (tree_int_cst_equal (field, index))
9851 if (!TREE_SIDE_EFFECTS (value))
9852 return expand_expr (fold (value), target, tmode, modifier);
9853 break;
9857 else if (optimize >= 1
9858 && modifier != EXPAND_CONST_ADDRESS
9859 && modifier != EXPAND_INITIALIZER
9860 && modifier != EXPAND_MEMORY
9861 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9862 && TREE_CODE (index) == INTEGER_CST
9863 && (TREE_CODE (array) == VAR_DECL
9864 || TREE_CODE (array) == CONST_DECL)
9865 && (init = ctor_for_folding (array)) != error_mark_node)
9867 if (init == NULL_TREE)
9869 tree value = build_zero_cst (type);
9870 if (TREE_CODE (value) == CONSTRUCTOR)
9872 /* If VALUE is a CONSTRUCTOR, this optimization is only
9873 useful if this doesn't store the CONSTRUCTOR into
9874 memory. If it does, it is more efficient to just
9875 load the data from the array directly. */
9876 rtx ret = expand_constructor (value, target,
9877 modifier, true);
9878 if (ret == NULL_RTX)
9879 value = NULL_TREE;
9882 if (value)
9883 return expand_expr (value, target, tmode, modifier);
9885 else if (TREE_CODE (init) == CONSTRUCTOR)
9887 unsigned HOST_WIDE_INT ix;
9888 tree field, value;
9890 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9891 field, value)
9892 if (tree_int_cst_equal (field, index))
9894 if (TREE_SIDE_EFFECTS (value))
9895 break;
9897 if (TREE_CODE (value) == CONSTRUCTOR)
9899 /* If VALUE is a CONSTRUCTOR, this
9900 optimization is only useful if
9901 this doesn't store the CONSTRUCTOR
9902 into memory. If it does, it is more
9903 efficient to just load the data from
9904 the array directly. */
9905 rtx ret = expand_constructor (value, target,
9906 modifier, true);
9907 if (ret == NULL_RTX)
9908 break;
9911 return
9912 expand_expr (fold (value), target, tmode, modifier);
9915 else if (TREE_CODE (init) == STRING_CST)
9917 tree low_bound = array_ref_low_bound (exp);
9918 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9920 /* Optimize the special case of a zero lower bound.
9922 We convert the lower bound to sizetype to avoid problems
9923 with constant folding. E.g. suppose the lower bound is
9924 1 and its mode is QI. Without the conversion
9925 (ARRAY + (INDEX - (unsigned char)1))
9926 becomes
9927 (ARRAY + (-(unsigned char)1) + INDEX)
9928 which becomes
9929 (ARRAY + 255 + INDEX). Oops! */
9930 if (!integer_zerop (low_bound))
9931 index1 = size_diffop_loc (loc, index1,
9932 fold_convert_loc (loc, sizetype,
9933 low_bound));
9935 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9937 tree type = TREE_TYPE (TREE_TYPE (init));
9938 machine_mode mode = TYPE_MODE (type);
9940 if (GET_MODE_CLASS (mode) == MODE_INT
9941 && GET_MODE_SIZE (mode) == 1)
9942 return gen_int_mode (TREE_STRING_POINTER (init)
9943 [TREE_INT_CST_LOW (index1)],
9944 mode);
9949 goto normal_inner_ref;
9951 case COMPONENT_REF:
9952 /* If the operand is a CONSTRUCTOR, we can just extract the
9953 appropriate field if it is present. */
9954 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9956 unsigned HOST_WIDE_INT idx;
9957 tree field, value;
9959 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9960 idx, field, value)
9961 if (field == treeop1
9962 /* We can normally use the value of the field in the
9963 CONSTRUCTOR. However, if this is a bitfield in
9964 an integral mode that we can fit in a HOST_WIDE_INT,
9965 we must mask only the number of bits in the bitfield,
9966 since this is done implicitly by the constructor. If
9967 the bitfield does not meet either of those conditions,
9968 we can't do this optimization. */
9969 && (! DECL_BIT_FIELD (field)
9970 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9971 && (GET_MODE_PRECISION (DECL_MODE (field))
9972 <= HOST_BITS_PER_WIDE_INT))))
9974 if (DECL_BIT_FIELD (field)
9975 && modifier == EXPAND_STACK_PARM)
9976 target = 0;
9977 op0 = expand_expr (value, target, tmode, modifier);
9978 if (DECL_BIT_FIELD (field))
9980 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9981 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9983 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9985 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9986 imode);
9987 op0 = expand_and (imode, op0, op1, target);
9989 else
9991 int count = GET_MODE_PRECISION (imode) - bitsize;
9993 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9994 target, 0);
9995 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9996 target, 0);
10000 return op0;
10003 goto normal_inner_ref;
10005 case BIT_FIELD_REF:
10006 case ARRAY_RANGE_REF:
10007 normal_inner_ref:
10009 machine_mode mode1, mode2;
10010 HOST_WIDE_INT bitsize, bitpos;
10011 tree offset;
10012 int volatilep = 0, must_force_mem;
10013 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10014 &mode1, &unsignedp, &volatilep, true);
10015 rtx orig_op0, memloc;
10016 bool clear_mem_expr = false;
10018 /* If we got back the original object, something is wrong. Perhaps
10019 we are evaluating an expression too early. In any event, don't
10020 infinitely recurse. */
10021 gcc_assert (tem != exp);
10023 /* If TEM's type is a union of variable size, pass TARGET to the inner
10024 computation, since it will need a temporary and TARGET is known
10025 to suffice. This occurs in unchecked conversion in Ada. */
10026 orig_op0 = op0
10027 = expand_expr_real (tem,
10028 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10029 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10030 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10031 != INTEGER_CST)
10032 && modifier != EXPAND_STACK_PARM
10033 ? target : NULL_RTX),
10034 VOIDmode,
10035 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10036 NULL, true);
10038 /* If the field has a mode, we want to access it in the
10039 field's mode, not the computed mode.
10040 If a MEM has VOIDmode (external with incomplete type),
10041 use BLKmode for it instead. */
10042 if (MEM_P (op0))
10044 if (mode1 != VOIDmode)
10045 op0 = adjust_address (op0, mode1, 0);
10046 else if (GET_MODE (op0) == VOIDmode)
10047 op0 = adjust_address (op0, BLKmode, 0);
10050 mode2
10051 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10053 /* If we have either an offset, a BLKmode result, or a reference
10054 outside the underlying object, we must force it to memory.
10055 Such a case can occur in Ada if we have unchecked conversion
10056 of an expression from a scalar type to an aggregate type or
10057 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10058 passed a partially uninitialized object or a view-conversion
10059 to a larger size. */
10060 must_force_mem = (offset
10061 || mode1 == BLKmode
10062 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10064 /* Handle CONCAT first. */
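/* A CONCAT represents a complex value as separate real and imaginary
pieces; if the reference covers the whole value or exactly one of the
two halves, no memory is needed.  */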
10065 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10067 if (bitpos == 0
10068 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10069 return op0;
10070 if (bitpos == 0
10071 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10072 && bitsize)
10074 op0 = XEXP (op0, 0);
10075 mode2 = GET_MODE (op0);
10077 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10078 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10079 && bitpos
10080 && bitsize)
10082 op0 = XEXP (op0, 1);
10083 bitpos = 0;
10084 mode2 = GET_MODE (op0);
10086 else
10087 /* Otherwise force into memory. */
10088 must_force_mem = 1;
10091 /* If this is a constant, put it in a register if it is a legitimate
10092 constant and we don't need a memory reference. */
10093 if (CONSTANT_P (op0)
10094 && mode2 != BLKmode
10095 && targetm.legitimate_constant_p (mode2, op0)
10096 && !must_force_mem)
10097 op0 = force_reg (mode2, op0);
10099 /* Otherwise, if this is a constant, try to force it to the constant
10100 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10101 is a legitimate constant. */
10102 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10103 op0 = validize_mem (memloc);
10105 /* Otherwise, if this is a constant or the object is not in memory
10106 and need be, put it there. */
10107 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10109 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10110 emit_move_insn (memloc, op0);
10111 op0 = memloc;
10112 clear_mem_expr = true;
10115 if (offset)
10117 machine_mode address_mode;
10118 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10119 EXPAND_SUM);
10121 gcc_assert (MEM_P (op0));
10123 address_mode = get_address_mode (op0);
10124 if (GET_MODE (offset_rtx) != address_mode)
10126 /* We cannot be sure that the RTL in offset_rtx is valid outside
10127 of a memory address context, so force it into a register
10128 before attempting to convert it to the desired mode. */
10129 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10130 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10133 /* See the comment in expand_assignment for the rationale. */
10134 if (mode1 != VOIDmode
10135 && bitpos != 0
10136 && bitsize > 0
10137 && (bitpos % bitsize) == 0
10138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10139 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10141 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10142 bitpos = 0;
10145 op0 = offset_address (op0, offset_rtx,
10146 highest_pow2_factor (offset));
10149 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10150 record its alignment as BIGGEST_ALIGNMENT. */
10151 if (MEM_P (op0) && bitpos == 0 && offset != 0
10152 && is_aligning_offset (offset, tem))
10153 set_mem_align (op0, BIGGEST_ALIGNMENT);
10155 /* Don't forget about volatility even if this is a bitfield. */
10156 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10158 if (op0 == orig_op0)
10159 op0 = copy_rtx (op0);
10161 MEM_VOLATILE_P (op0) = 1;
10164 /* In cases where an aligned union has an unaligned object
10165 as a field, we might be extracting a BLKmode value from
10166 an integer-mode (e.g., SImode) object. Handle this case
10167 by doing the extract into an object as wide as the field
10168 (which we know to be the width of a basic mode), then
10169 storing into memory, and changing the mode to BLKmode. */
10170 if (mode1 == VOIDmode
10171 || REG_P (op0) || GET_CODE (op0) == SUBREG
10172 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10173 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10174 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10175 && modifier != EXPAND_CONST_ADDRESS
10176 && modifier != EXPAND_INITIALIZER
10177 && modifier != EXPAND_MEMORY)
10178 /* If the bitfield is volatile and the bitsize
10179 is narrower than the access size of the bitfield,
10180 we need to extract bitfields from the access. */
10181 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10182 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10183 && mode1 != BLKmode
10184 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10185 /* If the field isn't aligned enough to fetch as a memref,
10186 fetch it as a bit field. */
10187 || (mode1 != BLKmode
10188 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10189 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10190 || (MEM_P (op0)
10191 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10192 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10193 && modifier != EXPAND_MEMORY
10194 && ((modifier == EXPAND_CONST_ADDRESS
10195 || modifier == EXPAND_INITIALIZER)
10196 ? STRICT_ALIGNMENT
10197 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10198 || (bitpos % BITS_PER_UNIT != 0)))
10199 /* If the type and the field are a constant size and the
10200 size of the type isn't the same size as the bitfield,
10201 we must use bitfield operations. */
10202 || (bitsize >= 0
10203 && TYPE_SIZE (TREE_TYPE (exp))
10204 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10205 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10206 bitsize)))
10208 machine_mode ext_mode = mode;
10210 if (ext_mode == BLKmode
10211 && ! (target != 0 && MEM_P (op0)
10212 && MEM_P (target)
10213 && bitpos % BITS_PER_UNIT == 0))
10214 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10216 if (ext_mode == BLKmode)
10218 if (target == 0)
10219 target = assign_temp (type, 1, 1);
10221 /* ??? Unlike the similar test a few lines below, this one is
10222 very likely obsolete. */
10223 if (bitsize == 0)
10224 return target;
10226 /* In this case, BITPOS must start at a byte boundary and
10227 TARGET, if specified, must be a MEM. */
10228 gcc_assert (MEM_P (op0)
10229 && (!target || MEM_P (target))
10230 && !(bitpos % BITS_PER_UNIT));
10232 emit_block_move (target,
10233 adjust_address (op0, VOIDmode,
10234 bitpos / BITS_PER_UNIT),
10235 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10236 / BITS_PER_UNIT),
10237 (modifier == EXPAND_STACK_PARM
10238 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10240 return target;
10243 /* If we have nothing to extract, the result will be 0 for targets
10244 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10245 return 0 for the sake of consistency, as reading a zero-sized
10246 bitfield is valid in Ada and the value is fully specified. */
10247 if (bitsize == 0)
10248 return const0_rtx;
10250 op0 = validize_mem (op0);
10252 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10253 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10255 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10256 (modifier == EXPAND_STACK_PARM
10257 ? NULL_RTX : target),
10258 ext_mode, ext_mode);
10260 /* If the result is a record type and BITSIZE is narrower than
10261 the mode of OP0, an integral mode, and this is a big endian
10262 machine, we must put the field into the high-order bits. */
10263 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10264 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10265 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10266 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10267 GET_MODE_BITSIZE (GET_MODE (op0))
10268 - bitsize, op0, 1);
10270 /* If the result type is BLKmode, store the data into a temporary
10271 of the appropriate type, but with the mode corresponding to the
10272 mode for the data we have (op0's mode). */
10273 if (mode == BLKmode)
10275 rtx new_rtx
10276 = assign_stack_temp_for_type (ext_mode,
10277 GET_MODE_BITSIZE (ext_mode),
10278 type);
10279 emit_move_insn (new_rtx, op0);
10280 op0 = copy_rtx (new_rtx);
10281 PUT_MODE (op0, BLKmode);
10284 return op0;
10287 /* If the result is BLKmode, use that to access the object
10288 now as well. */
10289 if (mode == BLKmode)
10290 mode1 = BLKmode;
10292 /* Get a reference to just this component. */
10293 if (modifier == EXPAND_CONST_ADDRESS
10294 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10295 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10296 else
10297 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10299 if (op0 == orig_op0)
10300 op0 = copy_rtx (op0);
10302 set_mem_attributes (op0, exp, 0);
10304 if (REG_P (XEXP (op0, 0)))
10305 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10307 /* If op0 is a temporary because the original expression was forced
10308 to memory, clear MEM_EXPR so that the original expression cannot
10309 be marked as addressable through MEM_EXPR of the temporary. */
10310 if (clear_mem_expr)
10311 set_mem_expr (op0, NULL_TREE);
10313 MEM_VOLATILE_P (op0) |= volatilep;
10314 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10315 || modifier == EXPAND_CONST_ADDRESS
10316 || modifier == EXPAND_INITIALIZER)
10317 return op0;
10319 if (target == 0)
10320 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10322 convert_move (target, op0, unsignedp);
10323 return target;
10326 case OBJ_TYPE_REF:
10327 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10329 case CALL_EXPR:
10330 /* All valid uses of __builtin_va_arg_pack () are removed during
10331 inlining. */
10332 if (CALL_EXPR_VA_ARG_PACK (exp))
10333 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10335 tree fndecl = get_callee_fndecl (exp), attr;
10337 if (fndecl
10338 && (attr = lookup_attribute ("error",
10339 DECL_ATTRIBUTES (fndecl))) != NULL)
10340 error ("%Kcall to %qs declared with attribute error: %s",
10341 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10342 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10343 if (fndecl
10344 && (attr = lookup_attribute ("warning",
10345 DECL_ATTRIBUTES (fndecl))) != NULL)
10346 warning_at (tree_nonartificial_location (exp),
10347 0, "%Kcall to %qs declared with attribute warning: %s",
10348 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10349 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10351 /* Check for a built-in function. */
10352 if (fndecl && DECL_BUILT_IN (fndecl))
10354 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10355 if (CALL_WITH_BOUNDS_P (exp))
10356 return expand_builtin_with_bounds (exp, target, subtarget,
10357 tmode, ignore);
10358 else
10359 return expand_builtin (exp, target, subtarget, tmode, ignore);
10362 return expand_call (exp, target, ignore);
10364 case VIEW_CONVERT_EXPR:
10365 op0 = NULL_RTX;
10367 /* If we are converting to BLKmode, try to avoid an intermediate
10368 temporary by fetching an inner memory reference. */
10369 if (mode == BLKmode
10370 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10371 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10372 && handled_component_p (treeop0))
10374 machine_mode mode1;
10375 HOST_WIDE_INT bitsize, bitpos;
10376 tree offset;
10377 int unsignedp;
10378 int volatilep = 0;
10379 tree tem
10380 = get_inner_reference (treeop0, &bitsize, &bitpos,
10381 &offset, &mode1, &unsignedp, &volatilep,
10382 true);
10383 rtx orig_op0;
10385 /* ??? We should work harder and deal with non-zero offsets. */
10386 if (!offset
10387 && (bitpos % BITS_PER_UNIT) == 0
10388 && bitsize >= 0
10389 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10391 /* See the normal_inner_ref case for the rationale. */
10392 orig_op0
10393 = expand_expr_real (tem,
10394 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10395 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10396 != INTEGER_CST)
10397 && modifier != EXPAND_STACK_PARM
10398 ? target : NULL_RTX),
10399 VOIDmode,
10400 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10401 NULL, true);
10403 if (MEM_P (orig_op0))
10405 op0 = orig_op0;
10407 /* Get a reference to just this component. */
10408 if (modifier == EXPAND_CONST_ADDRESS
10409 || modifier == EXPAND_SUM
10410 || modifier == EXPAND_INITIALIZER)
10411 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10412 else
10413 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10415 if (op0 == orig_op0)
10416 op0 = copy_rtx (op0);
10418 set_mem_attributes (op0, treeop0, 0);
10419 if (REG_P (XEXP (op0, 0)))
10420 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10422 MEM_VOLATILE_P (op0) |= volatilep;
10427 if (!op0)
10428 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10429 NULL, inner_reference_p);
10431 /* If the input and output modes are both the same, we are done. */
10432 if (mode == GET_MODE (op0))
10434 /* If neither mode is BLKmode, and both modes are the same size
10435 then we can use gen_lowpart. */
10436 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10437 && (GET_MODE_PRECISION (mode)
10438 == GET_MODE_PRECISION (GET_MODE (op0)))
10439 && !COMPLEX_MODE_P (GET_MODE (op0)))
10441 if (GET_CODE (op0) == SUBREG)
10442 op0 = force_reg (GET_MODE (op0), op0);
10443 temp = gen_lowpart_common (mode, op0);
10444 if (temp)
10445 op0 = temp;
10446 else
10448 if (!REG_P (op0) && !MEM_P (op0))
10449 op0 = force_reg (GET_MODE (op0), op0);
10450 op0 = gen_lowpart (mode, op0);
10453 /* If both types are integral, convert from one mode to the other. */
10454 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10455 op0 = convert_modes (mode, GET_MODE (op0), op0,
10456 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10457 /* If the output type is a bit-field type, do an extraction. */
10458 else if (reduce_bit_field)
10459 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10460 TYPE_UNSIGNED (type), NULL_RTX,
10461 mode, mode);
10462 /* As a last resort, spill op0 to memory, and reload it in a
10463 different mode. */
10464 else if (!MEM_P (op0))
10466 /* If the operand is not a MEM, force it into memory. Since we
10467 are going to be changing the mode of the MEM, don't call
10468 force_const_mem for constants because we don't allow pool
10469 constants to change mode. */
10470 tree inner_type = TREE_TYPE (treeop0);
10472 gcc_assert (!TREE_ADDRESSABLE (exp));
10474 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10475 target
10476 = assign_stack_temp_for_type
10477 (TYPE_MODE (inner_type),
10478 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10480 emit_move_insn (target, op0);
10481 op0 = target;
10484 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10485 output type is such that the operand is known to be aligned, indicate
10486 that it is. Otherwise, we need only be concerned about alignment for
10487 non-BLKmode results. */
10488 if (MEM_P (op0))
10490 enum insn_code icode;
10492 if (TYPE_ALIGN_OK (type))
10494 /* ??? Copying the MEM without substantially changing it might
10495 run afoul of the code handling volatile memory references in
10496 store_expr, which assumes that TARGET is returned unmodified
10497 if it has been used. */
10498 op0 = copy_rtx (op0);
10499 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10501 else if (modifier != EXPAND_WRITE
10502 && modifier != EXPAND_MEMORY
10503 && !inner_reference_p
10504 && mode != BLKmode
10505 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10507 /* If the target has special handling for unaligned
10508 loads of this mode, use it. */
10509 if ((icode = optab_handler (movmisalign_optab, mode))
10510 != CODE_FOR_nothing)
10512 rtx reg;
10514 op0 = adjust_address (op0, mode, 0);
10515 /* We've already validated the memory, and we're creating a
10516 new pseudo destination. The predicates really can't
10517 fail. */
10518 reg = gen_reg_rtx (mode);
10520 /* Nor can the insn generator. */
10521 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10522 emit_insn (insn);
10523 return reg;
10525 else if (STRICT_ALIGNMENT)
10527 tree inner_type = TREE_TYPE (treeop0);
10528 HOST_WIDE_INT temp_size
10529 = MAX (int_size_in_bytes (inner_type),
10530 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10531 rtx new_rtx
10532 = assign_stack_temp_for_type (mode, temp_size, type);
10533 rtx new_with_op0_mode
10534 = adjust_address (new_rtx, GET_MODE (op0), 0);
10536 gcc_assert (!TREE_ADDRESSABLE (exp));
10538 if (GET_MODE (op0) == BLKmode)
10539 emit_block_move (new_with_op0_mode, op0,
10540 GEN_INT (GET_MODE_SIZE (mode)),
10541 (modifier == EXPAND_STACK_PARM
10542 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10543 else
10544 emit_move_insn (new_with_op0_mode, op0);
10546 op0 = new_rtx;
10550 op0 = adjust_address (op0, mode, 0);
10553 return op0;
10555 case MODIFY_EXPR:
10557 tree lhs = treeop0;
10558 tree rhs = treeop1;
10559 gcc_assert (ignore);
10561 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10562 of size 1. In this case, (unless we need the result of the
10563 assignment) we can do this more efficiently with a
10564 test followed by an assignment, if necessary.
10566 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10567 things change so we do, this code should be enhanced to
10568 support it. */
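/* Concretely, lhs |= bit expands as "if (bit) lhs = 1;" and lhs &= bit
expands as "if (!bit) lhs = 0;".  */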
10569 if (TREE_CODE (lhs) == COMPONENT_REF
10570 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10571 || TREE_CODE (rhs) == BIT_AND_EXPR)
10572 && TREE_OPERAND (rhs, 0) == lhs
10573 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10574 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10575 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10577 rtx_code_label *label = gen_label_rtx ();
10578 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10579 do_jump (TREE_OPERAND (rhs, 1),
10580 value ? label : 0,
10581 value ? 0 : label, -1);
10582 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10583 false);
10584 do_pending_stack_adjust ();
10585 emit_label (label);
10586 return const0_rtx;
10589 expand_assignment (lhs, rhs, false);
10590 return const0_rtx;
10593 case ADDR_EXPR:
10594 return expand_expr_addr_expr (exp, target, tmode, modifier);
10596 case REALPART_EXPR:
10597 op0 = expand_normal (treeop0);
10598 return read_complex_part (op0, false);
10600 case IMAGPART_EXPR:
10601 op0 = expand_normal (treeop0);
10602 return read_complex_part (op0, true);
10604 case RETURN_EXPR:
10605 case LABEL_EXPR:
10606 case GOTO_EXPR:
10607 case SWITCH_EXPR:
10608 case ASM_EXPR:
10609 /* Expanded in cfgexpand.c. */
10610 gcc_unreachable ();
10612 case TRY_CATCH_EXPR:
10613 case CATCH_EXPR:
10614 case EH_FILTER_EXPR:
10615 case TRY_FINALLY_EXPR:
10616 /* Lowered by tree-eh.c. */
10617 gcc_unreachable ();
10619 case WITH_CLEANUP_EXPR:
10620 case CLEANUP_POINT_EXPR:
10621 case TARGET_EXPR:
10622 case CASE_LABEL_EXPR:
10623 case VA_ARG_EXPR:
10624 case BIND_EXPR:
10625 case INIT_EXPR:
10626 case CONJ_EXPR:
10627 case COMPOUND_EXPR:
10628 case PREINCREMENT_EXPR:
10629 case PREDECREMENT_EXPR:
10630 case POSTINCREMENT_EXPR:
10631 case POSTDECREMENT_EXPR:
10632 case LOOP_EXPR:
10633 case EXIT_EXPR:
10634 case COMPOUND_LITERAL_EXPR:
10635 /* Lowered by gimplify.c. */
10636 gcc_unreachable ();
10638 case FDESC_EXPR:
10639 /* Function descriptors are not valid except as
10640 initialization constants, and should not be expanded. */
10641 gcc_unreachable ();
10643 case WITH_SIZE_EXPR:
10644 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10645 have pulled out the size to use in whatever context it needed. */
10646 return expand_expr_real (treeop0, original_target, tmode,
10647 modifier, alt_rtl, inner_reference_p);
10649 default:
10650 return expand_expr_real_2 (&ops, target, tmode, modifier);
10654 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10655 signedness of TYPE), possibly returning the result in TARGET. */
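/* For example, for a 3-bit field in SImode, unsigned values are masked
with 7, while signed values are shifted left and then arithmetically
right by 29 so that bit 2 is sign-extended.  */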
10656 static rtx
10657 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10659 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10660 if (target && GET_MODE (target) != GET_MODE (exp))
10661 target = 0;
10662 /* For constant values, reduce using build_int_cst_type. */
10663 if (CONST_INT_P (exp))
10665 HOST_WIDE_INT value = INTVAL (exp);
10666 tree t = build_int_cst_type (type, value);
10667 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10669 else if (TYPE_UNSIGNED (type))
10671 machine_mode mode = GET_MODE (exp);
10672 rtx mask = immed_wide_int_const
10673 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10674 return expand_and (mode, exp, mask, target);
10676 else
10678 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10679 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10680 exp, count, target, 0);
10681 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10682 exp, count, target, 0);
10686 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10687 when applied to the address of EXP produces an address known to be
10688 aligned more than BIGGEST_ALIGNMENT. */
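/* Such an offset typically has the form (-EXP) & (ALIGN - 1), with ALIGN
a power of 2 larger than BIGGEST_ALIGNMENT, i.e. the adjustment needed
to round the address of EXP up to an ALIGN-byte boundary.  */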
10690 static int
10691 is_aligning_offset (const_tree offset, const_tree exp)
10693 /* Strip off any conversions. */
10694 while (CONVERT_EXPR_P (offset))
10695 offset = TREE_OPERAND (offset, 0);
10697 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10698 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10699 if (TREE_CODE (offset) != BIT_AND_EXPR
10700 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10701 || compare_tree_int (TREE_OPERAND (offset, 1),
10702 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10703 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10704 return 0;
10706 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10707 It must be NEGATE_EXPR. Then strip any more conversions. */
10708 offset = TREE_OPERAND (offset, 0);
10709 while (CONVERT_EXPR_P (offset))
10710 offset = TREE_OPERAND (offset, 0);
10712 if (TREE_CODE (offset) != NEGATE_EXPR)
10713 return 0;
10715 offset = TREE_OPERAND (offset, 0);
10716 while (CONVERT_EXPR_P (offset))
10717 offset = TREE_OPERAND (offset, 0);
10719 /* This must now be the address of EXP. */
10720 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10723 /* Return the tree node if ARG corresponds to a string constant, or zero
10724 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10725 in bytes within the string that ARG is accessing. The type of the
10726 offset will be `sizetype'. */
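/* For example, given ARG == &"hello"[2], return the STRING_CST "hello"
and set *PTR_OFFSET to a sizetype constant 2.  */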
10728 tree
10729 string_constant (tree arg, tree *ptr_offset)
10731 tree array, offset, lower_bound;
10732 STRIP_NOPS (arg);
10734 if (TREE_CODE (arg) == ADDR_EXPR)
10736 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10738 *ptr_offset = size_zero_node;
10739 return TREE_OPERAND (arg, 0);
10741 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10743 array = TREE_OPERAND (arg, 0);
10744 offset = size_zero_node;
10746 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10748 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10749 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10750 if (TREE_CODE (array) != STRING_CST
10751 && TREE_CODE (array) != VAR_DECL)
10752 return 0;
10754 /* Check if the array has a nonzero lower bound. */
10755 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10756 if (!integer_zerop (lower_bound))
10758 /* If the lower bound and offset aren't both constants, return 0. */
10759 if (TREE_CODE (lower_bound) != INTEGER_CST)
10760 return 0;
10761 if (TREE_CODE (offset) != INTEGER_CST)
10762 return 0;
10763 /* Adjust offset by the lower bound. */
10764 offset = size_diffop (fold_convert (sizetype, offset),
10765 fold_convert (sizetype, lower_bound));
10768 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10770 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10771 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10772 if (TREE_CODE (array) != ADDR_EXPR)
10773 return 0;
10774 array = TREE_OPERAND (array, 0);
10775 if (TREE_CODE (array) != STRING_CST
10776 && TREE_CODE (array) != VAR_DECL)
10777 return 0;
10779 else
10780 return 0;
10782 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10784 tree arg0 = TREE_OPERAND (arg, 0);
10785 tree arg1 = TREE_OPERAND (arg, 1);
10787 STRIP_NOPS (arg0);
10788 STRIP_NOPS (arg1);
10790 if (TREE_CODE (arg0) == ADDR_EXPR
10791 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10792 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10794 array = TREE_OPERAND (arg0, 0);
10795 offset = arg1;
10797 else if (TREE_CODE (arg1) == ADDR_EXPR
10798 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10799 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10801 array = TREE_OPERAND (arg1, 0);
10802 offset = arg0;
10804 else
10805 return 0;
10807 else
10808 return 0;
10810 if (TREE_CODE (array) == STRING_CST)
10812 *ptr_offset = fold_convert (sizetype, offset);
10813 return array;
10815 else if (TREE_CODE (array) == VAR_DECL
10816 || TREE_CODE (array) == CONST_DECL)
10818 int length;
10819 tree init = ctor_for_folding (array);
10821 /* Variables initialized to string literals can be handled too. */
10822 if (init == error_mark_node
10823 || !init
10824 || TREE_CODE (init) != STRING_CST)
10825 return 0;
10827 /* Avoid const char foo[4] = "abcde"; */
10828 if (DECL_SIZE_UNIT (array) == NULL_TREE
10829 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10830 || (length = TREE_STRING_LENGTH (init)) <= 0
10831 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10832 return 0;
10834 /* If the variable is bigger than the string literal, OFFSET must be
10835 constant and within the bounds of the string literal. */
10836 offset = fold_convert (sizetype, offset);
10837 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10838 && (! tree_fits_uhwi_p (offset)
10839 || compare_tree_int (offset, length) >= 0))
10840 return 0;
10842 *ptr_offset = offset;
10843 return init;
10846 return 0;
10849 /* Generate code to calculate the comparison described by the exploded
10850 expression OPS using a store-flag instruction, and return an rtx for
10851 the result.
10853 If TARGET is nonzero, store the result there if convenient.
10855 Return zero if there is no suitable set-flag instruction
10856 available on this machine.
10858 Once expand_expr has been called on the arguments of the comparison,
10859 we are committed to doing the store flag, since it is not safe to
10860 re-evaluate the expression. We emit the store-flag insn by calling
10861 emit_store_flag, but only expand the arguments if we have a reason
10862 to believe that emit_store_flag will be successful. If we think that
10863 it will, but it isn't, we have to simulate the store-flag with a
10864 set/jump/set sequence. */
10866 static rtx
10867 do_store_flag (sepops ops, rtx target, machine_mode mode)
10869 enum rtx_code code;
10870 tree arg0, arg1, type;
10871 machine_mode operand_mode;
10872 int unsignedp;
10873 rtx op0, op1;
10874 rtx subtarget = target;
10875 location_t loc = ops->location;
10877 arg0 = ops->op0;
10878 arg1 = ops->op1;
10880 /* Don't crash if the comparison was erroneous. */
10881 if (arg0 == error_mark_node || arg1 == error_mark_node)
10882 return const0_rtx;
10884 type = TREE_TYPE (arg0);
10885 operand_mode = TYPE_MODE (type);
10886 unsignedp = TYPE_UNSIGNED (type);
10888 /* We won't bother with BLKmode store-flag operations because it would mean
10889 passing a lot of information to emit_store_flag. */
10890 if (operand_mode == BLKmode)
10891 return 0;
10893 /* We won't bother with store-flag operations involving function pointers
10894 when function pointers must be canonicalized before comparisons. */
10895 if (targetm.have_canonicalize_funcptr_for_compare ()
10896 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10897 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10898 == FUNCTION_TYPE))
10899 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10900 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10901 == FUNCTION_TYPE))))
10902 return 0;
10904 STRIP_NOPS (arg0);
10905 STRIP_NOPS (arg1);
10907 /* For vector typed comparisons emit code to generate the desired
10908 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10909 expander for this. */
10910 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10912 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10913 tree if_true = constant_boolean_node (true, ops->type);
10914 tree if_false = constant_boolean_node (false, ops->type);
10915 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10918 /* Get the rtx comparison code to use. We know that EXP is a comparison
10919 operation of some type. Some comparisons against 1 and -1 can be
10920 converted to comparisons with zero. Do so here so that the tests
10921 below will be aware that we have a comparison with zero. These
10922 tests will not catch constants in the first operand, but constants
10923 are rarely passed as the first operand. */
10925 switch (ops->code)
10927 case EQ_EXPR:
10928 code = EQ;
10929 break;
10930 case NE_EXPR:
10931 code = NE;
10932 break;
10933 case LT_EXPR:
10934 if (integer_onep (arg1))
10935 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10936 else
10937 code = unsignedp ? LTU : LT;
10938 break;
10939 case LE_EXPR:
10940 if (! unsignedp && integer_all_onesp (arg1))
10941 arg1 = integer_zero_node, code = LT;
10942 else
10943 code = unsignedp ? LEU : LE;
10944 break;
10945 case GT_EXPR:
10946 if (! unsignedp && integer_all_onesp (arg1))
10947 arg1 = integer_zero_node, code = GE;
10948 else
10949 code = unsignedp ? GTU : GT;
10950 break;
10951 case GE_EXPR:
10952 if (integer_onep (arg1))
10953 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10954 else
10955 code = unsignedp ? GEU : GE;
10956 break;
10958 case UNORDERED_EXPR:
10959 code = UNORDERED;
10960 break;
10961 case ORDERED_EXPR:
10962 code = ORDERED;
10963 break;
10964 case UNLT_EXPR:
10965 code = UNLT;
10966 break;
10967 case UNLE_EXPR:
10968 code = UNLE;
10969 break;
10970 case UNGT_EXPR:
10971 code = UNGT;
10972 break;
10973 case UNGE_EXPR:
10974 code = UNGE;
10975 break;
10976 case UNEQ_EXPR:
10977 code = UNEQ;
10978 break;
10979 case LTGT_EXPR:
10980 code = LTGT;
10981 break;
10983 default:
10984 gcc_unreachable ();
10987 /* Put a constant second. */
10988 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10989 || TREE_CODE (arg0) == FIXED_CST)
10991 std::swap (arg0, arg1);
10992 code = swap_condition (code);

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
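  /* For example, "(x & 4) != 0" expands as "(x >> 2) & 1", and
     "(x & 4) == 0" as "((x >> 2) & 1) ^ 1".  */
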
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);
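  /* A 1-bit signed result type can hold only 0 and -1, so in that case
     ask emit_store_flag_force for a 0/-1 result (a normalizep of -1)
     rather than the usual 0/1.  */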
  /* Try a cstore if possible.  */
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
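/* The casesi pattern takes five operands: the index (an SImode value),
   the lower bound, the range (largest index minus smallest), the label
   preceding the dispatch table, and the out-of-range label; they are
   marshalled through the create_*_operand calls below.  */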
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
{
  struct expand_operand ops[5];
  machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! targetm.have_casesi ())
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (targetm.code_for_casesi, 5, ops);
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */
static void
do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
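  /* For example, for case values 3 through 8 the caller passes INDEX
     minus 3 and a RANGE of 5; an original index of 2 wraps around to
     a huge unsigned value in the subtraction, so the single GTU test
     below sends it to the default label along with anything above 8.  */
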
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));
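  /* INDEX now holds the address of the selected table entry (prior to
     any PIC adjustment below): with 4-byte entries, entry N lives at
     TABLE_LABEL + N * 4.  */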

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (targetm.gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! targetm.have_tablejump ())
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
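/* For example, assuming a V4SImode vector, the VECTOR_CST { 1, 2, 3, 4 }
   becomes (const_vector:V4SI [1 2 3 4]), with each element entered in
   the rtvec as a CONST_INT.  */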
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
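  /* For example, the C++ front end passes "gxx": with DWARF2 unwind
     info this yields the familiar "__gxx_personality_v0", and with
     setjmp/longjmp exceptions "__gxx_personality_sj0".  */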

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

/* Returns a tree for the size of EXP in bytes.  */

static tree
tree_expr_size (const_tree exp)
{
  if (DECL_P (exp)
      && DECL_SIZE_UNIT (exp) != 0)
    return DECL_SIZE_UNIT (exp);
  else
    return size_in_bytes (TREE_TYPE (exp));
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */
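/* For example, with 4-byte ints an object of type int[10] yields 40,
   while a variable-length array yields -1.  */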
static HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !tree_fits_shwi_p (size))
    return -1;

  return tree_to_shwi (size);
}

#include "gt-expr.h"