gcc/testsuite/
[official-gcc.git] / gcc / expr.c
blob2868d9d3443e7ce568793d9bff453038e62f7a83
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
71 #ifndef STACK_PUSH_CODE
72 #ifdef STACK_GROWS_DOWNWARD
73 #define STACK_PUSH_CODE PRE_DEC
74 #else
75 #define STACK_PUSH_CODE PRE_INC
76 #endif
77 #endif
80 /* If this is nonzero, we do not bother generating VOLATILE
81 around volatile memory references, and we are willing to
82 output indirect addresses. If cse is to follow, we reject
83 indirect addresses so a useful potential cse is generated;
84 if it is used only once, instruction combination will produce
85 the same indirect address eventually. */
86 int cse_not_expected;
88 /* This structure is used by move_by_pieces to describe the move to
89 be performed. */
90 struct move_by_pieces_d
92 rtx to;
93 rtx to_addr;
94 int autinc_to;
95 int explicit_inc_to;
96 rtx from;
97 rtx from_addr;
98 int autinc_from;
99 int explicit_inc_from;
100 unsigned HOST_WIDE_INT len;
101 HOST_WIDE_INT offset;
102 int reverse;
105 /* This structure is used by store_by_pieces to describe the clear to
106 be performed. */
108 struct store_by_pieces_d
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
117 void *constfundata;
118 int reverse;
121 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
122 struct move_by_pieces_d *);
123 static bool block_move_libcall_safe_for_call_parm (void);
124 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
125 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
126 unsigned HOST_WIDE_INT);
127 static tree emit_block_move_libcall_fn (int);
128 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
129 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
130 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
131 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
132 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
133 struct store_by_pieces_d *);
134 static tree clear_storage_libcall_fn (int);
135 static rtx compress_float_constant (rtx, rtx);
136 static rtx get_subtarget (rtx);
137 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
138 HOST_WIDE_INT, enum machine_mode,
139 tree, int, alias_set_type);
140 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
141 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
143 enum machine_mode, tree, alias_set_type, bool);
145 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
147 static int is_aligning_offset (const_tree, const_tree);
148 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
149 enum expand_modifier);
150 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
151 static rtx do_store_flag (sepops, rtx, enum machine_mode);
152 #ifdef PUSH_ROUNDING
153 static void emit_single_push_insn (enum machine_mode, rtx, tree);
154 #endif
155 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
156 static rtx const_vector_from_tree (tree);
157 static void write_complex_part (rtx, rtx, bool);
159 /* This macro is used to determine whether move_by_pieces should be called
160 to perform a structure copy. */
161 #ifndef MOVE_BY_PIECES_P
162 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
163 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
164 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
165 #endif
167 /* This macro is used to determine whether clear_by_pieces should be
168 called to clear storage. */
169 #ifndef CLEAR_BY_PIECES_P
170 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
171 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
172 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
173 #endif
175 /* This macro is used to determine whether store_by_pieces should be
176 called to "memset" storage with byte values other than zero. */
177 #ifndef SET_BY_PIECES_P
178 #define SET_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
180 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
181 #endif
183 /* This macro is used to determine whether store_by_pieces should be
184 called to "memcpy" storage when the source is a constant string. */
185 #ifndef STORE_BY_PIECES_P
186 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
189 #endif
191 /* This is run to set up which modes can be used
192 directly in memory and to initialize the block move optab. It is run
193 at the beginning of compilation and when the target is reinitialized. */
195 void
196 init_expr_target (void)
198 rtx insn, pat;
199 enum machine_mode mode;
200 int num_clobbers;
201 rtx mem, mem1;
202 rtx reg;
204 /* Try indexing by frame ptr and try by stack ptr.
205 It is known that on the Convex the stack ptr isn't a valid index.
206 With luck, one or the other is valid on any machine. */
207 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
208 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
210 /* A scratch register we can modify in-place below to avoid
211 useless RTL allocations. */
212 reg = gen_rtx_REG (VOIDmode, -1);
214 insn = rtx_alloc (INSN);
215 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
216 PATTERN (insn) = pat;
218 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
219 mode = (enum machine_mode) ((int) mode + 1))
221 int regno;
223 direct_load[(int) mode] = direct_store[(int) mode] = 0;
224 PUT_MODE (mem, mode);
225 PUT_MODE (mem1, mode);
226 PUT_MODE (reg, mode);
228 /* See if there is some register that can be used in this mode and
229 directly loaded or stored from memory. */
231 if (mode != VOIDmode && mode != BLKmode)
232 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
233 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
234 regno++)
236 if (! HARD_REGNO_MODE_OK (regno, mode))
237 continue;
239 SET_REGNO (reg, regno);
241 SET_SRC (pat) = mem;
242 SET_DEST (pat) = reg;
243 if (recog (pat, insn, &num_clobbers) >= 0)
244 direct_load[(int) mode] = 1;
246 SET_SRC (pat) = mem1;
247 SET_DEST (pat) = reg;
248 if (recog (pat, insn, &num_clobbers) >= 0)
249 direct_load[(int) mode] = 1;
251 SET_SRC (pat) = reg;
252 SET_DEST (pat) = mem;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_store[(int) mode] = 1;
256 SET_SRC (pat) = reg;
257 SET_DEST (pat) = mem1;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_store[(int) mode] = 1;
263 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
265 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
266 mode = GET_MODE_WIDER_MODE (mode))
268 enum machine_mode srcmode;
269 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
270 srcmode = GET_MODE_WIDER_MODE (srcmode))
272 enum insn_code ic;
274 ic = can_extend_p (mode, srcmode, 0);
275 if (ic == CODE_FOR_nothing)
276 continue;
278 PUT_MODE (mem, srcmode);
280 if (insn_operand_matches (ic, 1, mem))
281 float_extend_from_mem[mode][srcmode] = true;
286 /* This is run at the start of compiling a function. */
288 void
289 init_expr (void)
291 memset (&crtl->expr, 0, sizeof (crtl->expr));
294 /* Copy data from FROM to TO, where the machine modes are not the same.
295 Both modes may be integer, or both may be floating, or both may be
296 fixed-point.
297 UNSIGNEDP should be nonzero if FROM is an unsigned type.
298 This causes zero-extension instead of sign-extension. */
300 void
301 convert_move (rtx to, rtx from, int unsignedp)
303 enum machine_mode to_mode = GET_MODE (to);
304 enum machine_mode from_mode = GET_MODE (from);
305 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
306 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
307 enum insn_code code;
308 rtx libcall;
310 /* rtx code for making an equivalent value. */
311 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
312 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
315 gcc_assert (to_real == from_real);
316 gcc_assert (to_mode != BLKmode);
317 gcc_assert (from_mode != BLKmode);
319 /* If the source and destination are already the same, then there's
320 nothing to do. */
321 if (to == from)
322 return;
324 /* If FROM is a SUBREG that indicates that we have already done at least
325 the required extension, strip it. We don't handle such SUBREGs as
326 TO here. */
328 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
329 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
330 >= GET_MODE_PRECISION (to_mode))
331 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
332 from = gen_lowpart (to_mode, from), from_mode = to_mode;
334 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
336 if (to_mode == from_mode
337 || (from_mode == VOIDmode && CONSTANT_P (from)))
339 emit_move_insn (to, from);
340 return;
343 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
345 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
347 if (VECTOR_MODE_P (to_mode))
348 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
349 else
350 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
352 emit_move_insn (to, from);
353 return;
356 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
358 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
359 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
360 return;
363 if (to_real)
365 rtx value, insns;
366 convert_optab tab;
368 gcc_assert ((GET_MODE_PRECISION (from_mode)
369 != GET_MODE_PRECISION (to_mode))
370 || (DECIMAL_FLOAT_MODE_P (from_mode)
371 != DECIMAL_FLOAT_MODE_P (to_mode)));
373 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
374 /* Conversion between decimal float and binary float, same size. */
375 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
376 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
377 tab = sext_optab;
378 else
379 tab = trunc_optab;
381 /* Try converting directly if the insn is supported. */
383 code = convert_optab_handler (tab, to_mode, from_mode);
384 if (code != CODE_FOR_nothing)
386 emit_unop_insn (code, to, from,
387 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
388 return;
391 /* Otherwise use a libcall. */
392 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
394 /* Is this conversion implemented yet? */
395 gcc_assert (libcall);
397 start_sequence ();
398 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
399 1, from, from_mode);
400 insns = get_insns ();
401 end_sequence ();
402 emit_libcall_block (insns, to, value,
403 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
404 from)
405 : gen_rtx_FLOAT_EXTEND (to_mode, from));
406 return;
409 /* Handle pointer conversion. */ /* SPEE 900220. */
410 /* Targets are expected to provide conversion insns between PxImode and
411 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
412 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
414 enum machine_mode full_mode
415 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
417 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
418 != CODE_FOR_nothing);
420 if (full_mode != from_mode)
421 from = convert_to_mode (full_mode, from, unsignedp);
422 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
423 to, from, UNKNOWN);
424 return;
426 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
428 rtx new_from;
429 enum machine_mode full_mode
430 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
431 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
432 enum insn_code icode;
434 icode = convert_optab_handler (ctab, full_mode, from_mode);
435 gcc_assert (icode != CODE_FOR_nothing);
437 if (to_mode == full_mode)
439 emit_unop_insn (icode, to, from, UNKNOWN);
440 return;
443 new_from = gen_reg_rtx (full_mode);
444 emit_unop_insn (icode, new_from, from, UNKNOWN);
446 /* else proceed to integer conversions below. */
447 from_mode = full_mode;
448 from = new_from;
451 /* Make sure both are fixed-point modes or both are not. */
452 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
453 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
454 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
456 /* If we widen from_mode to to_mode and they are in the same class,
457 we won't saturate the result.
458 Otherwise, always saturate the result to play safe. */
459 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
460 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
461 expand_fixed_convert (to, from, 0, 0);
462 else
463 expand_fixed_convert (to, from, 0, 1);
464 return;
467 /* Now both modes are integers. */
469 /* Handle expanding beyond a word. */
470 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
471 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
473 rtx insns;
474 rtx lowpart;
475 rtx fill_value;
476 rtx lowfrom;
477 int i;
478 enum machine_mode lowpart_mode;
479 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
481 /* Try converting directly if the insn is supported. */
482 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
483 != CODE_FOR_nothing)
485 /* If FROM is a SUBREG, put it into a register. Do this
486 so that we always generate the same set of insns for
487 better cse'ing; if an intermediate assignment occurred,
488 we won't be doing the operation directly on the SUBREG. */
489 if (optimize > 0 && GET_CODE (from) == SUBREG)
490 from = force_reg (from_mode, from);
491 emit_unop_insn (code, to, from, equiv_code);
492 return;
494 /* Next, try converting via full word. */
495 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
496 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
497 != CODE_FOR_nothing))
499 rtx word_to = gen_reg_rtx (word_mode);
500 if (REG_P (to))
502 if (reg_overlap_mentioned_p (to, from))
503 from = force_reg (from_mode, from);
504 emit_clobber (to);
506 convert_move (word_to, from, unsignedp);
507 emit_unop_insn (code, to, word_to, equiv_code);
508 return;
511 /* No special multiword conversion insn; do it by hand. */
512 start_sequence ();
514 /* Since we will turn this into a no conflict block, we must ensure the
515 the source does not overlap the target so force it into an isolated
516 register when maybe so. Likewise for any MEM input, since the
517 conversion sequence might require several references to it and we
518 must ensure we're getting the same value every time. */
520 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
521 from = force_reg (from_mode, from);
523 /* Get a copy of FROM widened to a word, if necessary. */
524 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
525 lowpart_mode = word_mode;
526 else
527 lowpart_mode = from_mode;
529 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
531 lowpart = gen_lowpart (lowpart_mode, to);
532 emit_move_insn (lowpart, lowfrom);
534 /* Compute the value to put in each remaining word. */
535 if (unsignedp)
536 fill_value = const0_rtx;
537 else
538 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
539 LT, lowfrom, const0_rtx,
540 lowpart_mode, 0, -1);
542 /* Fill the remaining words. */
543 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
545 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
546 rtx subword = operand_subword (to, index, 1, to_mode);
548 gcc_assert (subword);
550 if (fill_value != subword)
551 emit_move_insn (subword, fill_value);
554 insns = get_insns ();
555 end_sequence ();
557 emit_insn (insns);
558 return;
561 /* Truncating multi-word to a word or less. */
562 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
563 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
565 if (!((MEM_P (from)
566 && ! MEM_VOLATILE_P (from)
567 && direct_load[(int) to_mode]
568 && ! mode_dependent_address_p (XEXP (from, 0),
569 MEM_ADDR_SPACE (from)))
570 || REG_P (from)
571 || GET_CODE (from) == SUBREG))
572 from = force_reg (from_mode, from);
573 convert_move (to, gen_lowpart (word_mode, from), 0);
574 return;
577 /* Now follow all the conversions between integers
578 no more than a word long. */
580 /* For truncation, usually we can just refer to FROM in a narrower mode. */
581 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
582 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
584 if (!((MEM_P (from)
585 && ! MEM_VOLATILE_P (from)
586 && direct_load[(int) to_mode]
587 && ! mode_dependent_address_p (XEXP (from, 0),
588 MEM_ADDR_SPACE (from)))
589 || REG_P (from)
590 || GET_CODE (from) == SUBREG))
591 from = force_reg (from_mode, from);
592 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
593 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
594 from = copy_to_reg (from);
595 emit_move_insn (to, gen_lowpart (to_mode, from));
596 return;
599 /* Handle extension. */
600 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
602 /* Convert directly if that works. */
603 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
604 != CODE_FOR_nothing)
606 emit_unop_insn (code, to, from, equiv_code);
607 return;
609 else
611 enum machine_mode intermediate;
612 rtx tmp;
613 int shift_amount;
615 /* Search for a mode to convert via. */
616 for (intermediate = from_mode; intermediate != VOIDmode;
617 intermediate = GET_MODE_WIDER_MODE (intermediate))
618 if (((can_extend_p (to_mode, intermediate, unsignedp)
619 != CODE_FOR_nothing)
620 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
621 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
622 && (can_extend_p (intermediate, from_mode, unsignedp)
623 != CODE_FOR_nothing))
625 convert_move (to, convert_to_mode (intermediate, from,
626 unsignedp), unsignedp);
627 return;
630 /* No suitable intermediate mode.
631 Generate what we need with shifts. */
632 shift_amount = (GET_MODE_PRECISION (to_mode)
633 - GET_MODE_PRECISION (from_mode));
634 from = gen_lowpart (to_mode, force_reg (from_mode, from));
635 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
636 to, unsignedp);
637 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
638 to, unsignedp);
639 if (tmp != to)
640 emit_move_insn (to, tmp);
641 return;
645 /* Support special truncate insns for certain modes. */
646 if (convert_optab_handler (trunc_optab, to_mode,
647 from_mode) != CODE_FOR_nothing)
649 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
650 to, from, UNKNOWN);
651 return;
654 /* Handle truncation of volatile memrefs, and so on;
655 the things that couldn't be truncated directly,
656 and for which there was no special instruction.
658 ??? Code above formerly short-circuited this, for most integer
659 mode pairs, with a force_reg in from_mode followed by a recursive
660 call to this routine. Appears always to have been wrong. */
661 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
663 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
664 emit_move_insn (to, temp);
665 return;
668 /* Mode combination is not recognized. */
669 gcc_unreachable ();
672 /* Return an rtx for a value that would result
673 from converting X to mode MODE.
674 Both X and MODE may be floating, or both integer.
675 UNSIGNEDP is nonzero if X is an unsigned value.
676 This can be done by referring to a part of X in place
677 or by copying to a new temporary with conversion. */
680 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
682 return convert_modes (mode, VOIDmode, x, unsignedp);
685 /* Return an rtx for a value that would result
686 from converting X from mode OLDMODE to mode MODE.
687 Both modes may be floating, or both integer.
688 UNSIGNEDP is nonzero if X is an unsigned value.
690 This can be done by referring to a part of X in place
691 or by copying to a new temporary with conversion.
693 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
696 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
698 rtx temp;
700 /* If FROM is a SUBREG that indicates that we have already done at least
701 the required extension, strip it. */
703 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
704 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
705 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
706 x = gen_lowpart (mode, SUBREG_REG (x));
708 if (GET_MODE (x) != VOIDmode)
709 oldmode = GET_MODE (x);
711 if (mode == oldmode)
712 return x;
714 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
716 /* If the caller did not tell us the old mode, then there is not
717 much to do with respect to canonicalization. We have to
718 assume that all the bits are significant. */
719 if (GET_MODE_CLASS (oldmode) != MODE_INT)
720 oldmode = MAX_MODE_INT;
721 wide_int w = wide_int::from (std::make_pair (x, oldmode),
722 GET_MODE_PRECISION (mode),
723 unsignedp ? UNSIGNED : SIGNED);
724 return immed_wide_int_const (w, mode);
727 /* We can do this with a gen_lowpart if both desired and current modes
728 are integer, and this is either a constant integer, a register, or a
729 non-volatile MEM. */
730 if (GET_MODE_CLASS (mode) == MODE_INT
731 && GET_MODE_CLASS (oldmode) == MODE_INT
732 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
733 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
734 || (REG_P (x)
735 && (!HARD_REGISTER_P (x)
736 || HARD_REGNO_MODE_OK (REGNO (x), mode))
737 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
739 return gen_lowpart (mode, x);
741 /* Converting from integer constant into mode is always equivalent to an
742 subreg operation. */
743 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
745 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
746 return simplify_gen_subreg (mode, x, oldmode, 0);
749 temp = gen_reg_rtx (mode);
750 convert_move (temp, x, unsignedp);
751 return temp;
754 /* Return the largest alignment we can use for doing a move (or store)
755 of MAX_PIECES. ALIGN is the largest alignment we could use. */
757 static unsigned int
758 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
760 enum machine_mode tmode;
762 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
763 if (align >= GET_MODE_ALIGNMENT (tmode))
764 align = GET_MODE_ALIGNMENT (tmode);
765 else
767 enum machine_mode tmode, xmode;
769 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
770 tmode != VOIDmode;
771 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
772 if (GET_MODE_SIZE (tmode) > max_pieces
773 || SLOW_UNALIGNED_ACCESS (tmode, align))
774 break;
776 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
779 return align;
782 /* Return the widest integer mode no wider than SIZE. If no such mode
783 can be found, return VOIDmode. */
785 static enum machine_mode
786 widest_int_mode_for_size (unsigned int size)
788 enum machine_mode tmode, mode = VOIDmode;
790 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
791 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
792 if (GET_MODE_SIZE (tmode) < size)
793 mode = tmode;
795 return mode;
798 /* STORE_MAX_PIECES is the number of bytes at a time that we can
799 store efficiently. Due to internal GCC limitations, this is
800 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
801 for an immediate constant. */
803 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
805 /* Determine whether the LEN bytes can be moved by using several move
806 instructions. Return nonzero if a call to move_by_pieces should
807 succeed. */
810 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
811 unsigned int align ATTRIBUTE_UNUSED)
813 return MOVE_BY_PIECES_P (len, align);
816 /* Generate several move instructions to copy LEN bytes from block FROM to
817 block TO. (These are MEM rtx's with BLKmode).
819 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
820 used to push FROM to the stack.
822 ALIGN is maximum stack alignment we can assume.
824 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
825 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
826 stpcpy. */
829 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
830 unsigned int align, int endp)
832 struct move_by_pieces_d data;
833 enum machine_mode to_addr_mode;
834 enum machine_mode from_addr_mode = get_address_mode (from);
835 rtx to_addr, from_addr = XEXP (from, 0);
836 unsigned int max_size = MOVE_MAX_PIECES + 1;
837 enum insn_code icode;
839 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
841 data.offset = 0;
842 data.from_addr = from_addr;
843 if (to)
845 to_addr_mode = get_address_mode (to);
846 to_addr = XEXP (to, 0);
847 data.to = to;
848 data.autinc_to
849 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
850 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
851 data.reverse
852 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
854 else
856 to_addr_mode = VOIDmode;
857 to_addr = NULL_RTX;
858 data.to = NULL_RTX;
859 data.autinc_to = 1;
860 #ifdef STACK_GROWS_DOWNWARD
861 data.reverse = 1;
862 #else
863 data.reverse = 0;
864 #endif
866 data.to_addr = to_addr;
867 data.from = from;
868 data.autinc_from
869 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
870 || GET_CODE (from_addr) == POST_INC
871 || GET_CODE (from_addr) == POST_DEC);
873 data.explicit_inc_from = 0;
874 data.explicit_inc_to = 0;
875 if (data.reverse) data.offset = len;
876 data.len = len;
878 /* If copying requires more than two move insns,
879 copy addresses to registers (to make displacements shorter)
880 and use post-increment if available. */
881 if (!(data.autinc_from && data.autinc_to)
882 && move_by_pieces_ninsns (len, align, max_size) > 2)
884 /* Find the mode of the largest move...
885 MODE might not be used depending on the definitions of the
886 USE_* macros below. */
887 enum machine_mode mode ATTRIBUTE_UNUSED
888 = widest_int_mode_for_size (max_size);
890 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
892 data.from_addr = copy_to_mode_reg (from_addr_mode,
893 plus_constant (from_addr_mode,
894 from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
900 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
908 data.to_addr = copy_to_mode_reg (to_addr_mode,
909 plus_constant (to_addr_mode,
910 to_addr, len));
911 data.autinc_to = 1;
912 data.explicit_inc_to = -1;
914 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
916 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
917 data.autinc_to = 1;
918 data.explicit_inc_to = 1;
920 if (!data.autinc_to && CONSTANT_P (to_addr))
921 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
924 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
926 /* First move what we can in the largest integer mode, then go to
927 successively smaller modes. */
929 while (max_size > 1 && data.len > 0)
931 enum machine_mode mode = widest_int_mode_for_size (max_size);
933 if (mode == VOIDmode)
934 break;
936 icode = optab_handler (mov_optab, mode);
937 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
938 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
940 max_size = GET_MODE_SIZE (mode);
943 /* The code above should have handled everything. */
944 gcc_assert (!data.len);
946 if (endp)
948 rtx to1;
950 gcc_assert (!data.reverse);
951 if (data.autinc_to)
953 if (endp == 2)
955 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
956 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
957 else
958 data.to_addr = copy_to_mode_reg (to_addr_mode,
959 plus_constant (to_addr_mode,
960 data.to_addr,
961 -1));
963 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
964 data.offset);
966 else
968 if (endp == 2)
969 --data.offset;
970 to1 = adjust_address (data.to, QImode, data.offset);
972 return to1;
974 else
975 return data.to;
978 /* Return number of insns required to move L bytes by pieces.
979 ALIGN (in bits) is maximum alignment we can assume. */
981 unsigned HOST_WIDE_INT
982 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
983 unsigned int max_size)
985 unsigned HOST_WIDE_INT n_insns = 0;
987 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
989 while (max_size > 1 && l > 0)
991 enum machine_mode mode;
992 enum insn_code icode;
994 mode = widest_int_mode_for_size (max_size);
996 if (mode == VOIDmode)
997 break;
999 icode = optab_handler (mov_optab, mode);
1000 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1001 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1003 max_size = GET_MODE_SIZE (mode);
1006 gcc_assert (!l);
1007 return n_insns;
1010 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1011 with move instructions for mode MODE. GENFUN is the gen_... function
1012 to make a move insn for that mode. DATA has all the other info. */
1014 static void
1015 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1016 struct move_by_pieces_d *data)
1018 unsigned int size = GET_MODE_SIZE (mode);
1019 rtx to1 = NULL_RTX, from1;
1021 while (data->len >= size)
1023 if (data->reverse)
1024 data->offset -= size;
1026 if (data->to)
1028 if (data->autinc_to)
1029 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1030 data->offset);
1031 else
1032 to1 = adjust_address (data->to, mode, data->offset);
1035 if (data->autinc_from)
1036 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1037 data->offset);
1038 else
1039 from1 = adjust_address (data->from, mode, data->offset);
1041 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1042 emit_insn (gen_add2_insn (data->to_addr,
1043 gen_int_mode (-(HOST_WIDE_INT) size,
1044 GET_MODE (data->to_addr))));
1045 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1046 emit_insn (gen_add2_insn (data->from_addr,
1047 gen_int_mode (-(HOST_WIDE_INT) size,
1048 GET_MODE (data->from_addr))));
1050 if (data->to)
1051 emit_insn ((*genfun) (to1, from1));
1052 else
1054 #ifdef PUSH_ROUNDING
1055 emit_single_push_insn (mode, from1, NULL);
1056 #else
1057 gcc_unreachable ();
1058 #endif
1061 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1062 emit_insn (gen_add2_insn (data->to_addr,
1063 gen_int_mode (size,
1064 GET_MODE (data->to_addr))));
1065 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1066 emit_insn (gen_add2_insn (data->from_addr,
1067 gen_int_mode (size,
1068 GET_MODE (data->from_addr))));
1070 if (! data->reverse)
1071 data->offset += size;
1073 data->len -= size;
1077 /* Emit code to move a block Y to a block X. This may be done with
1078 string-move instructions, with multiple scalar move instructions,
1079 or with a library call.
1081 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1082 SIZE is an rtx that says how long they are.
1083 ALIGN is the maximum alignment we can assume they have.
1084 METHOD describes what kind of copy this is, and what mechanisms may be used.
1085 MIN_SIZE is the minimal size of block to move
1086 MAX_SIZE is the maximal size of block to move, if it can not be represented
1087 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1089 Return the address of the new block, if memcpy is called and returns it,
1090 0 otherwise. */
1093 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1094 unsigned int expected_align, HOST_WIDE_INT expected_size,
1095 unsigned HOST_WIDE_INT min_size,
1096 unsigned HOST_WIDE_INT max_size,
1097 unsigned HOST_WIDE_INT probable_max_size)
1099 bool may_use_call;
1100 rtx retval = 0;
1101 unsigned int align;
1103 gcc_assert (size);
1104 if (CONST_INT_P (size)
1105 && INTVAL (size) == 0)
1106 return 0;
1108 switch (method)
1110 case BLOCK_OP_NORMAL:
1111 case BLOCK_OP_TAILCALL:
1112 may_use_call = true;
1113 break;
1115 case BLOCK_OP_CALL_PARM:
1116 may_use_call = block_move_libcall_safe_for_call_parm ();
1118 /* Make inhibit_defer_pop nonzero around the library call
1119 to force it to pop the arguments right away. */
1120 NO_DEFER_POP;
1121 break;
1123 case BLOCK_OP_NO_LIBCALL:
1124 may_use_call = false;
1125 break;
1127 default:
1128 gcc_unreachable ();
1131 gcc_assert (MEM_P (x) && MEM_P (y));
1132 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1133 gcc_assert (align >= BITS_PER_UNIT);
1135 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1136 block copy is more efficient for other large modes, e.g. DCmode. */
1137 x = adjust_address (x, BLKmode, 0);
1138 y = adjust_address (y, BLKmode, 0);
1140 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1141 can be incorrect is coming from __builtin_memcpy. */
1142 if (CONST_INT_P (size))
1144 x = shallow_copy_rtx (x);
1145 y = shallow_copy_rtx (y);
1146 set_mem_size (x, INTVAL (size));
1147 set_mem_size (y, INTVAL (size));
1150 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1151 move_by_pieces (x, y, INTVAL (size), align, 0);
1152 else if (emit_block_move_via_movmem (x, y, size, align,
1153 expected_align, expected_size,
1154 min_size, max_size, probable_max_size))
1156 else if (may_use_call
1157 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1158 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1160 /* Since x and y are passed to a libcall, mark the corresponding
1161 tree EXPR as addressable. */
1162 tree y_expr = MEM_EXPR (y);
1163 tree x_expr = MEM_EXPR (x);
1164 if (y_expr)
1165 mark_addressable (y_expr);
1166 if (x_expr)
1167 mark_addressable (x_expr);
1168 retval = emit_block_move_via_libcall (x, y, size,
1169 method == BLOCK_OP_TAILCALL);
1172 else
1173 emit_block_move_via_loop (x, y, size, align);
1175 if (method == BLOCK_OP_CALL_PARM)
1176 OK_DEFER_POP;
1178 return retval;
1182 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1184 unsigned HOST_WIDE_INT max, min = 0;
1185 if (GET_CODE (size) == CONST_INT)
1186 min = max = UINTVAL (size);
1187 else
1188 max = GET_MODE_MASK (GET_MODE (size));
1189 return emit_block_move_hints (x, y, size, method, 0, -1,
1190 min, max, max);
1193 /* A subroutine of emit_block_move. Returns true if calling the
1194 block move libcall will not clobber any parameters which may have
1195 already been placed on the stack. */
1197 static bool
1198 block_move_libcall_safe_for_call_parm (void)
1200 #if defined (REG_PARM_STACK_SPACE)
1201 tree fn;
1202 #endif
1204 /* If arguments are pushed on the stack, then they're safe. */
1205 if (PUSH_ARGS)
1206 return true;
1208 /* If registers go on the stack anyway, any argument is sure to clobber
1209 an outgoing argument. */
1210 #if defined (REG_PARM_STACK_SPACE)
1211 fn = emit_block_move_libcall_fn (false);
1212 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1213 depend on its argument. */
1214 (void) fn;
1215 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1216 && REG_PARM_STACK_SPACE (fn) != 0)
1217 return false;
1218 #endif
1220 /* If any argument goes in memory, then it might clobber an outgoing
1221 argument. */
1223 CUMULATIVE_ARGS args_so_far_v;
1224 cumulative_args_t args_so_far;
1225 tree fn, arg;
1227 fn = emit_block_move_libcall_fn (false);
1228 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1229 args_so_far = pack_cumulative_args (&args_so_far_v);
1231 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1232 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1234 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1235 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1236 NULL_TREE, true);
1237 if (!tmp || !REG_P (tmp))
1238 return false;
1239 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1240 return false;
1241 targetm.calls.function_arg_advance (args_so_far, mode,
1242 NULL_TREE, true);
1245 return true;
1248 /* A subroutine of emit_block_move. Expand a movmem pattern;
1249 return true if successful. */
1251 static bool
1252 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1253 unsigned int expected_align, HOST_WIDE_INT expected_size,
1254 unsigned HOST_WIDE_INT min_size,
1255 unsigned HOST_WIDE_INT max_size,
1256 unsigned HOST_WIDE_INT probable_max_size)
1258 int save_volatile_ok = volatile_ok;
1259 enum machine_mode mode;
1261 if (expected_align < align)
1262 expected_align = align;
1263 if (expected_size != -1)
1265 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1266 expected_size = probable_max_size;
1267 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1268 expected_size = min_size;
1271 /* Since this is a move insn, we don't care about volatility. */
1272 volatile_ok = 1;
1274 /* Try the most limited insn first, because there's no point
1275 including more than one in the machine description unless
1276 the more limited one has some advantage. */
1278 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1279 mode = GET_MODE_WIDER_MODE (mode))
1281 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1283 if (code != CODE_FOR_nothing
1284 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1285 here because if SIZE is less than the mode mask, as it is
1286 returned by the macro, it will definitely be less than the
1287 actual mode mask. Since SIZE is within the Pmode address
1288 space, we limit MODE to Pmode. */
1289 && ((CONST_INT_P (size)
1290 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1291 <= (GET_MODE_MASK (mode) >> 1)))
1292 || max_size <= (GET_MODE_MASK (mode) >> 1)
1293 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1295 struct expand_operand ops[9];
1296 unsigned int nops;
1298 /* ??? When called via emit_block_move_for_call, it'd be
1299 nice if there were some way to inform the backend, so
1300 that it doesn't fail the expansion because it thinks
1301 emitting the libcall would be more efficient. */
1302 nops = insn_data[(int) code].n_generator_args;
1303 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1305 create_fixed_operand (&ops[0], x);
1306 create_fixed_operand (&ops[1], y);
1307 /* The check above guarantees that this size conversion is valid. */
1308 create_convert_operand_to (&ops[2], size, mode, true);
1309 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1310 if (nops >= 6)
1312 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1313 create_integer_operand (&ops[5], expected_size);
1315 if (nops >= 8)
1317 create_integer_operand (&ops[6], min_size);
1318 /* If we can not represent the maximal size,
1319 make parameter NULL. */
1320 if ((HOST_WIDE_INT) max_size != -1)
1321 create_integer_operand (&ops[7], max_size);
1322 else
1323 create_fixed_operand (&ops[7], NULL);
1325 if (nops == 9)
1327 /* If we can not represent the maximal size,
1328 make parameter NULL. */
1329 if ((HOST_WIDE_INT) probable_max_size != -1)
1330 create_integer_operand (&ops[8], probable_max_size);
1331 else
1332 create_fixed_operand (&ops[8], NULL);
1334 if (maybe_expand_insn (code, nops, ops))
1336 volatile_ok = save_volatile_ok;
1337 return true;
1342 volatile_ok = save_volatile_ok;
1343 return false;
1346 /* A subroutine of emit_block_move. Expand a call to memcpy.
1347 Return the return value from memcpy, 0 otherwise. */
1350 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1352 rtx dst_addr, src_addr;
1353 tree call_expr, fn, src_tree, dst_tree, size_tree;
1354 enum machine_mode size_mode;
1355 rtx retval;
1357 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1358 pseudos. We can then place those new pseudos into a VAR_DECL and
1359 use them later. */
1361 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1362 src_addr = copy_addr_to_reg (XEXP (src, 0));
1364 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1365 src_addr = convert_memory_address (ptr_mode, src_addr);
1367 dst_tree = make_tree (ptr_type_node, dst_addr);
1368 src_tree = make_tree (ptr_type_node, src_addr);
1370 size_mode = TYPE_MODE (sizetype);
1372 size = convert_to_mode (size_mode, size, 1);
1373 size = copy_to_mode_reg (size_mode, size);
1375 /* It is incorrect to use the libcall calling conventions to call
1376 memcpy in this context. This could be a user call to memcpy and
1377 the user may wish to examine the return value from memcpy. For
1378 targets where libcalls and normal calls have different conventions
1379 for returning pointers, we could end up generating incorrect code. */
1381 size_tree = make_tree (sizetype, size);
1383 fn = emit_block_move_libcall_fn (true);
1384 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1385 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1387 retval = expand_normal (call_expr);
1389 return retval;
1392 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1393 for the function we use for block copies. */
1395 static GTY(()) tree block_move_fn;
1397 void
1398 init_block_move_fn (const char *asmspec)
1400 if (!block_move_fn)
1402 tree args, fn, attrs, attr_args;
1404 fn = get_identifier ("memcpy");
1405 args = build_function_type_list (ptr_type_node, ptr_type_node,
1406 const_ptr_type_node, sizetype,
1407 NULL_TREE);
1409 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1410 DECL_EXTERNAL (fn) = 1;
1411 TREE_PUBLIC (fn) = 1;
1412 DECL_ARTIFICIAL (fn) = 1;
1413 TREE_NOTHROW (fn) = 1;
1414 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1415 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1417 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1418 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1420 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1422 block_move_fn = fn;
1425 if (asmspec)
1426 set_user_assembler_name (block_move_fn, asmspec);
1429 static tree
1430 emit_block_move_libcall_fn (int for_call)
1432 static bool emitted_extern;
1434 if (!block_move_fn)
1435 init_block_move_fn (NULL);
1437 if (for_call && !emitted_extern)
1439 emitted_extern = true;
1440 make_decl_rtl (block_move_fn);
1443 return block_move_fn;
1446 /* A subroutine of emit_block_move. Copy the data via an explicit
1447 loop. This is used only when libcalls are forbidden. */
1448 /* ??? It'd be nice to copy in hunks larger than QImode. */
1450 static void
1451 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1452 unsigned int align ATTRIBUTE_UNUSED)
1454 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1455 enum machine_mode x_addr_mode = get_address_mode (x);
1456 enum machine_mode y_addr_mode = get_address_mode (y);
1457 enum machine_mode iter_mode;
1459 iter_mode = GET_MODE (size);
1460 if (iter_mode == VOIDmode)
1461 iter_mode = word_mode;
1463 top_label = gen_label_rtx ();
1464 cmp_label = gen_label_rtx ();
1465 iter = gen_reg_rtx (iter_mode);
1467 emit_move_insn (iter, const0_rtx);
1469 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1470 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1471 do_pending_stack_adjust ();
1473 emit_jump (cmp_label);
1474 emit_label (top_label);
1476 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1477 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1479 if (x_addr_mode != y_addr_mode)
1480 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1481 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1483 x = change_address (x, QImode, x_addr);
1484 y = change_address (y, QImode, y_addr);
1486 emit_move_insn (x, y);
1488 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1489 true, OPTAB_LIB_WIDEN);
1490 if (tmp != iter)
1491 emit_move_insn (iter, tmp);
1493 emit_label (cmp_label);
1495 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1496 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1499 /* Copy all or part of a value X into registers starting at REGNO.
1500 The number of registers to be filled is NREGS. */
1502 void
1503 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1505 int i;
1506 #ifdef HAVE_load_multiple
1507 rtx pat;
1508 rtx last;
1509 #endif
1511 if (nregs == 0)
1512 return;
1514 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1515 x = validize_mem (force_const_mem (mode, x));
1517 /* See if the machine can do this with a load multiple insn. */
1518 #ifdef HAVE_load_multiple
1519 if (HAVE_load_multiple)
1521 last = get_last_insn ();
1522 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1523 GEN_INT (nregs));
1524 if (pat)
1526 emit_insn (pat);
1527 return;
1529 else
1530 delete_insns_since (last);
1532 #endif
1534 for (i = 0; i < nregs; i++)
1535 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1536 operand_subword_force (x, i, mode));
1539 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1540 The number of registers to be filled is NREGS. */
1542 void
1543 move_block_from_reg (int regno, rtx x, int nregs)
1545 int i;
1547 if (nregs == 0)
1548 return;
1550 /* See if the machine can do this with a store multiple insn. */
1551 #ifdef HAVE_store_multiple
1552 if (HAVE_store_multiple)
1554 rtx last = get_last_insn ();
1555 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1556 GEN_INT (nregs));
1557 if (pat)
1559 emit_insn (pat);
1560 return;
1562 else
1563 delete_insns_since (last);
1565 #endif
1567 for (i = 0; i < nregs; i++)
1569 rtx tem = operand_subword (x, i, 1, BLKmode);
1571 gcc_assert (tem);
1573 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1577 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1578 ORIG, where ORIG is a non-consecutive group of registers represented by
1579 a PARALLEL. The clone is identical to the original except in that the
1580 original set of registers is replaced by a new set of pseudo registers.
1581 The new set has the same modes as the original set. */
1584 gen_group_rtx (rtx orig)
1586 int i, length;
1587 rtx *tmps;
1589 gcc_assert (GET_CODE (orig) == PARALLEL);
1591 length = XVECLEN (orig, 0);
1592 tmps = XALLOCAVEC (rtx, length);
1594 /* Skip a NULL entry in first slot. */
1595 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1597 if (i)
1598 tmps[0] = 0;
1600 for (; i < length; i++)
1602 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1603 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1605 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1608 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1611 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1612 except that values are placed in TMPS[i], and must later be moved
1613 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1615 static void
1616 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1618 rtx src;
1619 int start, i;
1620 enum machine_mode m = GET_MODE (orig_src);
1622 gcc_assert (GET_CODE (dst) == PARALLEL);
1624 if (m != VOIDmode
1625 && !SCALAR_INT_MODE_P (m)
1626 && !MEM_P (orig_src)
1627 && GET_CODE (orig_src) != CONCAT)
1629 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1630 if (imode == BLKmode)
1631 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1632 else
1633 src = gen_reg_rtx (imode);
1634 if (imode != BLKmode)
1635 src = gen_lowpart (GET_MODE (orig_src), src);
1636 emit_move_insn (src, orig_src);
1637 /* ...and back again. */
1638 if (imode != BLKmode)
1639 src = gen_lowpart (imode, src);
1640 emit_group_load_1 (tmps, dst, src, type, ssize);
1641 return;
1644 /* Check for a NULL entry, used to indicate that the parameter goes
1645 both on the stack and in registers. */
1646 if (XEXP (XVECEXP (dst, 0, 0), 0))
1647 start = 0;
1648 else
1649 start = 1;
1651 /* Process the pieces. */
1652 for (i = start; i < XVECLEN (dst, 0); i++)
1654 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1655 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1656 unsigned int bytelen = GET_MODE_SIZE (mode);
1657 int shift = 0;
1659 /* Handle trailing fragments that run over the size of the struct. */
1660 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1662 /* Arrange to shift the fragment to where it belongs.
1663 extract_bit_field loads to the lsb of the reg. */
1664 if (
1665 #ifdef BLOCK_REG_PADDING
1666 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1667 == (BYTES_BIG_ENDIAN ? upward : downward)
1668 #else
1669 BYTES_BIG_ENDIAN
1670 #endif
1672 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1673 bytelen = ssize - bytepos;
1674 gcc_assert (bytelen > 0);
1677 /* If we won't be loading directly from memory, protect the real source
1678 from strange tricks we might play; but make sure that the source can
1679 be loaded directly into the destination. */
1680 src = orig_src;
1681 if (!MEM_P (orig_src)
1682 && (!CONSTANT_P (orig_src)
1683 || (GET_MODE (orig_src) != mode
1684 && GET_MODE (orig_src) != VOIDmode)))
1686 if (GET_MODE (orig_src) == VOIDmode)
1687 src = gen_reg_rtx (mode);
1688 else
1689 src = gen_reg_rtx (GET_MODE (orig_src));
1691 emit_move_insn (src, orig_src);
1694 /* Optimize the access just a bit. */
1695 if (MEM_P (src)
1696 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1697 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1698 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1699 && bytelen == GET_MODE_SIZE (mode))
1701 tmps[i] = gen_reg_rtx (mode);
1702 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1704 else if (COMPLEX_MODE_P (mode)
1705 && GET_MODE (src) == mode
1706 && bytelen == GET_MODE_SIZE (mode))
1707 /* Let emit_move_complex do the bulk of the work. */
1708 tmps[i] = src;
1709 else if (GET_CODE (src) == CONCAT)
1711 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1712 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1714 if ((bytepos == 0 && bytelen == slen0)
1715 || (bytepos != 0 && bytepos + bytelen <= slen))
1717 /* The following assumes that the concatenated objects all
1718 have the same size. In this case, a simple calculation
1719 can be used to determine the object and the bit field
1720 to be extracted. */
1721 tmps[i] = XEXP (src, bytepos / slen0);
1722 if (! CONSTANT_P (tmps[i])
1723 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1724 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1725 (bytepos % slen0) * BITS_PER_UNIT,
1726 1, NULL_RTX, mode, mode);
1728 else
1730 rtx mem;
1732 gcc_assert (!bytepos);
1733 mem = assign_stack_temp (GET_MODE (src), slen);
1734 emit_move_insn (mem, src);
1735 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1736 0, 1, NULL_RTX, mode, mode);
1739 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1740 SIMD register, which is currently broken. While we get GCC
1741 to emit proper RTL for these cases, let's dump to memory. */
1742 else if (VECTOR_MODE_P (GET_MODE (dst))
1743 && REG_P (src))
1745 int slen = GET_MODE_SIZE (GET_MODE (src));
1746 rtx mem;
1748 mem = assign_stack_temp (GET_MODE (src), slen);
1749 emit_move_insn (mem, src);
1750 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1752 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1753 && XVECLEN (dst, 0) > 1)
1754 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1755 else if (CONSTANT_P (src))
1757 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1759 if (len == ssize)
1760 tmps[i] = src;
1761 else
1763 rtx first, second;
1765 /* TODO: const_wide_int can have sizes other than this... */
1766 gcc_assert (2 * len == ssize);
1767 split_double (src, &first, &second);
1768 if (i)
1769 tmps[i] = second;
1770 else
1771 tmps[i] = first;
1774 else if (REG_P (src) && GET_MODE (src) == mode)
1775 tmps[i] = src;
1776 else
1777 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1778 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1779 mode, mode);
1781 if (shift)
1782 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1783 shift, tmps[i], 0);
1787 /* Emit code to move a block SRC of type TYPE to a block DST,
1788 where DST is non-consecutive registers represented by a PARALLEL.
1789 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1790 if not known. */
1792 void
1793 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1795 rtx *tmps;
1796 int i;
1798 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1799 emit_group_load_1 (tmps, dst, src, type, ssize);
1801 /* Copy the extracted pieces into the proper (probable) hard regs. */
1802 for (i = 0; i < XVECLEN (dst, 0); i++)
1804 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1805 if (d == NULL)
1806 continue;
1807 emit_move_insn (d, tmps[i]);
1811 /* Similar, but load SRC into new pseudos in a format that looks like
1812 PARALLEL. This can later be fed to emit_group_move to get things
1813 in the right place. */
1816 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1818 rtvec vec;
1819 int i;
1821 vec = rtvec_alloc (XVECLEN (parallel, 0));
1822 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1824 /* Convert the vector to look just like the original PARALLEL, except
1825 with the computed values. */
1826 for (i = 0; i < XVECLEN (parallel, 0); i++)
1828 rtx e = XVECEXP (parallel, 0, i);
1829 rtx d = XEXP (e, 0);
1831 if (d)
1833 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1834 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1836 RTVEC_ELT (vec, i) = e;
1839 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1842 /* Emit code to move a block SRC to block DST, where SRC and DST are
1843 non-consecutive groups of registers, each represented by a PARALLEL. */
1845 void
1846 emit_group_move (rtx dst, rtx src)
1848 int i;
1850 gcc_assert (GET_CODE (src) == PARALLEL
1851 && GET_CODE (dst) == PARALLEL
1852 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1854 /* Skip first entry if NULL. */
1855 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1856 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1857 XEXP (XVECEXP (src, 0, i), 0));
1860 /* Move a group of registers represented by a PARALLEL into pseudos. */
1863 emit_group_move_into_temps (rtx src)
1865 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1866 int i;
1868 for (i = 0; i < XVECLEN (src, 0); i++)
1870 rtx e = XVECEXP (src, 0, i);
1871 rtx d = XEXP (e, 0);
1873 if (d)
1874 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1875 RTVEC_ELT (vec, i) = e;
1878 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1881 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1882 where SRC is non-consecutive registers represented by a PARALLEL.
1883 SSIZE represents the total size of block ORIG_DST, or -1 if not
1884 known. */
1886 void
1887 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1889 rtx *tmps, dst;
1890 int start, finish, i;
1891 enum machine_mode m = GET_MODE (orig_dst);
1893 gcc_assert (GET_CODE (src) == PARALLEL);
1895 if (!SCALAR_INT_MODE_P (m)
1896 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1898 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1899 if (imode == BLKmode)
1900 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1901 else
1902 dst = gen_reg_rtx (imode);
1903 emit_group_store (dst, src, type, ssize);
1904 if (imode != BLKmode)
1905 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1906 emit_move_insn (orig_dst, dst);
1907 return;
1910 /* Check for a NULL entry, used to indicate that the parameter goes
1911 both on the stack and in registers. */
1912 if (XEXP (XVECEXP (src, 0, 0), 0))
1913 start = 0;
1914 else
1915 start = 1;
1916 finish = XVECLEN (src, 0);
1918 tmps = XALLOCAVEC (rtx, finish);
1920 /* Copy the (probable) hard regs into pseudos. */
1921 for (i = start; i < finish; i++)
1923 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1924 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1926 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1927 emit_move_insn (tmps[i], reg);
1929 else
1930 tmps[i] = reg;
1933 /* If we won't be storing directly into memory, protect the real destination
1934 from strange tricks we might play. */
1935 dst = orig_dst;
1936 if (GET_CODE (dst) == PARALLEL)
1938 rtx temp;
1940 /* We can get a PARALLEL dst if there is a conditional expression in
1941 a return statement. In that case, the dst and src are the same,
1942 so no action is necessary. */
1943 if (rtx_equal_p (dst, src))
1944 return;
1946 /* It is unclear if we can ever reach here, but we may as well handle
1947 it. Allocate a temporary, and split this into a store/load to/from
1948 the temporary. */
1949 temp = assign_stack_temp (GET_MODE (dst), ssize);
1950 emit_group_store (temp, src, type, ssize);
1951 emit_group_load (dst, temp, type, ssize);
1952 return;
1954 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1956 enum machine_mode outer = GET_MODE (dst);
1957 enum machine_mode inner;
1958 HOST_WIDE_INT bytepos;
1959 bool done = false;
1960 rtx temp;
1962 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1963 dst = gen_reg_rtx (outer);
1965 /* Make life a bit easier for combine. */
1966 /* If the first element of the vector is the low part
1967 of the destination mode, use a paradoxical subreg to
1968 initialize the destination. */
1969 if (start < finish)
1971 inner = GET_MODE (tmps[start]);
1972 bytepos = subreg_lowpart_offset (inner, outer);
1973 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1975 temp = simplify_gen_subreg (outer, tmps[start],
1976 inner, 0);
1977 if (temp)
1979 emit_move_insn (dst, temp);
1980 done = true;
1981 start++;
1986 /* If the first element wasn't the low part, try the last. */
1987 if (!done
1988 && start < finish - 1)
1990 inner = GET_MODE (tmps[finish - 1]);
1991 bytepos = subreg_lowpart_offset (inner, outer);
1992 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1994 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1995 inner, 0);
1996 if (temp)
1998 emit_move_insn (dst, temp);
1999 done = true;
2000 finish--;
2005 /* Otherwise, simply initialize the result to zero. */
2006 if (!done)
2007 emit_move_insn (dst, CONST0_RTX (outer));
2010 /* Process the pieces. */
2011 for (i = start; i < finish; i++)
2013 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2014 enum machine_mode mode = GET_MODE (tmps[i]);
2015 unsigned int bytelen = GET_MODE_SIZE (mode);
2016 unsigned int adj_bytelen;
2017 rtx dest = dst;
2019 /* Handle trailing fragments that run over the size of the struct. */
2020 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2021 adj_bytelen = ssize - bytepos;
2022 else
2023 adj_bytelen = bytelen;
2025 if (GET_CODE (dst) == CONCAT)
2027 if (bytepos + adj_bytelen
2028 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2029 dest = XEXP (dst, 0);
2030 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2032 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2033 dest = XEXP (dst, 1);
2035 else
2037 enum machine_mode dest_mode = GET_MODE (dest);
2038 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2040 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2042 if (GET_MODE_ALIGNMENT (dest_mode)
2043 >= GET_MODE_ALIGNMENT (tmp_mode))
2045 dest = assign_stack_temp (dest_mode,
2046 GET_MODE_SIZE (dest_mode));
2047 emit_move_insn (adjust_address (dest,
2048 tmp_mode,
2049 bytepos),
2050 tmps[i]);
2051 dst = dest;
2053 else
2055 dest = assign_stack_temp (tmp_mode,
2056 GET_MODE_SIZE (tmp_mode));
2057 emit_move_insn (dest, tmps[i]);
2058 dst = adjust_address (dest, dest_mode, bytepos);
2060 break;
2064 /* Handle trailing fragments that run over the size of the struct. */
2065 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2067 /* store_bit_field always takes its value from the lsb.
2068 Move the fragment to the lsb if it's not already there. */
2069 if (
2070 #ifdef BLOCK_REG_PADDING
2071 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2072 == (BYTES_BIG_ENDIAN ? upward : downward)
2073 #else
2074 BYTES_BIG_ENDIAN
2075 #endif
2078 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2079 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2080 shift, tmps[i], 0);
2083 /* Make sure not to write past the end of the struct. */
2084 store_bit_field (dest,
2085 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2086 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2087 VOIDmode, tmps[i]);
2090 /* Optimize the access just a bit. */
2091 else if (MEM_P (dest)
2092 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2093 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2094 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2095 && bytelen == GET_MODE_SIZE (mode))
2096 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2098 else
2099 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100 0, 0, mode, tmps[i]);
2103 /* Copy from the pseudo into the (probable) hard reg. */
2104 if (orig_dst != dst)
2105 emit_move_insn (orig_dst, dst);
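/* Example (illustrative sketch; RET_PAR, TYPE and the 16-byte size are
   hypothetical): spilling a PARALLEL return value into a stack slot so
   it can be addressed as ordinary memory:

     rtx slot = assign_stack_temp (BLKmode, 16);
     emit_group_store (slot, ret_par, type, 16);

   The pieces land at the byte offsets recorded in the PARALLEL's
   EXPR_LIST entries, using aligned moves where possible and
   store_bit_field otherwise.  */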
2108 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2109 of the value stored in X. */
2112 maybe_emit_group_store (rtx x, tree type)
2114 enum machine_mode mode = TYPE_MODE (type);
2115 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2116 if (GET_CODE (x) == PARALLEL)
2118 rtx result = gen_reg_rtx (mode);
2119 emit_group_store (result, x, type, int_size_in_bytes (type));
2120 return result;
2122 return x;
2125 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2127 This is used on targets that return BLKmode values in registers. */
2129 void
2130 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2132 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2133 rtx src = NULL, dst = NULL;
2134 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2135 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2136 enum machine_mode mode = GET_MODE (srcreg);
2137 enum machine_mode tmode = GET_MODE (target);
2138 enum machine_mode copy_mode;
2140 /* BLKmode registers created in the back-end shouldn't have survived. */
2141 gcc_assert (mode != BLKmode);
2143 /* If the structure doesn't take up a whole number of words, see whether
2144 SRCREG is padded on the left or on the right. If it's on the left,
2145 set PADDING_CORRECTION to the number of bits to skip.
2147 In most ABIs, the structure will be returned at the least significant end of
2148 the register, which translates to right padding on little-endian
2149 targets and left padding on big-endian targets. The opposite
2150 holds if the structure is returned at the most significant
2151 end of the register. */
2152 if (bytes % UNITS_PER_WORD != 0
2153 && (targetm.calls.return_in_msb (type)
2154 ? !BYTES_BIG_ENDIAN
2155 : BYTES_BIG_ENDIAN))
2156 padding_correction
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2159 /* We can use a single move if we have an exact mode for the size. */
2160 else if (MEM_P (target)
2161 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2162 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2163 && bytes == GET_MODE_SIZE (mode))
2165 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2166 return;
2169 /* And if we additionally have the same mode for a register. */
2170 else if (REG_P (target)
2171 && GET_MODE (target) == mode
2172 && bytes == GET_MODE_SIZE (mode))
2174 emit_move_insn (target, srcreg);
2175 return;
2178 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2179 into a new pseudo which is a full word. */
2180 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2182 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2183 mode = word_mode;
2186 /* Copy the structure BITSIZE bits at a time. If the target lives in
2187 memory, take care of not reading/writing past its end by selecting
2188 a copy mode suited to BITSIZE. This should always be possible given
2189 how it is computed.
2191 If the target lives in a register, make sure not to select a copy mode
2192 larger than the mode of the register.
2194 We could probably emit more efficient code for machines which do not use
2195 strict alignment, but it doesn't seem worth the effort at the current
2196 time. */
2198 copy_mode = word_mode;
2199 if (MEM_P (target))
2201 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2202 if (mem_mode != BLKmode)
2203 copy_mode = mem_mode;
2205 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2206 copy_mode = tmode;
2208 for (bitpos = 0, xbitpos = padding_correction;
2209 bitpos < bytes * BITS_PER_UNIT;
2210 bitpos += bitsize, xbitpos += bitsize)
2212 /* We need a new source operand each time xbitpos is on a
2213 word boundary and when xbitpos == padding_correction
2214 (the first time through). */
2215 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2216 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2218 /* We need a new destination operand each time bitpos is on
2219 a word boundary. */
2220 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2221 dst = target;
2222 else if (bitpos % BITS_PER_WORD == 0)
2223 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2225 /* Use xbitpos for the source extraction (right justified) and
2226 bitpos for the destination store (left justified). */
2227 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2228 extract_bit_field (src, bitsize,
2229 xbitpos % BITS_PER_WORD, 1,
2230 NULL_RTX, copy_mode, copy_mode));
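/* Example (illustrative sketch; TARGET_MEM, RETVAL_REG and TYPE are
   hypothetical): a caller expanding a call to a function that returns a
   small struct in a register copies it out into a BLKmode object with

     copy_blkmode_from_reg (target_mem, retval_reg, type);

   The word-by-word bitfield loop above honours the padding correction,
   so the bytes end up at the offsets they would have in memory.  */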
2234 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2235 register if it contains any data, otherwise return null.
2237 This is used on targets that return BLKmode values in registers. */
2240 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2242 int i, n_regs;
2243 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2244 unsigned int bitsize;
2245 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2246 enum machine_mode dst_mode;
2248 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2250 x = expand_normal (src);
2252 bytes = int_size_in_bytes (TREE_TYPE (src));
2253 if (bytes == 0)
2254 return NULL_RTX;
2256 /* If the structure doesn't take up a whole number of words, see
2257 whether the register value should be padded on the left or on
2258 the right. Set PADDING_CORRECTION to the number of padding
2259 bits needed on the left side.
2261 In most ABIs, the structure will be returned at the least significant end of
2262 the register, which translates to right padding on little-endian
2263 targets and left padding on big-endian targets. The opposite
2264 holds if the structure is returned at the most significant
2265 end of the register. */
2266 if (bytes % UNITS_PER_WORD != 0
2267 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2268 ? !BYTES_BIG_ENDIAN
2269 : BYTES_BIG_ENDIAN))
2270 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2271 * BITS_PER_UNIT));
2273 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2274 dst_words = XALLOCAVEC (rtx, n_regs);
2275 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2277 /* Copy the structure BITSIZE bits at a time. */
2278 for (bitpos = 0, xbitpos = padding_correction;
2279 bitpos < bytes * BITS_PER_UNIT;
2280 bitpos += bitsize, xbitpos += bitsize)
2282 /* We need a new destination pseudo each time xbitpos is
2283 on a word boundary and when xbitpos == padding_correction
2284 (the first time through). */
2285 if (xbitpos % BITS_PER_WORD == 0
2286 || xbitpos == padding_correction)
2288 /* Generate an appropriate register. */
2289 dst_word = gen_reg_rtx (word_mode);
2290 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2292 /* Clear the destination before we move anything into it. */
2293 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2296 /* We need a new source operand each time bitpos is on a word
2297 boundary. */
2298 if (bitpos % BITS_PER_WORD == 0)
2299 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2301 /* Use bitpos for the source extraction (left justified) and
2302 xbitpos for the destination store (right justified). */
2303 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2304 0, 0, word_mode,
2305 extract_bit_field (src_word, bitsize,
2306 bitpos % BITS_PER_WORD, 1,
2307 NULL_RTX, word_mode, word_mode));
2310 if (mode == BLKmode)
2312 /* Find the smallest integer mode large enough to hold the
2313 entire structure. */
2314 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 mode != VOIDmode;
2316 mode = GET_MODE_WIDER_MODE (mode))
2317 /* Have we found a large enough mode? */
2318 if (GET_MODE_SIZE (mode) >= bytes)
2319 break;
2321 /* A suitable mode should have been found. */
2322 gcc_assert (mode != VOIDmode);
2325 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2326 dst_mode = word_mode;
2327 else
2328 dst_mode = mode;
2329 dst = gen_reg_rtx (dst_mode);
2331 for (i = 0; i < n_regs; i++)
2332 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2334 if (mode != dst_mode)
2335 dst = gen_lowpart (mode, dst);
2337 return dst;
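/* Example (illustrative sketch; RESULT_REG and RETVAL_EXPR are
   hypothetical, RETVAL_EXPR being a tree whose type has BLKmode):
   preparing a small aggregate for return in the function's result
   register:

     rtx val = copy_blkmode_to_reg (GET_MODE (result_reg), retval_expr);
     if (val)
       emit_move_insn (result_reg, val);

   Passing BLKmode as the first argument instead makes the function pick
   the narrowest integer mode that covers the whole structure, as the
   loop near the end shows.  */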
2340 /* Add a USE expression for REG to the (possibly empty) list pointed
2341 to by CALL_FUSAGE. REG must denote a hard register. */
2343 void
2344 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2346 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2348 *call_fusage
2349 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2352 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2353 to by CALL_FUSAGE. REG must denote a hard register. */
2355 void
2356 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2358 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2360 *call_fusage
2361 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2364 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2365 starting at REGNO. All of these registers must be hard registers. */
2367 void
2368 use_regs (rtx *call_fusage, int regno, int nregs)
2370 int i;
2372 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2374 for (i = 0; i < nregs; i++)
2375 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2378 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2379 PARALLEL REGS. This is for calls that pass values in multiple
2380 non-contiguous locations. The Irix 6 ABI has examples of this. */
2382 void
2383 use_group_regs (rtx *call_fusage, rtx regs)
2385 int i;
2387 for (i = 0; i < XVECLEN (regs, 0); i++)
2389 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2391 /* A NULL entry means the parameter goes both on the stack and in
2392 registers. This can also be a MEM for targets that pass values
2393 partially on the stack and partially in registers. */
2394 if (reg != 0 && REG_P (reg))
2395 use_reg (call_fusage, reg);
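/* Example (illustrative sketch; FUSAGE, ARG_REG and ARG_PAR are
   hypothetical): a call expander records the registers a call reads by
   chaining USEs onto a fusage list that is later attached to the
   CALL_INSN:

     rtx fusage = NULL_RTX;
     use_reg (&fusage, arg_reg);            // single hard register
     use_group_regs (&fusage, arg_par);     // PARALLEL of hard registers
     // ... emit the call, then attach FUSAGE as CALL_INSN_FUNCTION_USAGE.
*/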
2399 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2400 assignment and the code of the expression on the RHS is CODE. Return
2401 NULL otherwise. */
2403 static gimple
2404 get_def_for_expr (tree name, enum tree_code code)
2406 gimple def_stmt;
2408 if (TREE_CODE (name) != SSA_NAME)
2409 return NULL;
2411 def_stmt = get_gimple_for_ssa_name (name);
2412 if (!def_stmt
2413 || gimple_assign_rhs_code (def_stmt) != code)
2414 return NULL;
2416 return def_stmt;
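/* Example (illustrative sketch; TREEOP0 is a hypothetical operand tree):
   expanders use this to peek through an SSA name at its defining
   statement, e.g. to spot a multiplication feeding the current
   expression:

     gimple def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree rhs1 = gimple_assign_rhs1 (def);
         tree rhs2 = gimple_assign_rhs2 (def);
         // ... try a fused multiply-style expansion with rhs1/rhs2.
       }
*/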
2419 #ifdef HAVE_conditional_move
2420 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2421 assignment and the class of the expression on the RHS is CLASS. Return
2422 NULL otherwise. */
2424 static gimple
2425 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2427 gimple def_stmt;
2429 if (TREE_CODE (name) != SSA_NAME)
2430 return NULL;
2432 def_stmt = get_gimple_for_ssa_name (name);
2433 if (!def_stmt
2434 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2435 return NULL;
2437 return def_stmt;
2439 #endif
2442 /* Determine whether the LEN bytes generated by CONSTFUN can be
2443 stored to memory using several move instructions. CONSTFUNDATA is
2444 a pointer which will be passed as argument in every CONSTFUN call.
2445 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2446 a memset operation and false if it's a copy of a constant string.
2447 Return nonzero if a call to store_by_pieces should succeed. */
2450 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2451 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2452 void *constfundata, unsigned int align, bool memsetp)
2454 unsigned HOST_WIDE_INT l;
2455 unsigned int max_size;
2456 HOST_WIDE_INT offset = 0;
2457 enum machine_mode mode;
2458 enum insn_code icode;
2459 int reverse;
2460 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2461 rtx cst ATTRIBUTE_UNUSED;
2463 if (len == 0)
2464 return 1;
2466 if (! (memsetp
2467 ? SET_BY_PIECES_P (len, align)
2468 : STORE_BY_PIECES_P (len, align)))
2469 return 0;
2471 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2473 /* We would first store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2476 for (reverse = 0;
2477 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2478 reverse++)
2480 l = len;
2481 max_size = STORE_MAX_PIECES + 1;
2482 while (max_size > 1 && l > 0)
2484 mode = widest_int_mode_for_size (max_size);
2486 if (mode == VOIDmode)
2487 break;
2489 icode = optab_handler (mov_optab, mode);
2490 if (icode != CODE_FOR_nothing
2491 && align >= GET_MODE_ALIGNMENT (mode))
2493 unsigned int size = GET_MODE_SIZE (mode);
2495 while (l >= size)
2497 if (reverse)
2498 offset -= size;
2500 cst = (*constfun) (constfundata, offset, mode);
2501 if (!targetm.legitimate_constant_p (mode, cst))
2502 return 0;
2504 if (!reverse)
2505 offset += size;
2507 l -= size;
2511 max_size = GET_MODE_SIZE (mode);
2514 /* The code above should have handled everything. */
2515 gcc_assert (!l);
2518 return 1;
2521 /* Generate several move instructions to store LEN bytes generated by
2522 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2523 pointer which will be passed as argument in every CONSTFUN call.
2524 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2525 a memset operation and false if it's a copy of a constant string.
2526 If ENDP is 0 return TO, if ENDP is 1 return the memory at the end a la
2527 mempcpy, and if ENDP is 2 return the memory at the end minus one byte a la
2528 stpcpy. */
2531 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2532 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2533 void *constfundata, unsigned int align, bool memsetp, int endp)
2535 enum machine_mode to_addr_mode = get_address_mode (to);
2536 struct store_by_pieces_d data;
2538 if (len == 0)
2540 gcc_assert (endp != 2);
2541 return to;
2544 gcc_assert (memsetp
2545 ? SET_BY_PIECES_P (len, align)
2546 : STORE_BY_PIECES_P (len, align));
2547 data.constfun = constfun;
2548 data.constfundata = constfundata;
2549 data.len = len;
2550 data.to = to;
2551 store_by_pieces_1 (&data, align);
2552 if (endp)
2554 rtx to1;
2556 gcc_assert (!data.reverse);
2557 if (data.autinc_to)
2559 if (endp == 2)
2561 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2562 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2563 else
2564 data.to_addr = copy_to_mode_reg (to_addr_mode,
2565 plus_constant (to_addr_mode,
2566 data.to_addr,
2567 -1));
2569 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2570 data.offset);
2572 else
2574 if (endp == 2)
2575 --data.offset;
2576 to1 = adjust_address (data.to, QImode, data.offset);
2578 return to1;
2580 else
2581 return data.to;
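/* Example (illustrative sketch; TO is a hypothetical BLKmode MEM with
   32-bit alignment): a memset-style caller can feed store_by_pieces a
   constfun that always yields zero, much like clear_by_pieces_1 below:

     static rtx
     zero_piece (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (32, zero_piece, NULL, 32, true))
       store_by_pieces (to, 32, zero_piece, NULL, 32, true, 0);

   The final 0 asks for TO itself back; 1 or 2 would return the end (or
   the end minus one byte) for mempcpy/stpcpy-style expansions.  */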
2584 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2585 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2587 static void
2588 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2590 struct store_by_pieces_d data;
2592 if (len == 0)
2593 return;
2595 data.constfun = clear_by_pieces_1;
2596 data.constfundata = NULL;
2597 data.len = len;
2598 data.to = to;
2599 store_by_pieces_1 (&data, align);
2602 /* Callback routine for clear_by_pieces.
2603 Return const0_rtx unconditionally. */
2605 static rtx
2606 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2607 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2608 enum machine_mode mode ATTRIBUTE_UNUSED)
2610 return const0_rtx;
2613 /* Subroutine of clear_by_pieces and store_by_pieces.
2614 Generate several move instructions to store LEN bytes of block TO. (A MEM
2615 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2617 static void
2618 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2619 unsigned int align ATTRIBUTE_UNUSED)
2621 enum machine_mode to_addr_mode = get_address_mode (data->to);
2622 rtx to_addr = XEXP (data->to, 0);
2623 unsigned int max_size = STORE_MAX_PIECES + 1;
2624 enum insn_code icode;
2626 data->offset = 0;
2627 data->to_addr = to_addr;
2628 data->autinc_to
2629 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2630 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2632 data->explicit_inc_to = 0;
2633 data->reverse
2634 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2635 if (data->reverse)
2636 data->offset = data->len;
2638 /* If storing requires more than two move insns,
2639 copy addresses to registers (to make displacements shorter)
2640 and use post-increment if available. */
2641 if (!data->autinc_to
2642 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2644 /* Determine the main mode we'll be using.
2645 MODE might not be used depending on the definitions of the
2646 USE_* macros below. */
2647 enum machine_mode mode ATTRIBUTE_UNUSED
2648 = widest_int_mode_for_size (max_size);
2650 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2652 data->to_addr = copy_to_mode_reg (to_addr_mode,
2653 plus_constant (to_addr_mode,
2654 to_addr,
2655 data->len));
2656 data->autinc_to = 1;
2657 data->explicit_inc_to = -1;
2660 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2661 && ! data->autinc_to)
2663 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2664 data->autinc_to = 1;
2665 data->explicit_inc_to = 1;
2668 if ( !data->autinc_to && CONSTANT_P (to_addr))
2669 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2672 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2674 /* First store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2677 while (max_size > 1 && data->len > 0)
2679 enum machine_mode mode = widest_int_mode_for_size (max_size);
2681 if (mode == VOIDmode)
2682 break;
2684 icode = optab_handler (mov_optab, mode);
2685 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2686 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2688 max_size = GET_MODE_SIZE (mode);
2691 /* The code above should have handled everything. */
2692 gcc_assert (!data->len);
2695 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2696 with move instructions for mode MODE. GENFUN is the gen_... function
2697 to make a move insn for that mode. DATA has all the other info. */
2699 static void
2700 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2701 struct store_by_pieces_d *data)
2703 unsigned int size = GET_MODE_SIZE (mode);
2704 rtx to1, cst;
2706 while (data->len >= size)
2708 if (data->reverse)
2709 data->offset -= size;
2711 if (data->autinc_to)
2712 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2713 data->offset);
2714 else
2715 to1 = adjust_address (data->to, mode, data->offset);
2717 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2718 emit_insn (gen_add2_insn (data->to_addr,
2719 gen_int_mode (-(HOST_WIDE_INT) size,
2720 GET_MODE (data->to_addr))));
2722 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2723 emit_insn ((*genfun) (to1, cst));
2725 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2726 emit_insn (gen_add2_insn (data->to_addr,
2727 gen_int_mode (size,
2728 GET_MODE (data->to_addr))));
2730 if (! data->reverse)
2731 data->offset += size;
2733 data->len -= size;
2737 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2738 its length in bytes. */
2741 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2742 unsigned int expected_align, HOST_WIDE_INT expected_size,
2743 unsigned HOST_WIDE_INT min_size,
2744 unsigned HOST_WIDE_INT max_size,
2745 unsigned HOST_WIDE_INT probable_max_size)
2747 enum machine_mode mode = GET_MODE (object);
2748 unsigned int align;
2750 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2752 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2753 just move a zero. Otherwise, do this a piece at a time. */
2754 if (mode != BLKmode
2755 && CONST_INT_P (size)
2756 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2758 rtx zero = CONST0_RTX (mode);
2759 if (zero != NULL)
2761 emit_move_insn (object, zero);
2762 return NULL;
2765 if (COMPLEX_MODE_P (mode))
2767 zero = CONST0_RTX (GET_MODE_INNER (mode));
2768 if (zero != NULL)
2770 write_complex_part (object, zero, 0);
2771 write_complex_part (object, zero, 1);
2772 return NULL;
2777 if (size == const0_rtx)
2778 return NULL;
2780 align = MEM_ALIGN (object);
2782 if (CONST_INT_P (size)
2783 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2784 clear_by_pieces (object, INTVAL (size), align);
2785 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2786 expected_align, expected_size,
2787 min_size, max_size, probable_max_size))
2789 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2790 return set_storage_via_libcall (object, size, const0_rtx,
2791 method == BLOCK_OP_TAILCALL);
2792 else
2793 gcc_unreachable ();
2795 return NULL;
2799 clear_storage (rtx object, rtx size, enum block_op_methods method)
2801 unsigned HOST_WIDE_INT max, min = 0;
2802 if (GET_CODE (size) == CONST_INT)
2803 min = max = UINTVAL (size);
2804 else
2805 max = GET_MODE_MASK (GET_MODE (size));
2806 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
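/* Example (illustrative sketch; OBJ is a hypothetical BLKmode MEM):
   zeroing a 64-byte object typically picks clear_by_pieces, a setmem
   pattern or a memset libcall, in that order of preference:

     clear_storage (obj, GEN_INT (64), BLOCK_OP_NORMAL);
*/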
2810 /* A subroutine of clear_storage. Expand a call to memset.
2811 Return the return value of memset, 0 otherwise. */
2814 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2816 tree call_expr, fn, object_tree, size_tree, val_tree;
2817 enum machine_mode size_mode;
2818 rtx retval;
2820 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2821 wrap those pseudos in trees and pass them to the memset call below. */
2823 object = copy_addr_to_reg (XEXP (object, 0));
2825 size_mode = TYPE_MODE (sizetype);
2826 size = convert_to_mode (size_mode, size, 1);
2827 size = copy_to_mode_reg (size_mode, size);
2829 /* It is incorrect to use the libcall calling conventions to call
2830 memset in this context. This could be a user call to memset and
2831 the user may wish to examine the return value from memset. For
2832 targets where libcalls and normal calls have different conventions
2833 for returning pointers, we could end up generating incorrect code. */
2835 object_tree = make_tree (ptr_type_node, object);
2836 if (!CONST_INT_P (val))
2837 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2838 size_tree = make_tree (sizetype, size);
2839 val_tree = make_tree (integer_type_node, val);
2841 fn = clear_storage_libcall_fn (true);
2842 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2843 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2845 retval = expand_normal (call_expr);
2847 return retval;
2850 /* A subroutine of set_storage_via_libcall. Create the tree node
2851 for the function we use for block clears. */
2853 tree block_clear_fn;
2855 void
2856 init_block_clear_fn (const char *asmspec)
2858 if (!block_clear_fn)
2860 tree fn, args;
2862 fn = get_identifier ("memset");
2863 args = build_function_type_list (ptr_type_node, ptr_type_node,
2864 integer_type_node, sizetype,
2865 NULL_TREE);
2867 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2868 DECL_EXTERNAL (fn) = 1;
2869 TREE_PUBLIC (fn) = 1;
2870 DECL_ARTIFICIAL (fn) = 1;
2871 TREE_NOTHROW (fn) = 1;
2872 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2873 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2875 block_clear_fn = fn;
2878 if (asmspec)
2879 set_user_assembler_name (block_clear_fn, asmspec);
2882 static tree
2883 clear_storage_libcall_fn (int for_call)
2885 static bool emitted_extern;
2887 if (!block_clear_fn)
2888 init_block_clear_fn (NULL);
2890 if (for_call && !emitted_extern)
2892 emitted_extern = true;
2893 make_decl_rtl (block_clear_fn);
2896 return block_clear_fn;
2899 /* Expand a setmem pattern; return true if successful. */
2901 bool
2902 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2903 unsigned int expected_align, HOST_WIDE_INT expected_size,
2904 unsigned HOST_WIDE_INT min_size,
2905 unsigned HOST_WIDE_INT max_size,
2906 unsigned HOST_WIDE_INT probable_max_size)
2908 /* Try the most limited insn first, because there's no point
2909 including more than one in the machine description unless
2910 the more limited one has some advantage. */
2912 enum machine_mode mode;
2914 if (expected_align < align)
2915 expected_align = align;
2916 if (expected_size != -1)
2918 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2919 expected_size = max_size;
2920 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2921 expected_size = min_size;
2924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2925 mode = GET_MODE_WIDER_MODE (mode))
2927 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2929 if (code != CODE_FOR_nothing
2930 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2931 here because if SIZE is less than the mode mask, as it is
2932 returned by the macro, it will definitely be less than the
2933 actual mode mask. Since SIZE is within the Pmode address
2934 space, we limit MODE to Pmode. */
2935 && ((CONST_INT_P (size)
2936 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2937 <= (GET_MODE_MASK (mode) >> 1)))
2938 || max_size <= (GET_MODE_MASK (mode) >> 1)
2939 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2941 struct expand_operand ops[9];
2942 unsigned int nops;
2944 nops = insn_data[(int) code].n_generator_args;
2945 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2947 create_fixed_operand (&ops[0], object);
2948 /* The check above guarantees that this size conversion is valid. */
2949 create_convert_operand_to (&ops[1], size, mode, true);
2950 create_convert_operand_from (&ops[2], val, byte_mode, true);
2951 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2952 if (nops >= 6)
2954 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2955 create_integer_operand (&ops[5], expected_size);
2957 if (nops >= 8)
2959 create_integer_operand (&ops[6], min_size);
2960 /* If we cannot represent the maximal size,
2961 make the parameter NULL. */
2962 if ((HOST_WIDE_INT) max_size != -1)
2963 create_integer_operand (&ops[7], max_size);
2964 else
2965 create_fixed_operand (&ops[7], NULL);
2967 if (nops == 9)
2969 /* If we cannot represent the maximal size,
2970 make the parameter NULL. */
2971 if ((HOST_WIDE_INT) probable_max_size != -1)
2972 create_integer_operand (&ops[8], probable_max_size);
2973 else
2974 create_fixed_operand (&ops[8], NULL);
2976 if (maybe_expand_insn (code, nops, ops))
2977 return true;
2981 return false;
2985 /* Write to one of the components of the complex value CPLX. Write VAL to
2986 the real part if IMAG_P is false, and the imaginary part if it's true. */
2988 static void
2989 write_complex_part (rtx cplx, rtx val, bool imag_p)
2991 enum machine_mode cmode;
2992 enum machine_mode imode;
2993 unsigned ibitsize;
2995 if (GET_CODE (cplx) == CONCAT)
2997 emit_move_insn (XEXP (cplx, imag_p), val);
2998 return;
3001 cmode = GET_MODE (cplx);
3002 imode = GET_MODE_INNER (cmode);
3003 ibitsize = GET_MODE_BITSIZE (imode);
3005 /* For MEMs simplify_gen_subreg may generate an invalid new address
3006 because, e.g., the original address is considered mode-dependent
3007 by the target, which restricts simplify_subreg from invoking
3008 adjust_address_nv. Instead of preparing fallback support for an
3009 invalid address, we call adjust_address_nv directly. */
3010 if (MEM_P (cplx))
3012 emit_move_insn (adjust_address_nv (cplx, imode,
3013 imag_p ? GET_MODE_SIZE (imode) : 0),
3014 val);
3015 return;
3018 /* If the sub-object is at least word sized, then we know that subregging
3019 will work. This special case is important, since store_bit_field
3020 wants to operate on integer modes, and there's rarely an OImode to
3021 correspond to TCmode. */
3022 if (ibitsize >= BITS_PER_WORD
3023 /* For hard regs we have exact predicates. Assume we can split
3024 the original object if it spans an even number of hard regs.
3025 This special case is important for SCmode on 64-bit platforms
3026 where the natural size of floating-point regs is 32-bit. */
3027 || (REG_P (cplx)
3028 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3029 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3031 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3032 imag_p ? GET_MODE_SIZE (imode) : 0);
3033 if (part)
3035 emit_move_insn (part, val);
3036 return;
3038 else
3039 /* simplify_gen_subreg may fail for sub-word MEMs. */
3040 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3043 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3046 /* Extract one of the components of the complex value CPLX. Extract the
3047 real part if IMAG_P is false, and the imaginary part if it's true. */
3049 static rtx
3050 read_complex_part (rtx cplx, bool imag_p)
3052 enum machine_mode cmode, imode;
3053 unsigned ibitsize;
3055 if (GET_CODE (cplx) == CONCAT)
3056 return XEXP (cplx, imag_p);
3058 cmode = GET_MODE (cplx);
3059 imode = GET_MODE_INNER (cmode);
3060 ibitsize = GET_MODE_BITSIZE (imode);
3062 /* Special case reads from complex constants that got spilled to memory. */
3063 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3065 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3066 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3068 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3069 if (CONSTANT_CLASS_P (part))
3070 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3074 /* For MEMs simplify_gen_subreg may generate an invalid new address
3075 because, e.g., the original address is considered mode-dependent
3076 by the target, which restricts simplify_subreg from invoking
3077 adjust_address_nv. Instead of preparing fallback support for an
3078 invalid address, we call adjust_address_nv directly. */
3079 if (MEM_P (cplx))
3080 return adjust_address_nv (cplx, imode,
3081 imag_p ? GET_MODE_SIZE (imode) : 0);
3083 /* If the sub-object is at least word sized, then we know that subregging
3084 will work. This special case is important, since extract_bit_field
3085 wants to operate on integer modes, and there's rarely an OImode to
3086 correspond to TCmode. */
3087 if (ibitsize >= BITS_PER_WORD
3088 /* For hard regs we have exact predicates. Assume we can split
3089 the original object if it spans an even number of hard regs.
3090 This special case is important for SCmode on 64-bit platforms
3091 where the natural size of floating-point regs is 32-bit. */
3092 || (REG_P (cplx)
3093 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3094 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3096 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3097 imag_p ? GET_MODE_SIZE (imode) : 0);
3098 if (ret)
3099 return ret;
3100 else
3101 /* simplify_gen_subreg may fail for sub-word MEMs. */
3102 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3105 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3106 true, NULL_RTX, imode, imode);
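/* Example (illustrative sketch; C is a hypothetical SCmode pseudo):
   swapping the two halves of a complex value with the accessors above.
   The real part is snapshotted into a fresh pseudo first, since the rtx
   returned for a register operand refers to the register itself rather
   than to its current value:

     rtx re = read_complex_part (c, false);
     rtx im = read_complex_part (c, true);
     rtx t  = force_reg (GET_MODE (re), re);   // snapshot the real part
     write_complex_part (c, im, false);
     write_complex_part (c, t, true);
*/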
3109 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3110 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3111 represented in NEW_MODE. If FORCE is true, this will never happen, as
3112 we'll force-create a SUBREG if needed. */
3114 static rtx
3115 emit_move_change_mode (enum machine_mode new_mode,
3116 enum machine_mode old_mode, rtx x, bool force)
3118 rtx ret;
3120 if (push_operand (x, GET_MODE (x)))
3122 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3123 MEM_COPY_ATTRIBUTES (ret, x);
3125 else if (MEM_P (x))
3127 /* We don't have to worry about changing the address since the
3128 size in bytes is supposed to be the same. */
3129 if (reload_in_progress)
3131 /* Copy the MEM to change the mode and move any
3132 substitutions from the old MEM to the new one. */
3133 ret = adjust_address_nv (x, new_mode, 0);
3134 copy_replacements (x, ret);
3136 else
3137 ret = adjust_address (x, new_mode, 0);
3139 else
3141 /* Note that we do want simplify_subreg's behavior of validating
3142 that the new mode is ok for a hard register. If we were to use
3143 simplify_gen_subreg, we would create the subreg, but would
3144 probably run into the target not being able to implement it. */
3145 /* Except, of course, when FORCE is true, when this is exactly what
3146 we want. Which is needed for CCmodes on some targets. */
3147 if (force)
3148 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3149 else
3150 ret = simplify_subreg (new_mode, x, old_mode, 0);
3153 return ret;
3156 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3157 an integer mode of the same size as MODE. Returns the instruction
3158 emitted, or NULL if such a move could not be generated. */
3160 static rtx
3161 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3163 enum machine_mode imode;
3164 enum insn_code code;
3166 /* There must exist a mode of the exact size we require. */
3167 imode = int_mode_for_mode (mode);
3168 if (imode == BLKmode)
3169 return NULL_RTX;
3171 /* The target must support moves in this mode. */
3172 code = optab_handler (mov_optab, imode);
3173 if (code == CODE_FOR_nothing)
3174 return NULL_RTX;
3176 x = emit_move_change_mode (imode, mode, x, force);
3177 if (x == NULL_RTX)
3178 return NULL_RTX;
3179 y = emit_move_change_mode (imode, mode, y, force);
3180 if (y == NULL_RTX)
3181 return NULL_RTX;
3182 return emit_insn (GEN_FCN (code) (x, y));
3185 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3186 Return an equivalent MEM that does not use an auto-increment. */
3189 emit_move_resolve_push (enum machine_mode mode, rtx x)
3191 enum rtx_code code = GET_CODE (XEXP (x, 0));
3192 HOST_WIDE_INT adjust;
3193 rtx temp;
3195 adjust = GET_MODE_SIZE (mode);
3196 #ifdef PUSH_ROUNDING
3197 adjust = PUSH_ROUNDING (adjust);
3198 #endif
3199 if (code == PRE_DEC || code == POST_DEC)
3200 adjust = -adjust;
3201 else if (code == PRE_MODIFY || code == POST_MODIFY)
3203 rtx expr = XEXP (XEXP (x, 0), 1);
3204 HOST_WIDE_INT val;
3206 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3207 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3208 val = INTVAL (XEXP (expr, 1));
3209 if (GET_CODE (expr) == MINUS)
3210 val = -val;
3211 gcc_assert (adjust == val || adjust == -val);
3212 adjust = val;
3215 /* Do not use anti_adjust_stack, since we don't want to update
3216 stack_pointer_delta. */
3217 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3218 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3219 0, OPTAB_LIB_WIDEN);
3220 if (temp != stack_pointer_rtx)
3221 emit_move_insn (stack_pointer_rtx, temp);
3223 switch (code)
3225 case PRE_INC:
3226 case PRE_DEC:
3227 case PRE_MODIFY:
3228 temp = stack_pointer_rtx;
3229 break;
3230 case POST_INC:
3231 case POST_DEC:
3232 case POST_MODIFY:
3233 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3234 break;
3235 default:
3236 gcc_unreachable ();
3239 return replace_equiv_address (x, temp);
3242 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3243 X is known to satisfy push_operand, and MODE is known to be complex.
3244 Returns the last instruction emitted. */
3247 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3249 enum machine_mode submode = GET_MODE_INNER (mode);
3250 bool imag_first;
3252 #ifdef PUSH_ROUNDING
3253 unsigned int submodesize = GET_MODE_SIZE (submode);
3255 /* In case we output to the stack, but the size is smaller than the
3256 machine can push exactly, we need to use move instructions. */
3257 if (PUSH_ROUNDING (submodesize) != submodesize)
3259 x = emit_move_resolve_push (mode, x);
3260 return emit_move_insn (x, y);
3262 #endif
3264 /* Note that the real part always precedes the imag part in memory
3265 regardless of the machine's endianness. */
3266 switch (GET_CODE (XEXP (x, 0)))
3268 case PRE_DEC:
3269 case POST_DEC:
3270 imag_first = true;
3271 break;
3272 case PRE_INC:
3273 case POST_INC:
3274 imag_first = false;
3275 break;
3276 default:
3277 gcc_unreachable ();
3280 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3281 read_complex_part (y, imag_first));
3282 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3283 read_complex_part (y, !imag_first));
3286 /* A subroutine of emit_move_complex. Perform the move from Y to X
3287 via two moves of the parts. Returns the last instruction emitted. */
3290 emit_move_complex_parts (rtx x, rtx y)
3292 /* Show the output dies here. This is necessary for SUBREGs
3293 of pseudos since we cannot track their lifetimes correctly;
3294 hard regs shouldn't appear here except as return values. */
3295 if (!reload_completed && !reload_in_progress
3296 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3297 emit_clobber (x);
3299 write_complex_part (x, read_complex_part (y, false), false);
3300 write_complex_part (x, read_complex_part (y, true), true);
3302 return get_last_insn ();
3305 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3306 MODE is known to be complex. Returns the last instruction emitted. */
3308 static rtx
3309 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3311 bool try_int;
3313 /* Need to take special care for pushes, to maintain proper ordering
3314 of the data, and possibly extra padding. */
3315 if (push_operand (x, mode))
3316 return emit_move_complex_push (mode, x, y);
3318 /* See if we can coerce the target into moving both values at once, except
3319 for floating point where we favor moving as parts if this is easy. */
3320 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3321 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3322 && !(REG_P (x)
3323 && HARD_REGISTER_P (x)
3324 && hard_regno_nregs[REGNO (x)][mode] == 1)
3325 && !(REG_P (y)
3326 && HARD_REGISTER_P (y)
3327 && hard_regno_nregs[REGNO (y)][mode] == 1))
3328 try_int = false;
3329 /* Not possible if the values are inherently not adjacent. */
3330 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3331 try_int = false;
3332 /* Is possible if both are registers (or subregs of registers). */
3333 else if (register_operand (x, mode) && register_operand (y, mode))
3334 try_int = true;
3335 /* If one of the operands is a memory, and alignment constraints
3336 are friendly enough, we may be able to do combined memory operations.
3337 We do not attempt this if Y is a constant because that combination is
3338 usually better with the by-parts thing below. */
3339 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3340 && (!STRICT_ALIGNMENT
3341 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3342 try_int = true;
3343 else
3344 try_int = false;
3346 if (try_int)
3348 rtx ret;
3350 /* For memory to memory moves, optimal behavior can be had with the
3351 existing block move logic. */
3352 if (MEM_P (x) && MEM_P (y))
3354 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3355 BLOCK_OP_NO_LIBCALL);
3356 return get_last_insn ();
3359 ret = emit_move_via_integer (mode, x, y, true);
3360 if (ret)
3361 return ret;
3364 return emit_move_complex_parts (x, y);
3367 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3368 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3370 static rtx
3371 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3373 rtx ret;
3375 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3376 if (mode != CCmode)
3378 enum insn_code code = optab_handler (mov_optab, CCmode);
3379 if (code != CODE_FOR_nothing)
3381 x = emit_move_change_mode (CCmode, mode, x, true);
3382 y = emit_move_change_mode (CCmode, mode, y, true);
3383 return emit_insn (GEN_FCN (code) (x, y));
3387 /* Otherwise, find the MODE_INT mode of the same width. */
3388 ret = emit_move_via_integer (mode, x, y, false);
3389 gcc_assert (ret != NULL);
3390 return ret;
3393 /* Return true if word I of OP lies entirely in the
3394 undefined bits of a paradoxical subreg. */
3396 static bool
3397 undefined_operand_subword_p (const_rtx op, int i)
3399 enum machine_mode innermode, innermostmode;
3400 int offset;
3401 if (GET_CODE (op) != SUBREG)
3402 return false;
3403 innermode = GET_MODE (op);
3404 innermostmode = GET_MODE (SUBREG_REG (op));
3405 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3406 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3407 memory, except for a paradoxical subreg where we define
3408 SUBREG_BYTE to be 0; undo this exception as in
3409 simplify_subreg. */
3410 if (SUBREG_BYTE (op) == 0
3411 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3413 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3414 if (WORDS_BIG_ENDIAN)
3415 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3416 if (BYTES_BIG_ENDIAN)
3417 offset += difference % UNITS_PER_WORD;
3419 if (offset >= GET_MODE_SIZE (innermostmode)
3420 || offset <= -GET_MODE_SIZE (word_mode))
3421 return true;
3422 return false;
3425 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3426 MODE is any multi-word or full-word mode that lacks a move_insn
3427 pattern. Note that you will get better code if you define such
3428 patterns, even if they must turn into multiple assembler instructions. */
3430 static rtx
3431 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3433 rtx last_insn = 0;
3434 rtx seq, inner;
3435 bool need_clobber;
3436 int i;
3438 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3440 /* If X is a push on the stack, do the push now and replace
3441 X with a reference to the stack pointer. */
3442 if (push_operand (x, mode))
3443 x = emit_move_resolve_push (mode, x);
3445 /* If we are in reload, see if either operand is a MEM whose address
3446 is scheduled for replacement. */
3447 if (reload_in_progress && MEM_P (x)
3448 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3449 x = replace_equiv_address_nv (x, inner);
3450 if (reload_in_progress && MEM_P (y)
3451 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3452 y = replace_equiv_address_nv (y, inner);
3454 start_sequence ();
3456 need_clobber = false;
3457 for (i = 0;
3458 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3459 i++)
3461 rtx xpart = operand_subword (x, i, 1, mode);
3462 rtx ypart;
3464 /* Do not generate code for a move if it would come entirely
3465 from the undefined bits of a paradoxical subreg. */
3466 if (undefined_operand_subword_p (y, i))
3467 continue;
3469 ypart = operand_subword (y, i, 1, mode);
3471 /* If we can't get a part of Y, put Y into memory if it is a
3472 constant. Otherwise, force it into a register. Then we must
3473 be able to get a part of Y. */
3474 if (ypart == 0 && CONSTANT_P (y))
3476 y = use_anchored_address (force_const_mem (mode, y));
3477 ypart = operand_subword (y, i, 1, mode);
3479 else if (ypart == 0)
3480 ypart = operand_subword_force (y, i, mode);
3482 gcc_assert (xpart && ypart);
3484 need_clobber |= (GET_CODE (xpart) == SUBREG);
3486 last_insn = emit_move_insn (xpart, ypart);
3489 seq = get_insns ();
3490 end_sequence ();
3492 /* Show the output dies here. This is necessary for SUBREGs
3493 of pseudos since we cannot track their lifetimes correctly;
3494 hard regs shouldn't appear here except as return values.
3495 We never want to emit such a clobber after reload. */
3496 if (x != y
3497 && ! (reload_in_progress || reload_completed)
3498 && need_clobber != 0)
3499 emit_clobber (x);
3501 emit_insn (seq);
3503 return last_insn;
3506 /* Low level part of emit_move_insn.
3507 Called just like emit_move_insn, but assumes X and Y
3508 are basically valid. */
3511 emit_move_insn_1 (rtx x, rtx y)
3513 enum machine_mode mode = GET_MODE (x);
3514 enum insn_code code;
3516 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3518 code = optab_handler (mov_optab, mode);
3519 if (code != CODE_FOR_nothing)
3520 return emit_insn (GEN_FCN (code) (x, y));
3522 /* Expand complex moves by moving real part and imag part. */
3523 if (COMPLEX_MODE_P (mode))
3524 return emit_move_complex (mode, x, y);
3526 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3527 || ALL_FIXED_POINT_MODE_P (mode))
3529 rtx result = emit_move_via_integer (mode, x, y, true);
3531 /* If we can't find an integer mode, use multi words. */
3532 if (result)
3533 return result;
3534 else
3535 return emit_move_multi_word (mode, x, y);
3538 if (GET_MODE_CLASS (mode) == MODE_CC)
3539 return emit_move_ccmode (mode, x, y);
3541 /* Try using a move pattern for the corresponding integer mode. This is
3542 only safe when simplify_subreg can convert MODE constants into integer
3543 constants. At present, it can only do this reliably if the value
3544 fits within a HOST_WIDE_INT. */
3545 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3547 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3549 if (ret)
3551 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3552 return ret;
3556 return emit_move_multi_word (mode, x, y);
3559 /* Generate code to copy Y into X.
3560 Both Y and X must have the same mode, except that
3561 Y can be a constant with VOIDmode.
3562 This mode cannot be BLKmode; use emit_block_move for that.
3564 Return the last instruction emitted. */
3567 emit_move_insn (rtx x, rtx y)
3569 enum machine_mode mode = GET_MODE (x);
3570 rtx y_cst = NULL_RTX;
3571 rtx last_insn, set;
3573 gcc_assert (mode != BLKmode
3574 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3576 if (CONSTANT_P (y))
3578 if (optimize
3579 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3580 && (last_insn = compress_float_constant (x, y)))
3581 return last_insn;
3583 y_cst = y;
3585 if (!targetm.legitimate_constant_p (mode, y))
3587 y = force_const_mem (mode, y);
3589 /* If the target's cannot_force_const_mem prevented the spill,
3590 assume that the target's move expanders will also take care
3591 of the non-legitimate constant. */
3592 if (!y)
3593 y = y_cst;
3594 else
3595 y = use_anchored_address (y);
3599 /* If X or Y are memory references, verify that their addresses are valid
3600 for the machine. */
3601 if (MEM_P (x)
3602 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3603 MEM_ADDR_SPACE (x))
3604 && ! push_operand (x, GET_MODE (x))))
3605 x = validize_mem (x);
3607 if (MEM_P (y)
3608 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3609 MEM_ADDR_SPACE (y)))
3610 y = validize_mem (y);
3612 gcc_assert (mode != BLKmode);
3614 last_insn = emit_move_insn_1 (x, y);
3616 if (y_cst && REG_P (x)
3617 && (set = single_set (last_insn)) != NULL_RTX
3618 && SET_DEST (set) == x
3619 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3620 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3622 return last_insn;
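/* Example (illustrative sketch): loading a floating-point constant into
   a fresh pseudo; emit_move_insn legitimizes the constant (possibly
   forcing it into the constant pool) and may record a REG_EQUAL note on
   the resulting move:

     rtx tmp = gen_reg_rtx (DFmode);
     emit_move_insn (tmp, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode));
*/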
3625 /* If Y is representable exactly in a narrower mode, and the target can
3626 perform the extension directly from constant or memory, then emit the
3627 move as an extension. */
3629 static rtx
3630 compress_float_constant (rtx x, rtx y)
3632 enum machine_mode dstmode = GET_MODE (x);
3633 enum machine_mode orig_srcmode = GET_MODE (y);
3634 enum machine_mode srcmode;
3635 REAL_VALUE_TYPE r;
3636 int oldcost, newcost;
3637 bool speed = optimize_insn_for_speed_p ();
3639 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3641 if (targetm.legitimate_constant_p (dstmode, y))
3642 oldcost = set_src_cost (y, speed);
3643 else
3644 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3646 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3647 srcmode != orig_srcmode;
3648 srcmode = GET_MODE_WIDER_MODE (srcmode))
3650 enum insn_code ic;
3651 rtx trunc_y, last_insn;
3653 /* Skip if the target can't extend this way. */
3654 ic = can_extend_p (dstmode, srcmode, 0);
3655 if (ic == CODE_FOR_nothing)
3656 continue;
3658 /* Skip if the narrowed value isn't exact. */
3659 if (! exact_real_truncate (srcmode, &r))
3660 continue;
3662 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3664 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3666 /* Skip if the target needs extra instructions to perform
3667 the extension. */
3668 if (!insn_operand_matches (ic, 1, trunc_y))
3669 continue;
3670 /* This is valid, but may not be cheaper than the original. */
3671 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3672 speed);
3673 if (oldcost < newcost)
3674 continue;
3676 else if (float_extend_from_mem[dstmode][srcmode])
3678 trunc_y = force_const_mem (srcmode, trunc_y);
3679 /* This is valid, but may not be cheaper than the original. */
3680 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3681 speed);
3682 if (oldcost < newcost)
3683 continue;
3684 trunc_y = validize_mem (trunc_y);
3686 else
3687 continue;
3689 /* For CSE's benefit, force the compressed constant pool entry
3690 into a new pseudo. This constant may be used in different modes,
3691 and if not, combine will put things back together for us. */
3692 trunc_y = force_reg (srcmode, trunc_y);
3694 /* If x is a hard register, perform the extension into a pseudo,
3695 so that e.g. stack realignment code is aware of it. */
3696 rtx target = x;
3697 if (REG_P (x) && HARD_REGISTER_P (x))
3698 target = gen_reg_rtx (dstmode);
3700 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3701 last_insn = get_last_insn ();
3703 if (REG_P (target))
3704 set_unique_reg_note (last_insn, REG_EQUAL, y);
3706 if (target != x)
3707 return emit_move_insn (x, target);
3708 return last_insn;
3711 return NULL_RTX;
3714 /* Pushing data onto the stack. */
3716 /* Push a block of length SIZE (perhaps variable)
3717 and return an rtx to address the beginning of the block.
3718 The value may be virtual_outgoing_args_rtx.
3720 EXTRA is the number of bytes of padding to push in addition to SIZE.
3721 BELOW nonzero means this padding comes at low addresses;
3722 otherwise, the padding comes at high addresses. */
3725 push_block (rtx size, int extra, int below)
3727 rtx temp;
3729 size = convert_modes (Pmode, ptr_mode, size, 1);
3730 if (CONSTANT_P (size))
3731 anti_adjust_stack (plus_constant (Pmode, size, extra));
3732 else if (REG_P (size) && extra == 0)
3733 anti_adjust_stack (size);
3734 else
3736 temp = copy_to_mode_reg (Pmode, size);
3737 if (extra != 0)
3738 temp = expand_binop (Pmode, add_optab, temp,
3739 gen_int_mode (extra, Pmode),
3740 temp, 0, OPTAB_LIB_WIDEN);
3741 anti_adjust_stack (temp);
3744 #ifndef STACK_GROWS_DOWNWARD
3745 if (0)
3746 #else
3747 if (1)
3748 #endif
3750 temp = virtual_outgoing_args_rtx;
3751 if (extra != 0 && below)
3752 temp = plus_constant (Pmode, temp, extra);
3754 else
3756 if (CONST_INT_P (size))
3757 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3758 -INTVAL (size) - (below ? 0 : extra));
3759 else if (extra != 0 && !below)
3760 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3761 negate_rtx (Pmode, plus_constant (Pmode, size,
3762 extra)));
3763 else
3764 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3765 negate_rtx (Pmode, size));
3768 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
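/* Illustrative sketch, not part of GCC: a caller that wants 32 bytes of
   outgoing argument space plus 8 bytes of padding at low addresses could
   use push_block as below (the operand values and the helper name are
   assumptions for the example):  */

static rtx
example_push_block_use (void)
{
  /* SIZE is 32 bytes, EXTRA is 8 bytes of padding, BELOW nonzero places
     the padding at low addresses; the returned rtx addresses the block.  */
  return push_block (GEN_INT (32), 8, 1);
}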
3771 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3773 static rtx
3774 mem_autoinc_base (rtx mem)
3776 if (MEM_P (mem))
3778 rtx addr = XEXP (mem, 0);
3779 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3780 return XEXP (addr, 0);
3782 return NULL;
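/* Illustrative examples, not part of GCC: for (mem:SI (post_inc:SI (reg R)))
   mem_autoinc_base returns (reg R); for a non-MEM rtx, or for a MEM whose
   address is not an auto-inc expression, it returns NULL.  */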
3785 /* A utility routine used here, in reload, and in try_split. The insns
3786 after PREV up to and including LAST are known to adjust the stack,
3787 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3788 placing notes as appropriate. PREV may be NULL, indicating the
3789 entire insn sequence prior to LAST should be scanned.
3791 The set of allowed stack pointer modifications is small:
3792 (1) One or more auto-inc style memory references (aka pushes),
3793 (2) One or more addition/subtraction with the SP as destination,
3794 (3) A single move insn with the SP as destination,
3795 (4) A call_pop insn,
3796 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3798 Insns in the sequence that do not modify the SP are ignored,
3799 except for noreturn calls.
3801 The return value is the amount of adjustment that can be trivially
3802 verified, via immediate operand or auto-inc. If the adjustment
3803 cannot be trivially extracted, the return value is INT_MIN. */
3805 HOST_WIDE_INT
3806 find_args_size_adjust (rtx insn)
3808 rtx dest, set, pat;
3809 int i;
3811 pat = PATTERN (insn);
3812 set = NULL;
3814 /* Look for a call_pop pattern. */
3815 if (CALL_P (insn))
3817 /* We have to allow non-call_pop patterns for the case
3818 of emit_single_push_insn of a TLS address. */
3819 if (GET_CODE (pat) != PARALLEL)
3820 return 0;
3822 /* All call_pop patterns have a stack pointer adjust in the parallel.

3823 The call itself is always first, and the stack adjust is
3824 usually last, so search from the end. */
3825 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3827 set = XVECEXP (pat, 0, i);
3828 if (GET_CODE (set) != SET)
3829 continue;
3830 dest = SET_DEST (set);
3831 if (dest == stack_pointer_rtx)
3832 break;
3834 /* We'd better have found the stack pointer adjust. */
3835 if (i == 0)
3836 return 0;
3837 /* Fall through to process the extracted SET and DEST
3838 as if it was a standalone insn. */
3840 else if (GET_CODE (pat) == SET)
3841 set = pat;
3842 else if ((set = single_set (insn)) != NULL)
3844 else if (GET_CODE (pat) == PARALLEL)
3846 /* ??? Some older ports use a parallel with a stack adjust
3847 and a store for a PUSH_ROUNDING pattern, rather than a
3848 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3849 /* ??? See h8300 and m68k, pushqi1. */
3850 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3852 set = XVECEXP (pat, 0, i);
3853 if (GET_CODE (set) != SET)
3854 continue;
3855 dest = SET_DEST (set);
3856 if (dest == stack_pointer_rtx)
3857 break;
3859 /* We do not expect an auto-inc of the sp in the parallel. */
3860 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3861 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3862 != stack_pointer_rtx);
3864 if (i < 0)
3865 return 0;
3867 else
3868 return 0;
3870 dest = SET_DEST (set);
3872 /* Look for direct modifications of the stack pointer. */
3873 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3875 /* Look for a trivial adjustment, otherwise assume nothing. */
3876 /* Note that the SPU restore_stack_block pattern refers to
3877 the stack pointer in V4SImode. Consider that non-trivial. */
3878 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3879 && GET_CODE (SET_SRC (set)) == PLUS
3880 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3881 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3882 return INTVAL (XEXP (SET_SRC (set), 1));
3883 /* ??? Reload can generate no-op moves, which will be cleaned
3884 up later. Recognize it and continue searching. */
3885 else if (rtx_equal_p (dest, SET_SRC (set)))
3886 return 0;
3887 else
3888 return HOST_WIDE_INT_MIN;
3890 else
3892 rtx mem, addr;
3894 /* Otherwise only think about autoinc patterns. */
3895 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3897 mem = dest;
3898 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3899 != stack_pointer_rtx);
3901 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3902 mem = SET_SRC (set);
3903 else
3904 return 0;
3906 addr = XEXP (mem, 0);
3907 switch (GET_CODE (addr))
3909 case PRE_INC:
3910 case POST_INC:
3911 return GET_MODE_SIZE (GET_MODE (mem));
3912 case PRE_DEC:
3913 case POST_DEC:
3914 return -GET_MODE_SIZE (GET_MODE (mem));
3915 case PRE_MODIFY:
3916 case POST_MODIFY:
3917 addr = XEXP (addr, 1);
3918 gcc_assert (GET_CODE (addr) == PLUS);
3919 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3920 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3921 return INTVAL (XEXP (addr, 1));
3922 default:
3923 gcc_unreachable ();
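/* Illustrative examples, not part of GCC: typical results of
   find_args_size_adjust on a 32-bit target where Pmode is SImode
   (the RTL patterns shown are assumptions for the example):

     (set (mem:SI (pre_dec:SI (reg sp))) (reg:SI 0))    -> -4
     (set (reg sp) (plus:SI (reg sp) (const_int -16)))  -> -16
     (set (reg sp) (reg fp))                            -> HOST_WIDE_INT_MIN

   Callers such as fixup_args_size_notes below treat HOST_WIDE_INT_MIN as
   "no trivially extractable adjustment".  */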
3929 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3931 int args_size = end_args_size;
3932 bool saw_unknown = false;
3933 rtx insn;
3935 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3937 HOST_WIDE_INT this_delta;
3939 if (!NONDEBUG_INSN_P (insn))
3940 continue;
3942 this_delta = find_args_size_adjust (insn);
3943 if (this_delta == 0)
3945 if (!CALL_P (insn)
3946 || ACCUMULATE_OUTGOING_ARGS
3947 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3948 continue;
3951 gcc_assert (!saw_unknown);
3952 if (this_delta == HOST_WIDE_INT_MIN)
3953 saw_unknown = true;
3955 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3956 #ifdef STACK_GROWS_DOWNWARD
3957 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3958 #endif
3959 args_size -= this_delta;
3962 return saw_unknown ? INT_MIN : args_size;
3965 #ifdef PUSH_ROUNDING
3966 /* Emit single push insn. */
3968 static void
3969 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3971 rtx dest_addr;
3972 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3973 rtx dest;
3974 enum insn_code icode;
3976 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3977 /* If there is a push pattern, use it. Otherwise try the old way of
3978 throwing a MEM representing the push operation to the move expander. */
3979 icode = optab_handler (push_optab, mode);
3980 if (icode != CODE_FOR_nothing)
3982 struct expand_operand ops[1];
3984 create_input_operand (&ops[0], x, mode);
3985 if (maybe_expand_insn (icode, 1, ops))
3986 return;
3988 if (GET_MODE_SIZE (mode) == rounded_size)
3989 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3990 /* If we are to pad downward, adjust the stack pointer first and
3991 then store X into the stack location using an offset. This is
3992 because emit_move_insn does not know how to pad; it does not have
3993 access to type. */
3994 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3996 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3997 HOST_WIDE_INT offset;
3999 emit_move_insn (stack_pointer_rtx,
4000 expand_binop (Pmode,
4001 #ifdef STACK_GROWS_DOWNWARD
4002 sub_optab,
4003 #else
4004 add_optab,
4005 #endif
4006 stack_pointer_rtx,
4007 gen_int_mode (rounded_size, Pmode),
4008 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4010 offset = (HOST_WIDE_INT) padding_size;
4011 #ifdef STACK_GROWS_DOWNWARD
4012 if (STACK_PUSH_CODE == POST_DEC)
4013 /* We have already decremented the stack pointer, so get the
4014 previous value. */
4015 offset += (HOST_WIDE_INT) rounded_size;
4016 #else
4017 if (STACK_PUSH_CODE == POST_INC)
4018 /* We have already incremented the stack pointer, so get the
4019 previous value. */
4020 offset -= (HOST_WIDE_INT) rounded_size;
4021 #endif
4022 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4023 gen_int_mode (offset, Pmode));
4025 else
4027 #ifdef STACK_GROWS_DOWNWARD
4028 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4029 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4030 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4031 Pmode));
4032 #else
4033 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4034 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4035 gen_int_mode (rounded_size, Pmode));
4036 #endif
4037 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4040 dest = gen_rtx_MEM (mode, dest_addr);
4042 if (type != 0)
4044 set_mem_attributes (dest, type, 1);
4046 if (cfun->tail_call_marked)
4047 /* Function incoming arguments may overlap with sibling call
4048 outgoing arguments and we cannot allow reordering of reads
4049 from function arguments with stores to outgoing arguments
4050 of sibling calls. */
4051 set_mem_alias_set (dest, 0);
4053 emit_move_insn (dest, x);
4056 /* Emit and annotate a single push insn. */
4058 static void
4059 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4061 int delta, old_delta = stack_pointer_delta;
4062 rtx prev = get_last_insn ();
4063 rtx last;
4065 emit_single_push_insn_1 (mode, x, type);
4067 last = get_last_insn ();
4069 /* Notice the common case where we emitted exactly one insn. */
4070 if (PREV_INSN (last) == prev)
4072 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4073 return;
4076 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4077 gcc_assert (delta == INT_MIN || delta == old_delta);
4079 #endif
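/* Illustrative sketch, not part of GCC: on a target where the stack grows
   downward and PUSH_ROUNDING does not round a word up, the MEM built by
   emit_single_push_insn_1 for a word-sized push is equivalent to storing
   through a pre-decrement address (X is assumed to be a word_mode value):

     rtx addr = gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx);
     rtx slot = gen_rtx_MEM (word_mode, addr);
     emit_move_insn (slot, x);

   The move expander then turns that store into the target's push insn.  */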
4081 /* Generate code to push X onto the stack, assuming it has mode MODE and
4082 type TYPE.
4083 MODE is redundant except when X is a CONST_INT (since they don't
4084 carry mode info).
4085 SIZE is an rtx for the size of data to be copied (in bytes),
4086 needed only if X is BLKmode.
4088 ALIGN (in bits) is maximum alignment we can assume.
4090 If PARTIAL and REG are both nonzero, then copy that many of the first
4091 bytes of X into registers starting with REG, and push the rest of X.
4092 The amount of space pushed is decreased by PARTIAL bytes.
4093 REG must be a hard register in this case.
4094 If REG is zero but PARTIAL is not, take all other actions for an
4095 argument partially in registers, but do not actually load any
4096 registers.
4098 EXTRA is the amount in bytes of extra space to leave next to this arg.
4099 This is ignored if an argument block has already been allocated.
4101 On a machine that lacks real push insns, ARGS_ADDR is the address of
4102 the bottom of the argument block for this call. We use indexing off there
4103 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4104 argument block has not been preallocated.
4106 ARGS_SO_FAR is the size of args previously pushed for this call.
4108 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4109 for arguments passed in registers. If nonzero, it will be the number
4110 of bytes required. */
4112 void
4113 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4114 unsigned int align, int partial, rtx reg, int extra,
4115 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4116 rtx alignment_pad)
4118 rtx xinner;
4119 enum direction stack_direction
4120 #ifdef STACK_GROWS_DOWNWARD
4121 = downward;
4122 #else
4123 = upward;
4124 #endif
4126 /* Decide where to pad the argument: `downward' for below,
4127 `upward' for above, or `none' for no padding.
4128 Default is below for small data on big-endian machines; else above. */
4129 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4131 /* Invert direction if stack is post-decrement.
4132 FIXME: why? */
4133 if (STACK_PUSH_CODE == POST_DEC)
4134 if (where_pad != none)
4135 where_pad = (where_pad == downward ? upward : downward);
4137 xinner = x;
4139 if (mode == BLKmode
4140 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4142 /* Copy a block into the stack, entirely or partially. */
4144 rtx temp;
4145 int used;
4146 int offset;
4147 int skip;
4149 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4150 used = partial - offset;
4152 if (mode != BLKmode)
4154 /* A value is to be stored in an insufficiently aligned
4155 stack slot; copy via a suitably aligned slot if
4156 necessary. */
4157 size = GEN_INT (GET_MODE_SIZE (mode));
4158 if (!MEM_P (xinner))
4160 temp = assign_temp (type, 1, 1);
4161 emit_move_insn (temp, xinner);
4162 xinner = temp;
4166 gcc_assert (size);
4168 /* USED is now the # of bytes we need not copy to the stack
4169 because registers will take care of them. */
4171 if (partial != 0)
4172 xinner = adjust_address (xinner, BLKmode, used);
4174 /* If the partial register-part of the arg counts in its stack size,
4175 skip the part of stack space corresponding to the registers.
4176 Otherwise, start copying to the beginning of the stack space,
4177 by setting SKIP to 0. */
4178 skip = (reg_parm_stack_space == 0) ? 0 : used;
4180 #ifdef PUSH_ROUNDING
4181 /* Do it with several push insns if that doesn't take lots of insns
4182 and if there is no difficulty with push insns that skip bytes
4183 on the stack for alignment purposes. */
4184 if (args_addr == 0
4185 && PUSH_ARGS
4186 && CONST_INT_P (size)
4187 && skip == 0
4188 && MEM_ALIGN (xinner) >= align
4189 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4190 /* Here we avoid the case of a structure whose weak alignment
4191 forces many pushes of a small amount of data,
4192 and such small pushes do rounding that causes trouble. */
4193 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4194 || align >= BIGGEST_ALIGNMENT
4195 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4196 == (align / BITS_PER_UNIT)))
4197 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4199 /* Push padding now if padding above and stack grows down,
4200 or if padding below and stack grows up.
4201 But if space already allocated, this has already been done. */
4202 if (extra && args_addr == 0
4203 && where_pad != none && where_pad != stack_direction)
4204 anti_adjust_stack (GEN_INT (extra));
4206 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4208 else
4209 #endif /* PUSH_ROUNDING */
4211 rtx target;
4213 /* Otherwise make space on the stack and copy the data
4214 to the address of that space. */
4216 /* Deduct words put into registers from the size we must copy. */
4217 if (partial != 0)
4219 if (CONST_INT_P (size))
4220 size = GEN_INT (INTVAL (size) - used);
4221 else
4222 size = expand_binop (GET_MODE (size), sub_optab, size,
4223 gen_int_mode (used, GET_MODE (size)),
4224 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4227 /* Get the address of the stack space.
4228 In this case, we do not deal with EXTRA separately.
4229 A single stack adjust will do. */
4230 if (! args_addr)
4232 temp = push_block (size, extra, where_pad == downward);
4233 extra = 0;
4235 else if (CONST_INT_P (args_so_far))
4236 temp = memory_address (BLKmode,
4237 plus_constant (Pmode, args_addr,
4238 skip + INTVAL (args_so_far)));
4239 else
4240 temp = memory_address (BLKmode,
4241 plus_constant (Pmode,
4242 gen_rtx_PLUS (Pmode,
4243 args_addr,
4244 args_so_far),
4245 skip));
4247 if (!ACCUMULATE_OUTGOING_ARGS)
4249 /* If the source is referenced relative to the stack pointer,
4250 copy it to another register to stabilize it. We do not need
4251 to do this if we know that we won't be changing sp. */
4253 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4254 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4255 temp = copy_to_reg (temp);
4258 target = gen_rtx_MEM (BLKmode, temp);
4260 /* We do *not* set_mem_attributes here, because incoming arguments
4261 may overlap with sibling call outgoing arguments and we cannot
4262 allow reordering of reads from function arguments with stores
4263 to outgoing arguments of sibling calls. We do, however, want
4264 to record the alignment of the stack slot. */
4265 /* ALIGN may well be better aligned than TYPE, e.g. due to
4266 PARM_BOUNDARY. Assume the caller isn't lying. */
4267 set_mem_align (target, align);
4269 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4272 else if (partial > 0)
4274 /* Scalar partly in registers. */
4276 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4277 int i;
4278 int not_stack;
4279 /* # bytes of start of argument
4280 that we must make space for but need not store. */
4281 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4282 int args_offset = INTVAL (args_so_far);
4283 int skip;
4285 /* Push padding now if padding above and stack grows down,
4286 or if padding below and stack grows up.
4287 But if space already allocated, this has already been done. */
4288 if (extra && args_addr == 0
4289 && where_pad != none && where_pad != stack_direction)
4290 anti_adjust_stack (GEN_INT (extra));
4292 /* If we make space by pushing it, we might as well push
4293 the real data. Otherwise, we can leave OFFSET nonzero
4294 and leave the space uninitialized. */
4295 if (args_addr == 0)
4296 offset = 0;
4298 /* Now NOT_STACK gets the number of words that we don't need to
4299 allocate on the stack. Convert OFFSET to words too. */
4300 not_stack = (partial - offset) / UNITS_PER_WORD;
4301 offset /= UNITS_PER_WORD;
4303 /* If the partial register-part of the arg counts in its stack size,
4304 skip the part of stack space corresponding to the registers.
4305 Otherwise, start copying to the beginning of the stack space,
4306 by setting SKIP to 0. */
4307 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4309 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4310 x = validize_mem (force_const_mem (mode, x));
4312 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4313 SUBREGs of such registers are not allowed. */
4314 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4315 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4316 x = copy_to_reg (x);
4318 /* Loop over all the words allocated on the stack for this arg. */
4319 /* We can do it by words, because any scalar bigger than a word
4320 has a size that is a multiple of a word. */
4321 for (i = size - 1; i >= not_stack; i--)
4322 if (i >= not_stack + offset)
4323 emit_push_insn (operand_subword_force (x, i, mode),
4324 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4325 0, args_addr,
4326 GEN_INT (args_offset + ((i - not_stack + skip)
4327 * UNITS_PER_WORD)),
4328 reg_parm_stack_space, alignment_pad);
4330 else
4332 rtx addr;
4333 rtx dest;
4335 /* Push padding now if padding above and stack grows down,
4336 or if padding below and stack grows up.
4337 But if space already allocated, this has already been done. */
4338 if (extra && args_addr == 0
4339 && where_pad != none && where_pad != stack_direction)
4340 anti_adjust_stack (GEN_INT (extra));
4342 #ifdef PUSH_ROUNDING
4343 if (args_addr == 0 && PUSH_ARGS)
4344 emit_single_push_insn (mode, x, type);
4345 else
4346 #endif
4348 if (CONST_INT_P (args_so_far))
4349 addr
4350 = memory_address (mode,
4351 plus_constant (Pmode, args_addr,
4352 INTVAL (args_so_far)));
4353 else
4354 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4355 args_so_far));
4356 dest = gen_rtx_MEM (mode, addr);
4358 /* We do *not* set_mem_attributes here, because incoming arguments
4359 may overlap with sibling call outgoing arguments and we cannot
4360 allow reordering of reads from function arguments with stores
4361 to outgoing arguments of sibling calls. We do, however, want
4362 to record the alignment of the stack slot. */
4363 /* ALIGN may well be better aligned than TYPE, e.g. due to
4364 PARM_BOUNDARY. Assume the caller isn't lying. */
4365 set_mem_align (dest, align);
4367 emit_move_insn (dest, x);
4371 /* If part should go in registers, copy that part
4372 into the appropriate registers. Do this now, at the end,
4373 since mem-to-mem copies above may do function calls. */
4374 if (partial > 0 && reg != 0)
4376 /* Handle calls that pass values in multiple non-contiguous locations.
4377 The Irix 6 ABI has examples of this. */
4378 if (GET_CODE (reg) == PARALLEL)
4379 emit_group_load (reg, x, type, -1);
4380 else
4382 gcc_assert (partial % UNITS_PER_WORD == 0);
4383 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4387 if (extra && args_addr == 0 && where_pad == stack_direction)
4388 anti_adjust_stack (GEN_INT (extra));
4390 if (alignment_pad && args_addr == 0)
4391 anti_adjust_stack (alignment_pad);
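/* Illustrative sketch, not part of GCC: a minimal call of emit_push_insn
   for a SImode scalar on a target with push insns, with no partial
   registers, no preallocated argument block and no extra padding (the
   operand values and the helper name are assumptions for the example):  */

static void
example_emit_push_insn_use (rtx val)
{
  emit_push_insn (val, SImode, integer_type_node, NULL_RTX,
                  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0, NULL_RTX);
}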
4394 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4395 operations. */
4397 static rtx
4398 get_subtarget (rtx x)
4400 return (optimize
4401 || x == 0
4402 /* Only registers can be subtargets. */
4403 || !REG_P (x)
4404 /* Don't use hard regs to avoid extending their life. */
4405 || REGNO (x) < FIRST_PSEUDO_REGISTER
4406 ? 0 : x);
4409 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4410 FIELD is a bitfield. Returns true if the optimization was successful,
4411 and there's nothing else to do. */
4413 static bool
4414 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4415 unsigned HOST_WIDE_INT bitpos,
4416 unsigned HOST_WIDE_INT bitregion_start,
4417 unsigned HOST_WIDE_INT bitregion_end,
4418 enum machine_mode mode1, rtx str_rtx,
4419 tree to, tree src)
4421 enum machine_mode str_mode = GET_MODE (str_rtx);
4422 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4423 tree op0, op1;
4424 rtx value, result;
4425 optab binop;
4426 gimple srcstmt;
4427 enum tree_code code;
4429 if (mode1 != VOIDmode
4430 || bitsize >= BITS_PER_WORD
4431 || str_bitsize > BITS_PER_WORD
4432 || TREE_SIDE_EFFECTS (to)
4433 || TREE_THIS_VOLATILE (to))
4434 return false;
4436 STRIP_NOPS (src);
4437 if (TREE_CODE (src) != SSA_NAME)
4438 return false;
4439 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4440 return false;
4442 srcstmt = get_gimple_for_ssa_name (src);
4443 if (!srcstmt
4444 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4445 return false;
4447 code = gimple_assign_rhs_code (srcstmt);
4449 op0 = gimple_assign_rhs1 (srcstmt);
4451 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4452 to find its initialization. Hopefully the initialization will
4453 be from a bitfield load. */
4454 if (TREE_CODE (op0) == SSA_NAME)
4456 gimple op0stmt = get_gimple_for_ssa_name (op0);
4458 /* We want to eventually have OP0 be the same as TO, which
4459 should be a bitfield. */
4460 if (!op0stmt
4461 || !is_gimple_assign (op0stmt)
4462 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4463 return false;
4464 op0 = gimple_assign_rhs1 (op0stmt);
4467 op1 = gimple_assign_rhs2 (srcstmt);
4469 if (!operand_equal_p (to, op0, 0))
4470 return false;
4472 if (MEM_P (str_rtx))
4474 unsigned HOST_WIDE_INT offset1;
4476 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4477 str_mode = word_mode;
4478 str_mode = get_best_mode (bitsize, bitpos,
4479 bitregion_start, bitregion_end,
4480 MEM_ALIGN (str_rtx), str_mode, 0);
4481 if (str_mode == VOIDmode)
4482 return false;
4483 str_bitsize = GET_MODE_BITSIZE (str_mode);
4485 offset1 = bitpos;
4486 bitpos %= str_bitsize;
4487 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4488 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4490 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4491 return false;
4493 /* If the bit field covers the whole REG/MEM, store_field
4494 will likely generate better code. */
4495 if (bitsize >= str_bitsize)
4496 return false;
4498 /* We can't handle fields split across multiple entities. */
4499 if (bitpos + bitsize > str_bitsize)
4500 return false;
4502 if (BYTES_BIG_ENDIAN)
4503 bitpos = str_bitsize - bitpos - bitsize;
4505 switch (code)
4507 case PLUS_EXPR:
4508 case MINUS_EXPR:
4509 /* For now, just optimize the case of the topmost bitfield
4510 where we don't need to do any masking and also
4511 1 bit bitfields where xor can be used.
4512 We might win by one instruction for the other bitfields
4513 too if insv/extv instructions aren't used, so that
4514 can be added later. */
4515 if (bitpos + bitsize != str_bitsize
4516 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4517 break;
4519 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4520 value = convert_modes (str_mode,
4521 TYPE_MODE (TREE_TYPE (op1)), value,
4522 TYPE_UNSIGNED (TREE_TYPE (op1)));
4524 /* We may be accessing data outside the field, which means
4525 we can alias adjacent data. */
4526 if (MEM_P (str_rtx))
4528 str_rtx = shallow_copy_rtx (str_rtx);
4529 set_mem_alias_set (str_rtx, 0);
4530 set_mem_expr (str_rtx, 0);
4533 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4534 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4536 value = expand_and (str_mode, value, const1_rtx, NULL);
4537 binop = xor_optab;
4539 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4540 result = expand_binop (str_mode, binop, str_rtx,
4541 value, str_rtx, 1, OPTAB_WIDEN);
4542 if (result != str_rtx)
4543 emit_move_insn (str_rtx, result);
4544 return true;
4546 case BIT_IOR_EXPR:
4547 case BIT_XOR_EXPR:
4548 if (TREE_CODE (op1) != INTEGER_CST)
4549 break;
4550 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4551 value = convert_modes (str_mode,
4552 TYPE_MODE (TREE_TYPE (op1)), value,
4553 TYPE_UNSIGNED (TREE_TYPE (op1)));
4555 /* We may be accessing data outside the field, which means
4556 we can alias adjacent data. */
4557 if (MEM_P (str_rtx))
4559 str_rtx = shallow_copy_rtx (str_rtx);
4560 set_mem_alias_set (str_rtx, 0);
4561 set_mem_expr (str_rtx, 0);
4564 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4565 if (bitpos + bitsize != str_bitsize)
4567 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4568 str_mode);
4569 value = expand_and (str_mode, value, mask, NULL_RTX);
4571 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4572 result = expand_binop (str_mode, binop, str_rtx,
4573 value, str_rtx, 1, OPTAB_WIDEN);
4574 if (result != str_rtx)
4575 emit_move_insn (str_rtx, result);
4576 return true;
4578 default:
4579 break;
4582 return false;
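/* Illustrative example, not part of GCC: the source-level pattern the
   routine above targets is a read-modify-write of a bit-field with
   +, -, | or ^ (the struct and field names are made up):

     struct flags { unsigned ready : 1; unsigned count : 7; };

     void mark_ready (struct flags *f) { f->ready ^= 1; }

   For the 1-bit field the update becomes a single xor on the word that
   contains it, instead of an extract/modify/insert sequence.  */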
4585 /* In the C++ memory model, consecutive bit fields in a structure are
4586 considered one memory location.
4588 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4589 returns the bit range of consecutive bits in which this COMPONENT_REF
4590 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4591 and *OFFSET may be adjusted in the process.
4593 If the access does not need to be restricted, 0 is returned in both
4594 *BITSTART and *BITEND. */
4596 static void
4597 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4598 unsigned HOST_WIDE_INT *bitend,
4599 tree exp,
4600 HOST_WIDE_INT *bitpos,
4601 tree *offset)
4603 HOST_WIDE_INT bitoffset;
4604 tree field, repr;
4606 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4608 field = TREE_OPERAND (exp, 1);
4609 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4610 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4611 need to limit the range we can access. */
4612 if (!repr)
4614 *bitstart = *bitend = 0;
4615 return;
4618 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4619 part of a larger bit field, then the representative does not serve any
4620 useful purpose. This can occur in Ada. */
4621 if (handled_component_p (TREE_OPERAND (exp, 0)))
4623 enum machine_mode rmode;
4624 HOST_WIDE_INT rbitsize, rbitpos;
4625 tree roffset;
4626 int unsignedp;
4627 int volatilep = 0;
4628 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4629 &roffset, &rmode, &unsignedp, &volatilep, false);
4630 if ((rbitpos % BITS_PER_UNIT) != 0)
4632 *bitstart = *bitend = 0;
4633 return;
4637 /* Compute the adjustment to bitpos from the offset of the field
4638 relative to the representative. DECL_FIELD_OFFSET of field and
4639 repr are the same by construction if they are not constants,
4640 see finish_bitfield_layout. */
4641 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4642 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4643 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4644 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4645 else
4646 bitoffset = 0;
4647 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4648 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4650 /* If the adjustment is larger than bitpos, we would have a negative bit
4651 position for the lower bound and this may wreak havoc later. Adjust
4652 offset and bitpos to make the lower bound non-negative in that case. */
4653 if (bitoffset > *bitpos)
4655 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4656 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4658 *bitpos += adjust;
4659 if (*offset == NULL_TREE)
4660 *offset = size_int (-adjust / BITS_PER_UNIT);
4661 else
4662 *offset
4663 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4664 *bitstart = 0;
4666 else
4667 *bitstart = *bitpos - bitoffset;
4669 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
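/* Illustrative example, not part of GCC: under the C++11 memory model the
   two adjacent bit-fields below form one memory location, while the
   following char is a separate location (the names are made up):

     struct s
     {
       unsigned a : 3;
       unsigned b : 5;   // a and b share one memory location
       char c;           // separate memory location
     };

   get_bit_range returns the bit range of the shared location so that a
   store to A stays within it and cannot clobber C.  */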
4672 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4673 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4674 DECL_RTL was not set yet, return NORTL. */
4676 static inline bool
4677 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4679 if (TREE_CODE (addr) != ADDR_EXPR)
4680 return false;
4682 tree base = TREE_OPERAND (addr, 0);
4684 if (!DECL_P (base)
4685 || TREE_ADDRESSABLE (base)
4686 || DECL_MODE (base) == BLKmode)
4687 return false;
4689 if (!DECL_RTL_SET_P (base))
4690 return nortl;
4692 return (!MEM_P (DECL_RTL (base)));
4695 /* Returns true if the MEM_REF REF refers to an object that does not
4696 reside in memory and has non-BLKmode. */
4698 static inline bool
4699 mem_ref_refers_to_non_mem_p (tree ref)
4701 tree base = TREE_OPERAND (ref, 0);
4702 return addr_expr_of_non_mem_decl_p_1 (base, false);
4705 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4706 is true, try generating a nontemporal store. */
4708 void
4709 expand_assignment (tree to, tree from, bool nontemporal)
4711 rtx to_rtx = 0;
4712 rtx result;
4713 enum machine_mode mode;
4714 unsigned int align;
4715 enum insn_code icode;
4717 /* Don't crash if the lhs of the assignment was erroneous. */
4718 if (TREE_CODE (to) == ERROR_MARK)
4720 expand_normal (from);
4721 return;
4724 /* Optimize away no-op moves without side-effects. */
4725 if (operand_equal_p (to, from, 0))
4726 return;
4728 /* Handle misaligned stores. */
4729 mode = TYPE_MODE (TREE_TYPE (to));
4730 if ((TREE_CODE (to) == MEM_REF
4731 || TREE_CODE (to) == TARGET_MEM_REF)
4732 && mode != BLKmode
4733 && !mem_ref_refers_to_non_mem_p (to)
4734 && ((align = get_object_alignment (to))
4735 < GET_MODE_ALIGNMENT (mode))
4736 && (((icode = optab_handler (movmisalign_optab, mode))
4737 != CODE_FOR_nothing)
4738 || SLOW_UNALIGNED_ACCESS (mode, align)))
4740 rtx reg, mem;
4742 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4743 reg = force_not_mem (reg);
4744 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4746 if (icode != CODE_FOR_nothing)
4748 struct expand_operand ops[2];
4750 create_fixed_operand (&ops[0], mem);
4751 create_input_operand (&ops[1], reg, mode);
4752 /* The movmisalign<mode> pattern cannot fail, else the assignment
4753 would silently be omitted. */
4754 expand_insn (icode, 2, ops);
4756 else
4757 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4758 return;
4761 /* Assignment of a structure component needs special treatment
4762 if the structure component's rtx is not simply a MEM.
4763 Assignment of an array element at a constant index, and assignment of
4764 an array element in an unaligned packed structure field, have the same
4765 problem. Same for (partially) storing into a non-memory object. */
4766 if (handled_component_p (to)
4767 || (TREE_CODE (to) == MEM_REF
4768 && mem_ref_refers_to_non_mem_p (to))
4769 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4771 enum machine_mode mode1;
4772 HOST_WIDE_INT bitsize, bitpos;
4773 unsigned HOST_WIDE_INT bitregion_start = 0;
4774 unsigned HOST_WIDE_INT bitregion_end = 0;
4775 tree offset;
4776 int unsignedp;
4777 int volatilep = 0;
4778 tree tem;
4780 push_temp_slots ();
4781 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4782 &unsignedp, &volatilep, true);
4784 /* Make sure bitpos is not negative, it can wreak havoc later. */
4785 if (bitpos < 0)
4787 gcc_assert (offset == NULL_TREE);
4788 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4789 ? 3 : exact_log2 (BITS_PER_UNIT)));
4790 bitpos &= BITS_PER_UNIT - 1;
4793 if (TREE_CODE (to) == COMPONENT_REF
4794 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4795 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4796 /* The C++ memory model naturally applies to byte-aligned fields.
4797 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4798 BITSIZE are not byte-aligned, there is no need to limit the range
4799 we can access. This can occur with packed structures in Ada. */
4800 else if (bitsize > 0
4801 && bitsize % BITS_PER_UNIT == 0
4802 && bitpos % BITS_PER_UNIT == 0)
4804 bitregion_start = bitpos;
4805 bitregion_end = bitpos + bitsize - 1;
4808 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4810 /* If the field has a mode, we want to access it in the
4811 field's mode, not the computed mode.
4812 If a MEM has VOIDmode (external with incomplete type),
4813 use BLKmode for it instead. */
4814 if (MEM_P (to_rtx))
4816 if (mode1 != VOIDmode)
4817 to_rtx = adjust_address (to_rtx, mode1, 0);
4818 else if (GET_MODE (to_rtx) == VOIDmode)
4819 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4822 if (offset != 0)
4824 enum machine_mode address_mode;
4825 rtx offset_rtx;
4827 if (!MEM_P (to_rtx))
4829 /* We can get constant negative offsets into arrays with broken
4830 user code. Translate this to a trap instead of ICEing. */
4831 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4832 expand_builtin_trap ();
4833 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4836 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4837 address_mode = get_address_mode (to_rtx);
4838 if (GET_MODE (offset_rtx) != address_mode)
4839 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4841 /* The check for a constant address in TO_RTX not having VOIDmode
4842 is probably no longer necessary. */
4843 if (MEM_P (to_rtx)
4844 && GET_MODE (to_rtx) == BLKmode
4845 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4846 && bitsize > 0
4847 && (bitpos % bitsize) == 0
4848 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4849 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4851 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4852 bitregion_start = 0;
4853 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4854 bitregion_end -= bitpos;
4855 bitpos = 0;
4858 to_rtx = offset_address (to_rtx, offset_rtx,
4859 highest_pow2_factor_for_target (to,
4860 offset));
4863 /* No action is needed if the target is not a memory and the field
4864 lies completely outside that target. This can occur if the source
4865 code contains an out-of-bounds access to a small array. */
4866 if (!MEM_P (to_rtx)
4867 && GET_MODE (to_rtx) != BLKmode
4868 && (unsigned HOST_WIDE_INT) bitpos
4869 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4871 expand_normal (from);
4872 result = NULL;
4874 /* Handle expand_expr of a complex value returning a CONCAT. */
4875 else if (GET_CODE (to_rtx) == CONCAT)
4877 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4878 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4879 && bitpos == 0
4880 && bitsize == mode_bitsize)
4881 result = store_expr (from, to_rtx, false, nontemporal);
4882 else if (bitsize == mode_bitsize / 2
4883 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4884 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4885 nontemporal);
4886 else if (bitpos + bitsize <= mode_bitsize / 2)
4887 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4888 bitregion_start, bitregion_end,
4889 mode1, from,
4890 get_alias_set (to), nontemporal);
4891 else if (bitpos >= mode_bitsize / 2)
4892 result = store_field (XEXP (to_rtx, 1), bitsize,
4893 bitpos - mode_bitsize / 2,
4894 bitregion_start, bitregion_end,
4895 mode1, from,
4896 get_alias_set (to), nontemporal);
4897 else if (bitpos == 0 && bitsize == mode_bitsize)
4899 rtx from_rtx;
4900 result = expand_normal (from);
4901 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4902 TYPE_MODE (TREE_TYPE (from)), 0);
4903 emit_move_insn (XEXP (to_rtx, 0),
4904 read_complex_part (from_rtx, false));
4905 emit_move_insn (XEXP (to_rtx, 1),
4906 read_complex_part (from_rtx, true));
4908 else
4910 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4911 GET_MODE_SIZE (GET_MODE (to_rtx)));
4912 write_complex_part (temp, XEXP (to_rtx, 0), false);
4913 write_complex_part (temp, XEXP (to_rtx, 1), true);
4914 result = store_field (temp, bitsize, bitpos,
4915 bitregion_start, bitregion_end,
4916 mode1, from,
4917 get_alias_set (to), nontemporal);
4918 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4919 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4922 else
4924 if (MEM_P (to_rtx))
4926 /* If the field is at offset zero, we could have been given the
4927 DECL_RTX of the parent struct. Don't munge it. */
4928 to_rtx = shallow_copy_rtx (to_rtx);
4929 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4930 if (volatilep)
4931 MEM_VOLATILE_P (to_rtx) = 1;
4934 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4935 bitregion_start, bitregion_end,
4936 mode1,
4937 to_rtx, to, from))
4938 result = NULL;
4939 else
4940 result = store_field (to_rtx, bitsize, bitpos,
4941 bitregion_start, bitregion_end,
4942 mode1, from,
4943 get_alias_set (to), nontemporal);
4946 if (result)
4947 preserve_temp_slots (result);
4948 pop_temp_slots ();
4949 return;
4952 /* If the rhs is a function call and its value is not an aggregate,
4953 call the function before we start to compute the lhs.
4954 This is needed for correct code for cases such as
4955 val = setjmp (buf) on machines where reference to val
4956 requires loading up part of an address in a separate insn.
4958 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4959 since it might be a promoted variable where the zero- or sign- extension
4960 needs to be done. Handling this in the normal way is safe because no
4961 computation is done before the call. The same is true for SSA names. */
4962 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4963 && COMPLETE_TYPE_P (TREE_TYPE (from))
4964 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4965 && ! (((TREE_CODE (to) == VAR_DECL
4966 || TREE_CODE (to) == PARM_DECL
4967 || TREE_CODE (to) == RESULT_DECL)
4968 && REG_P (DECL_RTL (to)))
4969 || TREE_CODE (to) == SSA_NAME))
4971 rtx value;
4973 push_temp_slots ();
4974 value = expand_normal (from);
4975 if (to_rtx == 0)
4976 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4978 /* Handle calls that return values in multiple non-contiguous locations.
4979 The Irix 6 ABI has examples of this. */
4980 if (GET_CODE (to_rtx) == PARALLEL)
4982 if (GET_CODE (value) == PARALLEL)
4983 emit_group_move (to_rtx, value);
4984 else
4985 emit_group_load (to_rtx, value, TREE_TYPE (from),
4986 int_size_in_bytes (TREE_TYPE (from)));
4988 else if (GET_CODE (value) == PARALLEL)
4989 emit_group_store (to_rtx, value, TREE_TYPE (from),
4990 int_size_in_bytes (TREE_TYPE (from)));
4991 else if (GET_MODE (to_rtx) == BLKmode)
4993 /* Handle calls that return BLKmode values in registers. */
4994 if (REG_P (value))
4995 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4996 else
4997 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4999 else
5001 if (POINTER_TYPE_P (TREE_TYPE (to)))
5002 value = convert_memory_address_addr_space
5003 (GET_MODE (to_rtx), value,
5004 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5006 emit_move_insn (to_rtx, value);
5008 preserve_temp_slots (to_rtx);
5009 pop_temp_slots ();
5010 return;
5013 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5014 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5016 /* Don't move directly into a return register. */
5017 if (TREE_CODE (to) == RESULT_DECL
5018 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5020 rtx temp;
5022 push_temp_slots ();
5024 /* If the source is itself a return value, it still is in a pseudo at
5025 this point so we can move it back to the return register directly. */
5026 if (REG_P (to_rtx)
5027 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5028 && TREE_CODE (from) != CALL_EXPR)
5029 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5030 else
5031 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5033 /* Handle calls that return values in multiple non-contiguous locations.
5034 The Irix 6 ABI has examples of this. */
5035 if (GET_CODE (to_rtx) == PARALLEL)
5037 if (GET_CODE (temp) == PARALLEL)
5038 emit_group_move (to_rtx, temp);
5039 else
5040 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5041 int_size_in_bytes (TREE_TYPE (from)));
5043 else if (temp)
5044 emit_move_insn (to_rtx, temp);
5046 preserve_temp_slots (to_rtx);
5047 pop_temp_slots ();
5048 return;
5051 /* In case we are returning the contents of an object which overlaps
5052 the place the value is being stored, use a safe function when copying
5053 a value through a pointer into a structure value return block. */
5054 if (TREE_CODE (to) == RESULT_DECL
5055 && TREE_CODE (from) == INDIRECT_REF
5056 && ADDR_SPACE_GENERIC_P
5057 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5058 && refs_may_alias_p (to, from)
5059 && cfun->returns_struct
5060 && !cfun->returns_pcc_struct)
5062 rtx from_rtx, size;
5064 push_temp_slots ();
5065 size = expr_size (from);
5066 from_rtx = expand_normal (from);
5068 emit_library_call (memmove_libfunc, LCT_NORMAL,
5069 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5070 XEXP (from_rtx, 0), Pmode,
5071 convert_to_mode (TYPE_MODE (sizetype),
5072 size, TYPE_UNSIGNED (sizetype)),
5073 TYPE_MODE (sizetype));
5075 preserve_temp_slots (to_rtx);
5076 pop_temp_slots ();
5077 return;
5080 /* Compute FROM and store the value in the rtx we got. */
5082 push_temp_slots ();
5083 result = store_expr (from, to_rtx, 0, nontemporal);
5084 preserve_temp_slots (result);
5085 pop_temp_slots ();
5086 return;
5089 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5090 succeeded, false otherwise. */
5092 bool
5093 emit_storent_insn (rtx to, rtx from)
5095 struct expand_operand ops[2];
5096 enum machine_mode mode = GET_MODE (to);
5097 enum insn_code code = optab_handler (storent_optab, mode);
5099 if (code == CODE_FOR_nothing)
5100 return false;
5102 create_fixed_operand (&ops[0], to);
5103 create_input_operand (&ops[1], from, mode);
5104 return maybe_expand_insn (code, 2, ops);
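/* Illustrative sketch, not part of GCC: a caller that prefers a streaming
   (nontemporal) store but needs a fallback might do the following (the
   operand names and the helper name are assumptions for the example):  */

static void
example_nontemporal_store (rtx dest_mem, rtx src_reg)
{
  /* emit_storent_insn returns false when the target has no storent
     pattern for this mode; fall back to an ordinary move.  */
  if (!emit_storent_insn (dest_mem, src_reg))
    emit_move_insn (dest_mem, src_reg);
}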
5107 /* Generate code for computing expression EXP,
5108 and storing the value into TARGET.
5110 If the mode is BLKmode then we may return TARGET itself.
5111 It turns out that in BLKmode it doesn't cause a problem,
5112 because C has no operators that could combine two different
5113 assignments into the same BLKmode object with different values
5114 with no sequence point. Will other languages need this to
5115 be more thorough?
5117 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5118 stack, and block moves may need to be treated specially.
5120 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5123 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5125 rtx temp;
5126 rtx alt_rtl = NULL_RTX;
5127 location_t loc = curr_insn_location ();
5129 if (VOID_TYPE_P (TREE_TYPE (exp)))
5131 /* C++ can generate ?: expressions with a throw expression in one
5132 branch and an rvalue in the other. Here, we resolve attempts to
5133 store the throw expression's nonexistent result. */
5134 gcc_assert (!call_param_p);
5135 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5136 return NULL_RTX;
5138 if (TREE_CODE (exp) == COMPOUND_EXPR)
5140 /* Perform first part of compound expression, then assign from second
5141 part. */
5142 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5143 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5144 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5145 nontemporal);
5147 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5149 /* For a conditional expression, get a safe form of the target. Then
5150 test the condition, doing the appropriate assignment on either
5151 side. This avoids the creation of unnecessary temporaries.
5152 For non-BLKmode, it is more efficient not to do this. */
5154 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5156 do_pending_stack_adjust ();
5157 NO_DEFER_POP;
5158 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5159 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5160 nontemporal);
5161 emit_jump_insn (gen_jump (lab2));
5162 emit_barrier ();
5163 emit_label (lab1);
5164 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5165 nontemporal);
5166 emit_label (lab2);
5167 OK_DEFER_POP;
5169 return NULL_RTX;
5171 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5172 /* If this is a scalar in a register that is stored in a wider mode
5173 than the declared mode, compute the result into its declared mode
5174 and then convert to the wider mode. Our value is the computed
5175 expression. */
5177 rtx inner_target = 0;
5179 /* We can do the conversion inside EXP, which will often result
5180 in some optimizations. Do the conversion in two steps: first
5181 change the signedness, if needed, then the extend. But don't
5182 do this if the type of EXP is a subtype of something else
5183 since then the conversion might involve more than just
5184 converting modes. */
5185 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5186 && TREE_TYPE (TREE_TYPE (exp)) == 0
5187 && GET_MODE_PRECISION (GET_MODE (target))
5188 == TYPE_PRECISION (TREE_TYPE (exp)))
5190 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5191 != SUBREG_PROMOTED_UNSIGNED_P (target))
5193 /* Some types, e.g. Fortran's logical*4, won't have a signed
5194 version, so use the mode instead. */
5195 tree ntype
5196 = (signed_or_unsigned_type_for
5197 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5198 if (ntype == NULL)
5199 ntype = lang_hooks.types.type_for_mode
5200 (TYPE_MODE (TREE_TYPE (exp)),
5201 SUBREG_PROMOTED_UNSIGNED_P (target));
5203 exp = fold_convert_loc (loc, ntype, exp);
5206 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5207 (GET_MODE (SUBREG_REG (target)),
5208 SUBREG_PROMOTED_UNSIGNED_P (target)),
5209 exp);
5211 inner_target = SUBREG_REG (target);
5214 temp = expand_expr (exp, inner_target, VOIDmode,
5215 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5217 /* If TEMP is a VOIDmode constant, use convert_modes to make
5218 sure that we properly convert it. */
5219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5221 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5222 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5223 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5224 GET_MODE (target), temp,
5225 SUBREG_PROMOTED_UNSIGNED_P (target));
5228 convert_move (SUBREG_REG (target), temp,
5229 SUBREG_PROMOTED_UNSIGNED_P (target));
5231 return NULL_RTX;
5233 else if ((TREE_CODE (exp) == STRING_CST
5234 || (TREE_CODE (exp) == MEM_REF
5235 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5236 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5237 == STRING_CST
5238 && integer_zerop (TREE_OPERAND (exp, 1))))
5239 && !nontemporal && !call_param_p
5240 && MEM_P (target))
5242 /* Optimize initialization of an array with a STRING_CST. */
5243 HOST_WIDE_INT exp_len, str_copy_len;
5244 rtx dest_mem;
5245 tree str = TREE_CODE (exp) == STRING_CST
5246 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5248 exp_len = int_expr_size (exp);
5249 if (exp_len <= 0)
5250 goto normal_expr;
5252 if (TREE_STRING_LENGTH (str) <= 0)
5253 goto normal_expr;
5255 str_copy_len = strlen (TREE_STRING_POINTER (str));
5256 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5257 goto normal_expr;
5259 str_copy_len = TREE_STRING_LENGTH (str);
5260 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5261 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5263 str_copy_len += STORE_MAX_PIECES - 1;
5264 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5266 str_copy_len = MIN (str_copy_len, exp_len);
5267 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5268 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5269 MEM_ALIGN (target), false))
5270 goto normal_expr;
5272 dest_mem = target;
5274 dest_mem = store_by_pieces (dest_mem,
5275 str_copy_len, builtin_strncpy_read_str,
5276 CONST_CAST (char *,
5277 TREE_STRING_POINTER (str)),
5278 MEM_ALIGN (target), false,
5279 exp_len > str_copy_len ? 1 : 0);
5280 if (exp_len > str_copy_len)
5281 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5282 GEN_INT (exp_len - str_copy_len),
5283 BLOCK_OP_NORMAL);
5284 return NULL_RTX;
5286 else
5288 rtx tmp_target;
5290 normal_expr:
5291 /* If we want to use a nontemporal store, force the value into a
5292 register first. */
5293 tmp_target = nontemporal ? NULL_RTX : target;
5294 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5295 (call_param_p
5296 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5297 &alt_rtl, false);
5300 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5301 the same as that of TARGET, adjust the constant. This is needed, for
5302 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5303 only a word-sized value. */
5304 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5305 && TREE_CODE (exp) != ERROR_MARK
5306 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5307 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5308 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5310 /* If value was not generated in the target, store it there.
5311 Convert the value to TARGET's type first if necessary and emit the
5312 pending incrementations that have been queued when expanding EXP.
5313 Note that we cannot emit the whole queue blindly because this will
5314 effectively disable the POST_INC optimization later.
5316 If TEMP and TARGET compare equal according to rtx_equal_p, but
5317 one or both of them are volatile memory refs, we have to distinguish
5318 two cases:
5319 - expand_expr has used TARGET. In this case, we must not generate
5320 another copy. This can be detected by TARGET being equal according
5321 to == .
5322 - expand_expr has not used TARGET - that means that the source just
5323 happens to have the same RTX form. Since temp will have been created
5324 by expand_expr, it will compare unequal according to == .
5325 We must generate a copy in this case, to reach the correct number
5326 of volatile memory references. */
5328 if ((! rtx_equal_p (temp, target)
5329 || (temp != target && (side_effects_p (temp)
5330 || side_effects_p (target))))
5331 && TREE_CODE (exp) != ERROR_MARK
5332 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5333 but TARGET is not valid memory reference, TEMP will differ
5334 from TARGET although it is really the same location. */
5335 && !(alt_rtl
5336 && rtx_equal_p (alt_rtl, target)
5337 && !side_effects_p (alt_rtl)
5338 && !side_effects_p (target))
5339 /* If there's nothing to copy, don't bother. Don't call
5340 expr_size unless necessary, because some front ends' (C++)
5341 expr_size hook must not be given objects that are not
5342 supposed to be bit-copied or bit-initialized. */
5343 && expr_size (exp) != const0_rtx)
5345 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5347 if (GET_MODE (target) == BLKmode)
5349 /* Handle calls that return BLKmode values in registers. */
5350 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5351 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5352 else
5353 store_bit_field (target,
5354 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5355 0, 0, 0, GET_MODE (temp), temp);
5357 else
5358 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5361 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5363 /* Handle copying a string constant into an array. The string
5364 constant may be shorter than the array. So copy just the string's
5365 actual length, and clear the rest. First get the size of the data
5366 type of the string, which is actually the size of the target. */
5367 rtx size = expr_size (exp);
5369 if (CONST_INT_P (size)
5370 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5371 emit_block_move (target, temp, size,
5372 (call_param_p
5373 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5374 else
5376 enum machine_mode pointer_mode
5377 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5378 enum machine_mode address_mode = get_address_mode (target);
5380 /* Compute the size of the data to copy from the string. */
5381 tree copy_size
5382 = size_binop_loc (loc, MIN_EXPR,
5383 make_tree (sizetype, size),
5384 size_int (TREE_STRING_LENGTH (exp)));
5385 rtx copy_size_rtx
5386 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5387 (call_param_p
5388 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5389 rtx label = 0;
5391 /* Copy that much. */
5392 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5393 TYPE_UNSIGNED (sizetype));
5394 emit_block_move (target, temp, copy_size_rtx,
5395 (call_param_p
5396 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5398 /* Figure out how much is left in TARGET that we have to clear.
5399 Do all calculations in pointer_mode. */
5400 if (CONST_INT_P (copy_size_rtx))
5402 size = plus_constant (address_mode, size,
5403 -INTVAL (copy_size_rtx));
5404 target = adjust_address (target, BLKmode,
5405 INTVAL (copy_size_rtx));
5407 else
5409 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5410 copy_size_rtx, NULL_RTX, 0,
5411 OPTAB_LIB_WIDEN);
5413 if (GET_MODE (copy_size_rtx) != address_mode)
5414 copy_size_rtx = convert_to_mode (address_mode,
5415 copy_size_rtx,
5416 TYPE_UNSIGNED (sizetype));
5418 target = offset_address (target, copy_size_rtx,
5419 highest_pow2_factor (copy_size));
5420 label = gen_label_rtx ();
5421 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5422 GET_MODE (size), 0, label);
5425 if (size != const0_rtx)
5426 clear_storage (target, size, BLOCK_OP_NORMAL);
5428 if (label)
5429 emit_label (label);
5432 /* Handle calls that return values in multiple non-contiguous locations.
5433 The Irix 6 ABI has examples of this. */
5434 else if (GET_CODE (target) == PARALLEL)
5436 if (GET_CODE (temp) == PARALLEL)
5437 emit_group_move (target, temp);
5438 else
5439 emit_group_load (target, temp, TREE_TYPE (exp),
5440 int_size_in_bytes (TREE_TYPE (exp)));
5442 else if (GET_CODE (temp) == PARALLEL)
5443 emit_group_store (target, temp, TREE_TYPE (exp),
5444 int_size_in_bytes (TREE_TYPE (exp)));
5445 else if (GET_MODE (temp) == BLKmode)
5446 emit_block_move (target, temp, expr_size (exp),
5447 (call_param_p
5448 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5449 /* If we emit a nontemporal store, there is nothing else to do. */
5450 else if (nontemporal && emit_storent_insn (target, temp))
5452 else
5454 temp = force_operand (temp, target);
5455 if (temp != target)
5456 emit_move_insn (target, temp);
5460 return NULL_RTX;
5463 /* Return true if field F of structure TYPE is a flexible array. */
5465 static bool
5466 flexible_array_member_p (const_tree f, const_tree type)
5468 const_tree tf;
5470 tf = TREE_TYPE (f);
5471 return (DECL_CHAIN (f) == NULL
5472 && TREE_CODE (tf) == ARRAY_TYPE
5473 && TYPE_DOMAIN (tf)
5474 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5475 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5476 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5477 && int_size_in_bytes (type) >= 0);
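/* Illustrative example, not part of GCC: the C99 construct the predicate
   above recognizes (the names are made up):

     struct packet
     {
       int len;
       unsigned char data[];   // flexible array member: last field,
                               // zero lower bound, no upper bound
     };

   Such a trailing array is not counted by count_type_elements below,
   since a constructor is not expected to initialize it.  */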
5480 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5481 must have in order for it to completely initialize a value of type TYPE.
5482 Return -1 if the number isn't known.
5484 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5486 static HOST_WIDE_INT
5487 count_type_elements (const_tree type, bool for_ctor_p)
5489 switch (TREE_CODE (type))
5491 case ARRAY_TYPE:
5493 tree nelts;
5495 nelts = array_type_nelts (type);
5496 if (nelts && tree_fits_uhwi_p (nelts))
5498 unsigned HOST_WIDE_INT n;
5500 n = tree_to_uhwi (nelts) + 1;
5501 if (n == 0 || for_ctor_p)
5502 return n;
5503 else
5504 return n * count_type_elements (TREE_TYPE (type), false);
5506 return for_ctor_p ? -1 : 1;
5509 case RECORD_TYPE:
5511 unsigned HOST_WIDE_INT n;
5512 tree f;
5514 n = 0;
5515 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5516 if (TREE_CODE (f) == FIELD_DECL)
5518 if (!for_ctor_p)
5519 n += count_type_elements (TREE_TYPE (f), false);
5520 else if (!flexible_array_member_p (f, type))
5521 /* Don't count flexible arrays, which are not supposed
5522 to be initialized. */
5523 n += 1;
5526 return n;
5529 case UNION_TYPE:
5530 case QUAL_UNION_TYPE:
5532 tree f;
5533 HOST_WIDE_INT n, m;
5535 gcc_assert (!for_ctor_p);
5536 /* Estimate the number of scalars in each field and pick the
5537 maximum. Other estimates would do instead; the idea is simply
5538 to make sure that the estimate is not sensitive to the ordering
5539 of the fields. */
5540 n = 1;
5541 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5542 if (TREE_CODE (f) == FIELD_DECL)
5544 m = count_type_elements (TREE_TYPE (f), false);
5545 /* If the field doesn't span the whole union, add an extra
5546 scalar for the rest. */
5547 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5548 TYPE_SIZE (type)) != 1)
5549 m++;
5550 if (n < m)
5551 n = m;
5553 return n;
5556 case COMPLEX_TYPE:
5557 return 2;
5559 case VECTOR_TYPE:
5560 return TYPE_VECTOR_SUBPARTS (type);
5562 case INTEGER_TYPE:
5563 case REAL_TYPE:
5564 case FIXED_POINT_TYPE:
5565 case ENUMERAL_TYPE:
5566 case BOOLEAN_TYPE:
5567 case POINTER_TYPE:
5568 case OFFSET_TYPE:
5569 case REFERENCE_TYPE:
5570 case NULLPTR_TYPE:
5571 return 1;
5573 case ERROR_MARK:
5574 return 0;
5576 case VOID_TYPE:
5577 case METHOD_TYPE:
5578 case FUNCTION_TYPE:
5579 case LANG_TYPE:
5580 default:
5581 gcc_unreachable ();
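/* Worked example (exposition only; the exact counts depend on how the
   front end lays out the type):

       struct s { int a; double b[3]; };

   count_type_elements (s, true) would return 2, the number of top-level
   fields a complete constructor needs, while count_type_elements (s,
   false) would return 4, an estimate of the scalars involved (a plus
   the three array elements).  */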
5585 /* Helper for categorize_ctor_elements. Identical interface. */
5587 static bool
5588 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5589 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5591 unsigned HOST_WIDE_INT idx;
5592 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5593 tree value, purpose, elt_type;
5595 /* Whether CTOR is a valid constant initializer, in accordance with what
5596 initializer_constant_valid_p does. If inferred from the constructor
5597 elements, true until proven otherwise. */
5598 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5599 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5601 nz_elts = 0;
5602 init_elts = 0;
5603 num_fields = 0;
5604 elt_type = NULL_TREE;
5606 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5608 HOST_WIDE_INT mult = 1;
5610 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5612 tree lo_index = TREE_OPERAND (purpose, 0);
5613 tree hi_index = TREE_OPERAND (purpose, 1);
5615 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5616 mult = (tree_to_uhwi (hi_index)
5617 - tree_to_uhwi (lo_index) + 1);
5619 num_fields += mult;
5620 elt_type = TREE_TYPE (value);
5622 switch (TREE_CODE (value))
5624 case CONSTRUCTOR:
5626 HOST_WIDE_INT nz = 0, ic = 0;
5628 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5629 p_complete);
5631 nz_elts += mult * nz;
5632 init_elts += mult * ic;
5634 if (const_from_elts_p && const_p)
5635 const_p = const_elt_p;
5637 break;
5639 case INTEGER_CST:
5640 case REAL_CST:
5641 case FIXED_CST:
5642 if (!initializer_zerop (value))
5643 nz_elts += mult;
5644 init_elts += mult;
5645 break;
5647 case STRING_CST:
5648 nz_elts += mult * TREE_STRING_LENGTH (value);
5649 init_elts += mult * TREE_STRING_LENGTH (value);
5650 break;
5652 case COMPLEX_CST:
5653 if (!initializer_zerop (TREE_REALPART (value)))
5654 nz_elts += mult;
5655 if (!initializer_zerop (TREE_IMAGPART (value)))
5656 nz_elts += mult;
5657 init_elts += mult;
5658 break;
5660 case VECTOR_CST:
5662 unsigned i;
5663 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5665 tree v = VECTOR_CST_ELT (value, i);
5666 if (!initializer_zerop (v))
5667 nz_elts += mult;
5668 init_elts += mult;
5671 break;
5673 default:
5675 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5676 nz_elts += mult * tc;
5677 init_elts += mult * tc;
5679 if (const_from_elts_p && const_p)
5680 const_p = initializer_constant_valid_p (value, elt_type)
5681 != NULL_TREE;
5683 break;
5687 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5688 num_fields, elt_type))
5689 *p_complete = false;
5691 *p_nz_elts += nz_elts;
5692 *p_init_elts += init_elts;
5694 return const_p;
5697 /* Examine CTOR to discover:
5698 * how many scalar fields are set to nonzero values,
5699 and place it in *P_NZ_ELTS;
5700 * how many scalar fields in total are in CTOR,
5701 and place it in *P_ELT_COUNT.
5702 * whether the constructor is complete -- in the sense that every
5703 meaningful byte is explicitly given a value --
5704 and place it in *P_COMPLETE.
5706 Return whether or not CTOR is a valid static constant initializer, the same
5707 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5709 bool
5710 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5711 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5713 *p_nz_elts = 0;
5714 *p_init_elts = 0;
5715 *p_complete = true;
5717 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
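/* Rough illustration of the categorization (hypothetical, assuming a
   plain array initializer):

       int v[4] = { 7, 0, 0, 3 };

   For the CONSTRUCTOR of v we would expect *P_NZ_ELTS == 2 (the two
   nonzero scalars), *P_INIT_ELTS == 4, *P_COMPLETE == true, and a true
   return value, since every element is a constant.  */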
5720 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5721 of which had type LAST_TYPE. Each element was itself a complete
5722 initializer, in the sense that every meaningful byte was explicitly
5723 given a value. Return true if the same is true for the constructor
5724 as a whole. */
5726 bool
5727 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5728 const_tree last_type)
5730 if (TREE_CODE (type) == UNION_TYPE
5731 || TREE_CODE (type) == QUAL_UNION_TYPE)
5733 if (num_elts == 0)
5734 return false;
5736 gcc_assert (num_elts == 1 && last_type);
5738 /* ??? We could look at each element of the union, and find the
6739 largest element, which would avoid comparing the size of the
5740 initialized element against any tail padding in the union.
5741 Doesn't seem worth the effort... */
5742 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5745 return count_type_elements (type, true) == num_elts;
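/* For instance (illustrative only): a union counts as complete only when
   its single initialized member is as large as the union itself, so

       union u { char c; int i; } x = { .i = 1 };

   would be complete, whereas an initializer for .c alone would not.  */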
5748 /* Return 1 if EXP contains mostly (3/4) zeros. */
5750 static int
5751 mostly_zeros_p (const_tree exp)
5753 if (TREE_CODE (exp) == CONSTRUCTOR)
5755 HOST_WIDE_INT nz_elts, init_elts;
5756 bool complete_p;
5758 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5759 return !complete_p || nz_elts < init_elts / 4;
5762 return initializer_zerop (exp);
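/* Example of the 3/4 heuristic (added for clarity): an initializer such
   as { 0, 0, 0, 0, 0, 0, 0, 1 } has nz_elts == 1 and init_elts == 8,
   and 1 < 8 / 4, so it counts as mostly zeros; an incomplete
   constructor is treated the same way.  */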
5765 /* Return 1 if EXP contains all zeros. */
5767 static int
5768 all_zeros_p (const_tree exp)
5770 if (TREE_CODE (exp) == CONSTRUCTOR)
5772 HOST_WIDE_INT nz_elts, init_elts;
5773 bool complete_p;
5775 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5776 return nz_elts == 0;
5779 return initializer_zerop (exp);
5782 /* Helper function for store_constructor.
5783 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5784 CLEARED is as for store_constructor.
5785 ALIAS_SET is the alias set to use for any stores.
5787 This provides a recursive shortcut back to store_constructor when it isn't
5788 necessary to go through store_field. This is so that we can pass through
5789 the cleared field to let store_constructor know that we may not have to
5790 clear a substructure if the outer structure has already been cleared. */
5792 static void
5793 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5794 HOST_WIDE_INT bitpos, enum machine_mode mode,
5795 tree exp, int cleared, alias_set_type alias_set)
5797 if (TREE_CODE (exp) == CONSTRUCTOR
5798 /* We can only call store_constructor recursively if the size and
5799 bit position are on a byte boundary. */
5800 && bitpos % BITS_PER_UNIT == 0
5801 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5802 /* If we have a nonzero bitpos for a register target, then we just
5803 let store_field do the bitfield handling. This is unlikely to
5804 generate unnecessary clear instructions anyway. */
5805 && (bitpos == 0 || MEM_P (target)))
5807 if (MEM_P (target))
5808 target
5809 = adjust_address (target,
5810 GET_MODE (target) == BLKmode
5811 || 0 != (bitpos
5812 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5813 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5816 /* Update the alias set, if required. */
5817 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5818 && MEM_ALIAS_SET (target) != 0)
5820 target = copy_rtx (target);
5821 set_mem_alias_set (target, alias_set);
5824 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5826 else
5827 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5831 /* Returns the number of FIELD_DECLs in TYPE. */
5833 static int
5834 fields_length (const_tree type)
5836 tree t = TYPE_FIELDS (type);
5837 int count = 0;
5839 for (; t; t = DECL_CHAIN (t))
5840 if (TREE_CODE (t) == FIELD_DECL)
5841 ++count;
5843 return count;
5847 /* Store the value of constructor EXP into the rtx TARGET.
5848 TARGET is either a REG or a MEM; we know it cannot conflict, since
5849 safe_from_p has been called.
5850 CLEARED is true if TARGET is known to have been zero'd.
5851 SIZE is the number of bytes of TARGET we are allowed to modify: this
5852 may not be the same as the size of EXP if we are assigning to a field
5853 which has been packed to exclude padding bits. */
5855 static void
5856 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5858 tree type = TREE_TYPE (exp);
5859 #ifdef WORD_REGISTER_OPERATIONS
5860 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5861 #endif
5863 switch (TREE_CODE (type))
5865 case RECORD_TYPE:
5866 case UNION_TYPE:
5867 case QUAL_UNION_TYPE:
5869 unsigned HOST_WIDE_INT idx;
5870 tree field, value;
5872 /* If size is zero or the target is already cleared, do nothing. */
5873 if (size == 0 || cleared)
5874 cleared = 1;
5875 /* We either clear the aggregate or indicate the value is dead. */
5876 else if ((TREE_CODE (type) == UNION_TYPE
5877 || TREE_CODE (type) == QUAL_UNION_TYPE)
5878 && ! CONSTRUCTOR_ELTS (exp))
5879 /* If the constructor is empty, clear the union. */
5881 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5882 cleared = 1;
5885 /* If we are building a static constructor into a register,
5886 set the initial value as zero so we can fold the value into
5887 a constant. But if more than one register is involved,
5888 this probably loses. */
5889 else if (REG_P (target) && TREE_STATIC (exp)
5890 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5892 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5893 cleared = 1;
5896 /* If the constructor has fewer fields than the structure or
5897 if we are initializing the structure to mostly zeros, clear
5898 the whole structure first. Don't do this if TARGET is a
5899 register whose mode size isn't equal to SIZE since
5900 clear_storage can't handle this case. */
5901 else if (size > 0
5902 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5903 != fields_length (type))
5904 || mostly_zeros_p (exp))
5905 && (!REG_P (target)
5906 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5907 == size)))
5909 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5910 cleared = 1;
5913 if (REG_P (target) && !cleared)
5914 emit_clobber (target);
5916 /* Store each element of the constructor into the
5917 corresponding field of TARGET. */
5918 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5920 enum machine_mode mode;
5921 HOST_WIDE_INT bitsize;
5922 HOST_WIDE_INT bitpos = 0;
5923 tree offset;
5924 rtx to_rtx = target;
5926 /* Just ignore missing fields. We cleared the whole
5927 structure, above, if any fields are missing. */
5928 if (field == 0)
5929 continue;
5931 if (cleared && initializer_zerop (value))
5932 continue;
5934 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5935 bitsize = tree_to_uhwi (DECL_SIZE (field));
5936 else
5937 bitsize = -1;
5939 mode = DECL_MODE (field);
5940 if (DECL_BIT_FIELD (field))
5941 mode = VOIDmode;
5943 offset = DECL_FIELD_OFFSET (field);
5944 if (tree_fits_shwi_p (offset)
5945 && tree_fits_shwi_p (bit_position (field)))
5947 bitpos = int_bit_position (field);
5948 offset = 0;
5950 else
5951 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5953 if (offset)
5955 enum machine_mode address_mode;
5956 rtx offset_rtx;
5958 offset
5959 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5960 make_tree (TREE_TYPE (exp),
5961 target));
5963 offset_rtx = expand_normal (offset);
5964 gcc_assert (MEM_P (to_rtx));
5966 address_mode = get_address_mode (to_rtx);
5967 if (GET_MODE (offset_rtx) != address_mode)
5968 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5970 to_rtx = offset_address (to_rtx, offset_rtx,
5971 highest_pow2_factor (offset));
5974 #ifdef WORD_REGISTER_OPERATIONS
5975 /* If this initializes a field that is smaller than a
5976 word, at the start of a word, try to widen it to a full
5977 word. This special case allows us to output C++ member
5978 function initializations in a form that the optimizers
5979 can understand. */
5980 if (REG_P (target)
5981 && bitsize < BITS_PER_WORD
5982 && bitpos % BITS_PER_WORD == 0
5983 && GET_MODE_CLASS (mode) == MODE_INT
5984 && TREE_CODE (value) == INTEGER_CST
5985 && exp_size >= 0
5986 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5988 tree type = TREE_TYPE (value);
5990 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5992 type = lang_hooks.types.type_for_mode
5993 (word_mode, TYPE_UNSIGNED (type));
5994 value = fold_convert (type, value);
5997 if (BYTES_BIG_ENDIAN)
5998 value
5999 = fold_build2 (LSHIFT_EXPR, type, value,
6000 build_int_cst (type,
6001 BITS_PER_WORD - bitsize));
6002 bitsize = BITS_PER_WORD;
6003 mode = word_mode;
6005 #endif
6007 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6008 && DECL_NONADDRESSABLE_P (field))
6010 to_rtx = copy_rtx (to_rtx);
6011 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6014 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6015 value, cleared,
6016 get_alias_set (TREE_TYPE (field)));
6018 break;
6020 case ARRAY_TYPE:
6022 tree value, index;
6023 unsigned HOST_WIDE_INT i;
6024 int need_to_clear;
6025 tree domain;
6026 tree elttype = TREE_TYPE (type);
6027 int const_bounds_p;
6028 HOST_WIDE_INT minelt = 0;
6029 HOST_WIDE_INT maxelt = 0;
6031 domain = TYPE_DOMAIN (type);
6032 const_bounds_p = (TYPE_MIN_VALUE (domain)
6033 && TYPE_MAX_VALUE (domain)
6034 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6035 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6037 /* If we have constant bounds for the range of the type, get them. */
6038 if (const_bounds_p)
6040 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6041 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6044 /* If the constructor has fewer elements than the array, clear
6045 the whole array first. Similarly if this is a static
6046 constructor of a non-BLKmode object. */
6047 if (cleared)
6048 need_to_clear = 0;
6049 else if (REG_P (target) && TREE_STATIC (exp))
6050 need_to_clear = 1;
6051 else
6053 unsigned HOST_WIDE_INT idx;
6054 tree index, value;
6055 HOST_WIDE_INT count = 0, zero_count = 0;
6056 need_to_clear = ! const_bounds_p;
6058 /* This loop is a more accurate version of the loop in
6059 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6060 is also needed to check for missing elements. */
6061 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6063 HOST_WIDE_INT this_node_count;
6065 if (need_to_clear)
6066 break;
6068 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6070 tree lo_index = TREE_OPERAND (index, 0);
6071 tree hi_index = TREE_OPERAND (index, 1);
6073 if (! tree_fits_uhwi_p (lo_index)
6074 || ! tree_fits_uhwi_p (hi_index))
6076 need_to_clear = 1;
6077 break;
6080 this_node_count = (tree_to_uhwi (hi_index)
6081 - tree_to_uhwi (lo_index) + 1);
6083 else
6084 this_node_count = 1;
6086 count += this_node_count;
6087 if (mostly_zeros_p (value))
6088 zero_count += this_node_count;
6091 /* Clear the entire array first if there are any missing
6092 elements, or if the incidence of zero elements is >=
6093 75%. */
6094 if (! need_to_clear
6095 && (count < maxelt - minelt + 1
6096 || 4 * zero_count >= 3 * count))
6097 need_to_clear = 1;
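      /* Numeric illustration (exposition only): with count == 100 and
         zero_count == 80, 4 * 80 >= 3 * 100 holds, so the whole array
         is cleared up front and only the ~20 nonzero elements are
         stored individually afterwards.  */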
6100 if (need_to_clear && size > 0)
6102 if (REG_P (target))
6103 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6104 else
6105 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6106 cleared = 1;
6109 if (!cleared && REG_P (target))
6110 /* Inform later passes that the old value is dead. */
6111 emit_clobber (target);
6113 /* Store each element of the constructor into the
6114 corresponding element of TARGET, determined by counting the
6115 elements. */
6116 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6118 enum machine_mode mode;
6119 HOST_WIDE_INT bitsize;
6120 HOST_WIDE_INT bitpos;
6121 rtx xtarget = target;
6123 if (cleared && initializer_zerop (value))
6124 continue;
6126 mode = TYPE_MODE (elttype);
6127 if (mode == BLKmode)
6128 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6129 ? tree_to_uhwi (TYPE_SIZE (elttype))
6130 : -1);
6131 else
6132 bitsize = GET_MODE_BITSIZE (mode);
6134 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6136 tree lo_index = TREE_OPERAND (index, 0);
6137 tree hi_index = TREE_OPERAND (index, 1);
6138 rtx index_r, pos_rtx;
6139 HOST_WIDE_INT lo, hi, count;
6140 tree position;
6142 /* If the range is constant and "small", unroll the loop. */
6143 if (const_bounds_p
6144 && tree_fits_shwi_p (lo_index)
6145 && tree_fits_shwi_p (hi_index)
6146 && (lo = tree_to_shwi (lo_index),
6147 hi = tree_to_shwi (hi_index),
6148 count = hi - lo + 1,
6149 (!MEM_P (target)
6150 || count <= 2
6151 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6152 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6153 <= 40 * 8)))))
6155 lo -= minelt; hi -= minelt;
6156 for (; lo <= hi; lo++)
6158 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6160 if (MEM_P (target)
6161 && !MEM_KEEP_ALIAS_SET_P (target)
6162 && TREE_CODE (type) == ARRAY_TYPE
6163 && TYPE_NONALIASED_COMPONENT (type))
6165 target = copy_rtx (target);
6166 MEM_KEEP_ALIAS_SET_P (target) = 1;
6169 store_constructor_field
6170 (target, bitsize, bitpos, mode, value, cleared,
6171 get_alias_set (elttype));
6174 else
6176 rtx loop_start = gen_label_rtx ();
6177 rtx loop_end = gen_label_rtx ();
6178 tree exit_cond;
6180 expand_normal (hi_index);
6182 index = build_decl (EXPR_LOCATION (exp),
6183 VAR_DECL, NULL_TREE, domain);
6184 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6185 SET_DECL_RTL (index, index_r);
6186 store_expr (lo_index, index_r, 0, false);
6188 /* Build the head of the loop. */
6189 do_pending_stack_adjust ();
6190 emit_label (loop_start);
6192 /* Assign value to element index. */
6193 position =
6194 fold_convert (ssizetype,
6195 fold_build2 (MINUS_EXPR,
6196 TREE_TYPE (index),
6197 index,
6198 TYPE_MIN_VALUE (domain)));
6200 position =
6201 size_binop (MULT_EXPR, position,
6202 fold_convert (ssizetype,
6203 TYPE_SIZE_UNIT (elttype)));
6205 pos_rtx = expand_normal (position);
6206 xtarget = offset_address (target, pos_rtx,
6207 highest_pow2_factor (position));
6208 xtarget = adjust_address (xtarget, mode, 0);
6209 if (TREE_CODE (value) == CONSTRUCTOR)
6210 store_constructor (value, xtarget, cleared,
6211 bitsize / BITS_PER_UNIT);
6212 else
6213 store_expr (value, xtarget, 0, false);
6215 /* Generate a conditional jump to exit the loop. */
6216 exit_cond = build2 (LT_EXPR, integer_type_node,
6217 index, hi_index);
6218 jumpif (exit_cond, loop_end, -1);
6220 /* Update the loop counter, and jump to the head of
6221 the loop. */
6222 expand_assignment (index,
6223 build2 (PLUS_EXPR, TREE_TYPE (index),
6224 index, integer_one_node),
6225 false);
6227 emit_jump (loop_start);
6229 /* Build the end of the loop. */
6230 emit_label (loop_end);
6233 else if ((index != 0 && ! tree_fits_shwi_p (index))
6234 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6236 tree position;
6238 if (index == 0)
6239 index = ssize_int (1);
6241 if (minelt)
6242 index = fold_convert (ssizetype,
6243 fold_build2 (MINUS_EXPR,
6244 TREE_TYPE (index),
6245 index,
6246 TYPE_MIN_VALUE (domain)));
6248 position =
6249 size_binop (MULT_EXPR, index,
6250 fold_convert (ssizetype,
6251 TYPE_SIZE_UNIT (elttype)));
6252 xtarget = offset_address (target,
6253 expand_normal (position),
6254 highest_pow2_factor (position));
6255 xtarget = adjust_address (xtarget, mode, 0);
6256 store_expr (value, xtarget, 0, false);
6258 else
6260 if (index != 0)
6261 bitpos = ((tree_to_shwi (index) - minelt)
6262 * tree_to_uhwi (TYPE_SIZE (elttype)));
6263 else
6264 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6266 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6267 && TREE_CODE (type) == ARRAY_TYPE
6268 && TYPE_NONALIASED_COMPONENT (type))
6270 target = copy_rtx (target);
6271 MEM_KEEP_ALIAS_SET_P (target) = 1;
6273 store_constructor_field (target, bitsize, bitpos, mode, value,
6274 cleared, get_alias_set (elttype));
6277 break;
6280 case VECTOR_TYPE:
6282 unsigned HOST_WIDE_INT idx;
6283 constructor_elt *ce;
6284 int i;
6285 int need_to_clear;
6286 int icode = CODE_FOR_nothing;
6287 tree elttype = TREE_TYPE (type);
6288 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6289 enum machine_mode eltmode = TYPE_MODE (elttype);
6290 HOST_WIDE_INT bitsize;
6291 HOST_WIDE_INT bitpos;
6292 rtvec vector = NULL;
6293 unsigned n_elts;
6294 alias_set_type alias;
6296 gcc_assert (eltmode != BLKmode);
6298 n_elts = TYPE_VECTOR_SUBPARTS (type);
6299 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6301 enum machine_mode mode = GET_MODE (target);
6303 icode = (int) optab_handler (vec_init_optab, mode);
6304 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6305 if (icode != CODE_FOR_nothing)
6307 tree value;
6309 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6310 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6312 icode = CODE_FOR_nothing;
6313 break;
6316 if (icode != CODE_FOR_nothing)
6318 unsigned int i;
6320 vector = rtvec_alloc (n_elts);
6321 for (i = 0; i < n_elts; i++)
6322 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6326 /* If the constructor has fewer elements than the vector,
6327 clear the whole vector first. Similarly if this is a static
6328 constructor of a non-BLKmode object. */
6329 if (cleared)
6330 need_to_clear = 0;
6331 else if (REG_P (target) && TREE_STATIC (exp))
6332 need_to_clear = 1;
6333 else
6335 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6336 tree value;
6338 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6340 int n_elts_here = tree_to_uhwi
6341 (int_const_binop (TRUNC_DIV_EXPR,
6342 TYPE_SIZE (TREE_TYPE (value)),
6343 TYPE_SIZE (elttype)));
6345 count += n_elts_here;
6346 if (mostly_zeros_p (value))
6347 zero_count += n_elts_here;
6350 /* Clear the entire vector first if there are any missing elements,
6351 or if the incidence of zero elements is >= 75%. */
6352 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6355 if (need_to_clear && size > 0 && !vector)
6357 if (REG_P (target))
6358 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6359 else
6360 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6361 cleared = 1;
6364 /* Inform later passes that the old value is dead. */
6365 if (!cleared && !vector && REG_P (target))
6366 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6368 if (MEM_P (target))
6369 alias = MEM_ALIAS_SET (target);
6370 else
6371 alias = get_alias_set (elttype);
6373 /* Store each element of the constructor into the corresponding
6374 element of TARGET, determined by counting the elements. */
6375 for (idx = 0, i = 0;
6376 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6377 idx++, i += bitsize / elt_size)
6379 HOST_WIDE_INT eltpos;
6380 tree value = ce->value;
6382 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6383 if (cleared && initializer_zerop (value))
6384 continue;
6386 if (ce->index)
6387 eltpos = tree_to_uhwi (ce->index);
6388 else
6389 eltpos = i;
6391 if (vector)
6393 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6394 elements. */
6395 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6396 RTVEC_ELT (vector, eltpos)
6397 = expand_normal (value);
6399 else
6401 enum machine_mode value_mode =
6402 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6403 ? TYPE_MODE (TREE_TYPE (value))
6404 : eltmode;
6405 bitpos = eltpos * elt_size;
6406 store_constructor_field (target, bitsize, bitpos, value_mode,
6407 value, cleared, alias);
6411 if (vector)
6412 emit_insn (GEN_FCN (icode)
6413 (target,
6414 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6415 break;
6418 default:
6419 gcc_unreachable ();
6423 /* Store the value of EXP (an expression tree)
6424 into a subfield of TARGET which has mode MODE and occupies
6425 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6426 If MODE is VOIDmode, it means that we are storing into a bit-field.
6428 BITREGION_START is the bitpos of the first bitfield in this region.
6429 BITREGION_END is the bitpos of the ending bitfield in this region.
6430 These two fields are 0 if the C++ memory model does not apply,
6431 or we are not interested in keeping track of bitfield regions.
6433 Always return const0_rtx unless we have something particular to
6434 return.
6436 ALIAS_SET is the alias set for the destination. This value will
6437 (in general) be different from that for TARGET, since TARGET is a
6438 reference to the containing structure.
6440 If NONTEMPORAL is true, try generating a nontemporal store. */
6442 static rtx
6443 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6444 unsigned HOST_WIDE_INT bitregion_start,
6445 unsigned HOST_WIDE_INT bitregion_end,
6446 enum machine_mode mode, tree exp,
6447 alias_set_type alias_set, bool nontemporal)
6449 if (TREE_CODE (exp) == ERROR_MARK)
6450 return const0_rtx;
6452 /* If we have nothing to store, do nothing unless the expression has
6453 side-effects. */
6454 if (bitsize == 0)
6455 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6457 if (GET_CODE (target) == CONCAT)
6459 /* We're storing into a struct containing a single __complex. */
6461 gcc_assert (!bitpos);
6462 return store_expr (exp, target, 0, nontemporal);
6465 /* If the structure is in a register or if the component
6466 is a bit field, we cannot use addressing to access it.
6467 Use bit-field techniques or SUBREG to store in it. */
6469 if (mode == VOIDmode
6470 || (mode != BLKmode && ! direct_store[(int) mode]
6471 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6472 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6473 || REG_P (target)
6474 || GET_CODE (target) == SUBREG
6475 /* If the field isn't aligned enough to store as an ordinary memref,
6476 store it as a bit field. */
6477 || (mode != BLKmode
6478 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6479 || bitpos % GET_MODE_ALIGNMENT (mode))
6480 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6481 || (bitpos % BITS_PER_UNIT != 0)))
6482 || (bitsize >= 0 && mode != BLKmode
6483 && GET_MODE_BITSIZE (mode) > bitsize)
6484 /* If the RHS and field are a constant size and the size of the
6485 RHS isn't the same size as the bitfield, we must use bitfield
6486 operations. */
6487 || (bitsize >= 0
6488 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6489 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6490 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6491 decl we must use bitfield operations. */
6492 || (bitsize >= 0
6493 && TREE_CODE (exp) == MEM_REF
6494 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6495 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6496 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6497 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6499 rtx temp;
6500 gimple nop_def;
6502 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6503 implies a mask operation. If the precision is the same size as
6504 the field we're storing into, that mask is redundant. This is
6505 particularly common with bit field assignments generated by the
6506 C front end. */
6507 nop_def = get_def_for_expr (exp, NOP_EXPR);
6508 if (nop_def)
6510 tree type = TREE_TYPE (exp);
6511 if (INTEGRAL_TYPE_P (type)
6512 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6513 && bitsize == TYPE_PRECISION (type))
6515 tree op = gimple_assign_rhs1 (nop_def);
6516 type = TREE_TYPE (op);
6517 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6518 exp = op;
6522 temp = expand_normal (exp);
6524 /* If BITSIZE is narrower than the size of the type of EXP
6525 we will be narrowing TEMP. Normally, what's wanted are the
6526 low-order bits. However, if EXP's type is a record and this is a
6527 big-endian machine, we want the upper BITSIZE bits. */
6528 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6529 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6530 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6531 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6532 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6533 NULL_RTX, 1);
6535 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6536 if (mode != VOIDmode && mode != BLKmode
6537 && mode != TYPE_MODE (TREE_TYPE (exp)))
6538 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6540 /* If the modes of TEMP and TARGET are both BLKmode, both
6541 must be in memory and BITPOS must be aligned on a byte
6542 boundary. If so, we simply do a block copy. Likewise
6543 for a BLKmode-like TARGET. */
6544 if (GET_MODE (temp) == BLKmode
6545 && (GET_MODE (target) == BLKmode
6546 || (MEM_P (target)
6547 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6548 && (bitpos % BITS_PER_UNIT) == 0
6549 && (bitsize % BITS_PER_UNIT) == 0)))
6551 gcc_assert (MEM_P (target) && MEM_P (temp)
6552 && (bitpos % BITS_PER_UNIT) == 0);
6554 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6555 emit_block_move (target, temp,
6556 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6557 / BITS_PER_UNIT),
6558 BLOCK_OP_NORMAL);
6560 return const0_rtx;
6563 /* Handle calls that return values in multiple non-contiguous locations.
6564 The Irix 6 ABI has examples of this. */
6565 if (GET_CODE (temp) == PARALLEL)
6567 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6568 rtx temp_target;
6569 if (mode == BLKmode)
6570 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6571 temp_target = gen_reg_rtx (mode);
6572 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6573 temp = temp_target;
6575 else if (mode == BLKmode)
6577 /* Handle calls that return BLKmode values in registers. */
6578 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6580 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6581 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6582 temp = temp_target;
6584 else
6586 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6587 rtx temp_target;
6588 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6589 temp_target = gen_reg_rtx (mode);
6590 temp_target
6591 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6592 temp_target, mode, mode);
6593 temp = temp_target;
6597 /* Store the value in the bitfield. */
6598 store_bit_field (target, bitsize, bitpos,
6599 bitregion_start, bitregion_end,
6600 mode, temp);
6602 return const0_rtx;
6604 else
6606 /* Now build a reference to just the desired component. */
6607 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6609 if (to_rtx == target)
6610 to_rtx = copy_rtx (to_rtx);
6612 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6613 set_mem_alias_set (to_rtx, alias_set);
6615 return store_expr (exp, to_rtx, 0, nontemporal);
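/* Illustrative call (hypothetical values): storing a 3-bit field that
   starts at bit 5 of its containing object arrives here with
   BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode, which forces the
   store_bit_field path rather than an ordinary memory store.  */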
6619 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6620 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6621 codes and find the ultimate containing object, which we return.
6623 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6624 bit position, and *PUNSIGNEDP to the signedness of the field.
6625 If the position of the field is variable, we store a tree
6626 giving the variable offset (in units) in *POFFSET.
6627 This offset is in addition to the bit position.
6628 If the position is not variable, we store 0 in *POFFSET.
6630 If any of the extraction expressions is volatile,
6631 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6633 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6634 Otherwise, it is a mode that can be used to access the field.
6636 If the field describes a variable-sized object, *PMODE is set to
6637 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6638 this case, but the address of the object can be found.
6640 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6641 look through nodes that serve as markers of a greater alignment than
6642 the one that can be deduced from the expression. These nodes make it
6643 possible for front-ends to prevent temporaries from being created by
6644 the middle-end on alignment considerations. For that purpose, the
6645 normal operating mode at high-level is to always pass FALSE so that
6646 the ultimate containing object is really returned; moreover, the
6647 associated predicate handled_component_p will always return TRUE
6648 on these nodes, thus indicating that they are essentially handled
6649 by get_inner_reference. TRUE should only be passed when the caller
6650 is scanning the expression in order to build another representation
6651 and specifically knows how to handle these nodes; as such, this is
6652 the normal operating mode in the RTL expanders. */
6654 tree
6655 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6656 HOST_WIDE_INT *pbitpos, tree *poffset,
6657 enum machine_mode *pmode, int *punsignedp,
6658 int *pvolatilep, bool keep_aligning)
6660 tree size_tree = 0;
6661 enum machine_mode mode = VOIDmode;
6662 bool blkmode_bitfield = false;
6663 tree offset = size_zero_node;
6664 offset_int bit_offset = 0;
6666 /* First get the mode, signedness, and size. We do this from just the
6667 outermost expression. */
6668 *pbitsize = -1;
6669 if (TREE_CODE (exp) == COMPONENT_REF)
6671 tree field = TREE_OPERAND (exp, 1);
6672 size_tree = DECL_SIZE (field);
6673 if (flag_strict_volatile_bitfields > 0
6674 && TREE_THIS_VOLATILE (exp)
6675 && DECL_BIT_FIELD_TYPE (field)
6676 && DECL_MODE (field) != BLKmode)
6677 /* Volatile bitfields should be accessed in the mode of the
6678 field's type, not the mode computed based on the bit
6679 size. */
6680 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6681 else if (!DECL_BIT_FIELD (field))
6682 mode = DECL_MODE (field);
6683 else if (DECL_MODE (field) == BLKmode)
6684 blkmode_bitfield = true;
6686 *punsignedp = DECL_UNSIGNED (field);
6688 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6690 size_tree = TREE_OPERAND (exp, 1);
6691 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6692 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6694 /* For vector types, if the access has the correct size, use the mode of
6695 the inner type. */
6696 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6697 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6698 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6699 mode = TYPE_MODE (TREE_TYPE (exp));
6701 else
6703 mode = TYPE_MODE (TREE_TYPE (exp));
6704 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6706 if (mode == BLKmode)
6707 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6708 else
6709 *pbitsize = GET_MODE_BITSIZE (mode);
6712 if (size_tree != 0)
6714 if (! tree_fits_uhwi_p (size_tree))
6715 mode = BLKmode, *pbitsize = -1;
6716 else
6717 *pbitsize = tree_to_uhwi (size_tree);
6720 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6721 and find the ultimate containing object. */
6722 while (1)
6724 switch (TREE_CODE (exp))
6726 case BIT_FIELD_REF:
6727 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6728 break;
6730 case COMPONENT_REF:
6732 tree field = TREE_OPERAND (exp, 1);
6733 tree this_offset = component_ref_field_offset (exp);
6735 /* If this field hasn't been filled in yet, don't go past it.
6736 This should only happen when folding expressions made during
6737 type construction. */
6738 if (this_offset == 0)
6739 break;
6741 offset = size_binop (PLUS_EXPR, offset, this_offset);
6742 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6744 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6746 break;
6748 case ARRAY_REF:
6749 case ARRAY_RANGE_REF:
6751 tree index = TREE_OPERAND (exp, 1);
6752 tree low_bound = array_ref_low_bound (exp);
6753 tree unit_size = array_ref_element_size (exp);
6755 /* We assume all arrays have sizes that are a multiple of a byte.
6756 First subtract the lower bound, if any, in the type of the
6757 index, then convert to sizetype and multiply by the size of
6758 the array element. */
6759 if (! integer_zerop (low_bound))
6760 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6761 index, low_bound);
6763 offset = size_binop (PLUS_EXPR, offset,
6764 size_binop (MULT_EXPR,
6765 fold_convert (sizetype, index),
6766 unit_size));
6768 break;
6770 case REALPART_EXPR:
6771 break;
6773 case IMAGPART_EXPR:
6774 bit_offset += *pbitsize;
6775 break;
6777 case VIEW_CONVERT_EXPR:
6778 if (keep_aligning && STRICT_ALIGNMENT
6779 && (TYPE_ALIGN (TREE_TYPE (exp))
6780 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6781 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6782 < BIGGEST_ALIGNMENT)
6783 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6784 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6785 goto done;
6786 break;
6788 case MEM_REF:
6789 /* Hand back the decl for MEM[&decl, off]. */
6790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6792 tree off = TREE_OPERAND (exp, 1);
6793 if (!integer_zerop (off))
6795 offset_int boff, coff = mem_ref_offset (exp);
6796 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6797 bit_offset += boff;
6799 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6801 goto done;
6803 default:
6804 goto done;
6807 /* If any reference in the chain is volatile, the effect is volatile. */
6808 if (TREE_THIS_VOLATILE (exp))
6809 *pvolatilep = 1;
6811 exp = TREE_OPERAND (exp, 0);
6813 done:
6815 /* If OFFSET is constant, see if we can return the whole thing as a
6816 constant bit position. Make sure to handle overflow during
6817 this conversion. */
6818 if (TREE_CODE (offset) == INTEGER_CST)
6820 offset_int tem = wi::sext (wi::to_offset (offset),
6821 TYPE_PRECISION (sizetype));
6822 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6823 tem += bit_offset;
6824 if (wi::fits_shwi_p (tem))
6826 *pbitpos = tem.to_shwi ();
6827 *poffset = offset = NULL_TREE;
6831 /* Otherwise, split it up. */
6832 if (offset)
6834 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6835 if (wi::neg_p (bit_offset))
6837 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6838 offset_int tem = bit_offset.and_not (mask);
6839 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6840 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6841 bit_offset -= tem;
6842 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6843 offset = size_binop (PLUS_EXPR, offset,
6844 wide_int_to_tree (sizetype, tem));
6847 *pbitpos = bit_offset.to_shwi ();
6848 *poffset = offset;
6851 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6852 if (mode == VOIDmode
6853 && blkmode_bitfield
6854 && (*pbitpos % BITS_PER_UNIT) == 0
6855 && (*pbitsize % BITS_PER_UNIT) == 0)
6856 *pmode = BLKmode;
6857 else
6858 *pmode = mode;
6860 return exp;
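/* Worked example (assuming a typical layout where int is 32 bits):
   given

       struct s { int pad; int f : 3; } x;

   a reference to x.f would come back with *PBITSIZE == 3,
   *PBITPOS == 32, *POFFSET == NULL_TREE and *PMODE == VOIDmode (a
   narrow bit-field), with the VAR_DECL for x returned as the
   containing object.  */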
6863 /* Return a tree of sizetype representing the size, in bytes, of the element
6864 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6866 tree
6867 array_ref_element_size (tree exp)
6869 tree aligned_size = TREE_OPERAND (exp, 3);
6870 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6871 location_t loc = EXPR_LOCATION (exp);
6873 /* If a size was specified in the ARRAY_REF, it's the size measured
6874 in alignment units of the element type. So multiply by that value. */
6875 if (aligned_size)
6877 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6878 sizetype from another type of the same width and signedness. */
6879 if (TREE_TYPE (aligned_size) != sizetype)
6880 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6881 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6882 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6885 /* Otherwise, take the size from that of the element type. Substitute
6886 any PLACEHOLDER_EXPR that we have. */
6887 else
6888 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6891 /* Return a tree representing the lower bound of the array mentioned in
6892 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6894 tree
6895 array_ref_low_bound (tree exp)
6897 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6899 /* If a lower bound is specified in EXP, use it. */
6900 if (TREE_OPERAND (exp, 2))
6901 return TREE_OPERAND (exp, 2);
6903 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6904 substituting for a PLACEHOLDER_EXPR as needed. */
6905 if (domain_type && TYPE_MIN_VALUE (domain_type))
6906 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6908 /* Otherwise, return a zero of the appropriate type. */
6909 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6912 /* Returns true if REF is an array reference to an array at the end of
6913 a structure. If this is the case, the array may be allocated larger
6914 than its upper bound implies. */
6916 bool
6917 array_at_struct_end_p (tree ref)
6919 if (TREE_CODE (ref) != ARRAY_REF
6920 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6921 return false;
6923 while (handled_component_p (ref))
6925 /* If the reference chain contains a component reference to a
6926 non-union type and there follows another field the reference
6927 is not at the end of a structure. */
6928 if (TREE_CODE (ref) == COMPONENT_REF
6929 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6931 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6932 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6933 nextf = DECL_CHAIN (nextf);
6934 if (nextf)
6935 return false;
6938 ref = TREE_OPERAND (ref, 0);
6941 /* If the reference is based on a declared entity, the size of the array
6942 is constrained by its given domain. */
6943 if (DECL_P (ref))
6944 return false;
6946 return true;
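/* Illustrative uses (not from the original sources): for

       struct msg { int n; char tail[1]; } *p;

   an access like p->tail[i] has a base that is not a declared object,
   so this returns true and the array may extend past its nominal
   bound; for a declared "struct msg m;" the same reference m.tail[i]
   returns false.  */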
6949 /* Return a tree representing the upper bound of the array mentioned in
6950 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6952 tree
6953 array_ref_up_bound (tree exp)
6955 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6957 /* If there is a domain type and it has an upper bound, use it, substituting
6958 for a PLACEHOLDER_EXPR as needed. */
6959 if (domain_type && TYPE_MAX_VALUE (domain_type))
6960 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6962 /* Otherwise fail. */
6963 return NULL_TREE;
6966 /* Return a tree representing the offset, in bytes, of the field referenced
6967 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6969 tree
6970 component_ref_field_offset (tree exp)
6972 tree aligned_offset = TREE_OPERAND (exp, 2);
6973 tree field = TREE_OPERAND (exp, 1);
6974 location_t loc = EXPR_LOCATION (exp);
6976 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6977 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6978 value. */
6979 if (aligned_offset)
6981 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6982 sizetype from another type of the same width and signedness. */
6983 if (TREE_TYPE (aligned_offset) != sizetype)
6984 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6985 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6986 size_int (DECL_OFFSET_ALIGN (field)
6987 / BITS_PER_UNIT));
6990 /* Otherwise, take the offset from that of the field. Substitute
6991 any PLACEHOLDER_EXPR that we have. */
6992 else
6993 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6996 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6998 static unsigned HOST_WIDE_INT
6999 target_align (const_tree target)
7001 /* We might have a chain of nested references with intermediate misaligning
7002 bit-field components, so we need to recurse to find out. */
7004 unsigned HOST_WIDE_INT this_align, outer_align;
7006 switch (TREE_CODE (target))
7008 case BIT_FIELD_REF:
7009 return 1;
7011 case COMPONENT_REF:
7012 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7013 outer_align = target_align (TREE_OPERAND (target, 0));
7014 return MIN (this_align, outer_align);
7016 case ARRAY_REF:
7017 case ARRAY_RANGE_REF:
7018 this_align = TYPE_ALIGN (TREE_TYPE (target));
7019 outer_align = target_align (TREE_OPERAND (target, 0));
7020 return MIN (this_align, outer_align);
7022 CASE_CONVERT:
7023 case NON_LVALUE_EXPR:
7024 case VIEW_CONVERT_EXPR:
7025 this_align = TYPE_ALIGN (TREE_TYPE (target));
7026 outer_align = target_align (TREE_OPERAND (target, 0));
7027 return MAX (this_align, outer_align);
7029 default:
7030 return TYPE_ALIGN (TREE_TYPE (target));
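/* Illustration (not part of the original file): for an assignment
   target such as s.c, where field c has a DECL_ALIGN of 8 bits inside
   a 32-bit aligned struct, the result is MIN (8, 32) == 8; a
   BIT_FIELD_REF always degrades the known alignment to 1 bit.  */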
7035 /* Given an rtx VALUE that may contain additions and multiplications, return
7036 an equivalent value that just refers to a register, memory, or constant.
7037 This is done by generating instructions to perform the arithmetic and
7038 returning a pseudo-register containing the value.
7040 The returned value may be a REG, SUBREG, MEM or constant. */
7043 force_operand (rtx value, rtx target)
7045 rtx op1, op2;
7046 /* Use subtarget as the target for operand 0 of a binary operation. */
7047 rtx subtarget = get_subtarget (target);
7048 enum rtx_code code = GET_CODE (value);
7050 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7051 if (code == SUBREG
7052 && !REG_P (SUBREG_REG (value))
7053 && !MEM_P (SUBREG_REG (value)))
7055 value
7056 = simplify_gen_subreg (GET_MODE (value),
7057 force_reg (GET_MODE (SUBREG_REG (value)),
7058 force_operand (SUBREG_REG (value),
7059 NULL_RTX)),
7060 GET_MODE (SUBREG_REG (value)),
7061 SUBREG_BYTE (value));
7062 code = GET_CODE (value);
7065 /* Check for a PIC address load. */
7066 if ((code == PLUS || code == MINUS)
7067 && XEXP (value, 0) == pic_offset_table_rtx
7068 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7069 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7070 || GET_CODE (XEXP (value, 1)) == CONST))
7072 if (!subtarget)
7073 subtarget = gen_reg_rtx (GET_MODE (value));
7074 emit_move_insn (subtarget, value);
7075 return subtarget;
7078 if (ARITHMETIC_P (value))
7080 op2 = XEXP (value, 1);
7081 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7082 subtarget = 0;
7083 if (code == MINUS && CONST_INT_P (op2))
7085 code = PLUS;
7086 op2 = negate_rtx (GET_MODE (value), op2);
7089 /* Check for an addition with OP2 a constant integer and our first
7090 operand a PLUS of a virtual register and something else. In that
7091 case, we want to emit the sum of the virtual register and the
7092 constant first and then add the other value. This allows virtual
7093 register instantiation to simply modify the constant rather than
7094 creating another one around this addition. */
7095 if (code == PLUS && CONST_INT_P (op2)
7096 && GET_CODE (XEXP (value, 0)) == PLUS
7097 && REG_P (XEXP (XEXP (value, 0), 0))
7098 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7099 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7101 rtx temp = expand_simple_binop (GET_MODE (value), code,
7102 XEXP (XEXP (value, 0), 0), op2,
7103 subtarget, 0, OPTAB_LIB_WIDEN);
7104 return expand_simple_binop (GET_MODE (value), code, temp,
7105 force_operand (XEXP (XEXP (value,
7106 0), 1), 0),
7107 target, 0, OPTAB_LIB_WIDEN);
7110 op1 = force_operand (XEXP (value, 0), subtarget);
7111 op2 = force_operand (op2, NULL_RTX);
7112 switch (code)
7114 case MULT:
7115 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7116 case DIV:
7117 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7118 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7119 target, 1, OPTAB_LIB_WIDEN);
7120 else
7121 return expand_divmod (0,
7122 FLOAT_MODE_P (GET_MODE (value))
7123 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7124 GET_MODE (value), op1, op2, target, 0);
7125 case MOD:
7126 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7127 target, 0);
7128 case UDIV:
7129 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7130 target, 1);
7131 case UMOD:
7132 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7133 target, 1);
7134 case ASHIFTRT:
7135 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7136 target, 0, OPTAB_LIB_WIDEN);
7137 default:
7138 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7139 target, 1, OPTAB_LIB_WIDEN);
7142 if (UNARY_P (value))
7144 if (!target)
7145 target = gen_reg_rtx (GET_MODE (value));
7146 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7147 switch (code)
7149 case ZERO_EXTEND:
7150 case SIGN_EXTEND:
7151 case TRUNCATE:
7152 case FLOAT_EXTEND:
7153 case FLOAT_TRUNCATE:
7154 convert_move (target, op1, code == ZERO_EXTEND);
7155 return target;
7157 case FIX:
7158 case UNSIGNED_FIX:
7159 expand_fix (target, op1, code == UNSIGNED_FIX);
7160 return target;
7162 case FLOAT:
7163 case UNSIGNED_FLOAT:
7164 expand_float (target, op1, code == UNSIGNED_FLOAT);
7165 return target;
7167 default:
7168 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7172 #ifdef INSN_SCHEDULING
7173 /* On machines that have insn scheduling, we want all memory references to be
7174 explicit, so we need to deal with such paradoxical SUBREGs. */
7175 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7176 value
7177 = simplify_gen_subreg (GET_MODE (value),
7178 force_reg (GET_MODE (SUBREG_REG (value)),
7179 force_operand (SUBREG_REG (value),
7180 NULL_RTX)),
7181 GET_MODE (SUBREG_REG (value)),
7182 SUBREG_BYTE (value));
7183 #endif
7185 return value;
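/* Sketch of a typical use (illustrative; the operands are made up):
   given (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101)),
   force_operand emits the multiply and the add and returns a pseudo
   register holding the sum, which can then be used directly inside a
   memory address.  */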
7188 /* Subroutine of expand_expr: return nonzero iff there is no way that
7189 EXP can reference X, which is being modified. TOP_P is nonzero if this
7190 call is going to be used to determine whether we need a temporary
7191 for EXP, as opposed to a recursive call to this function.
7193 It is always safe for this routine to return zero since it merely
7194 searches for optimization opportunities. */
7197 safe_from_p (const_rtx x, tree exp, int top_p)
7199 rtx exp_rtl = 0;
7200 int i, nops;
7202 if (x == 0
7203 /* If EXP has varying size, we MUST use a target since we currently
7204 have no way of allocating temporaries of variable size
7205 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7206 So we assume here that something at a higher level has prevented a
7207 clash. This is somewhat bogus, but the best we can do. Only
7208 do this when X is BLKmode and when we are at the top level. */
7209 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7210 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7211 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7212 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7213 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7214 != INTEGER_CST)
7215 && GET_MODE (x) == BLKmode)
7216 /* If X is in the outgoing argument area, it is always safe. */
7217 || (MEM_P (x)
7218 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7219 || (GET_CODE (XEXP (x, 0)) == PLUS
7220 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7221 return 1;
7223 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7224 find the underlying pseudo. */
7225 if (GET_CODE (x) == SUBREG)
7227 x = SUBREG_REG (x);
7228 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7229 return 0;
7232 /* Now look at our tree code and possibly recurse. */
7233 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7235 case tcc_declaration:
7236 exp_rtl = DECL_RTL_IF_SET (exp);
7237 break;
7239 case tcc_constant:
7240 return 1;
7242 case tcc_exceptional:
7243 if (TREE_CODE (exp) == TREE_LIST)
7245 while (1)
7247 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7248 return 0;
7249 exp = TREE_CHAIN (exp);
7250 if (!exp)
7251 return 1;
7252 if (TREE_CODE (exp) != TREE_LIST)
7253 return safe_from_p (x, exp, 0);
7256 else if (TREE_CODE (exp) == CONSTRUCTOR)
7258 constructor_elt *ce;
7259 unsigned HOST_WIDE_INT idx;
7261 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7262 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7263 || !safe_from_p (x, ce->value, 0))
7264 return 0;
7265 return 1;
7267 else if (TREE_CODE (exp) == ERROR_MARK)
7268 return 1; /* An already-visited SAVE_EXPR? */
7269 else
7270 return 0;
7272 case tcc_statement:
7273 /* The only case we look at here is the DECL_INITIAL inside a
7274 DECL_EXPR. */
7275 return (TREE_CODE (exp) != DECL_EXPR
7276 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7277 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7278 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7280 case tcc_binary:
7281 case tcc_comparison:
7282 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7283 return 0;
7284 /* Fall through. */
7286 case tcc_unary:
7287 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7289 case tcc_expression:
7290 case tcc_reference:
7291 case tcc_vl_exp:
7292 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7293 the expression. If it is set, we conflict iff we are that rtx or
7294 both are in memory. Otherwise, we check all operands of the
7295 expression recursively. */
7297 switch (TREE_CODE (exp))
7299 case ADDR_EXPR:
7300 /* If the operand is static or we are static, we can't conflict.
7301 Likewise if we don't conflict with the operand at all. */
7302 if (staticp (TREE_OPERAND (exp, 0))
7303 || TREE_STATIC (exp)
7304 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7305 return 1;
7307 /* Otherwise, the only way this can conflict is if we are taking
7308 the address of a DECL whose address is part of X, which is
7309 very rare. */
7310 exp = TREE_OPERAND (exp, 0);
7311 if (DECL_P (exp))
7313 if (!DECL_RTL_SET_P (exp)
7314 || !MEM_P (DECL_RTL (exp)))
7315 return 0;
7316 else
7317 exp_rtl = XEXP (DECL_RTL (exp), 0);
7319 break;
7321 case MEM_REF:
7322 if (MEM_P (x)
7323 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7324 get_alias_set (exp)))
7325 return 0;
7326 break;
7328 case CALL_EXPR:
7329 /* Assume that the call will clobber all hard registers and
7330 all of memory. */
7331 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7332 || MEM_P (x))
7333 return 0;
7334 break;
7336 case WITH_CLEANUP_EXPR:
7337 case CLEANUP_POINT_EXPR:
7338 /* Lowered by gimplify.c. */
7339 gcc_unreachable ();
7341 case SAVE_EXPR:
7342 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7344 default:
7345 break;
7348 /* If we have an rtx, we do not need to scan our operands. */
7349 if (exp_rtl)
7350 break;
7352 nops = TREE_OPERAND_LENGTH (exp);
7353 for (i = 0; i < nops; i++)
7354 if (TREE_OPERAND (exp, i) != 0
7355 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7356 return 0;
7358 break;
7360 case tcc_type:
7361 /* Should never get a type here. */
7362 gcc_unreachable ();
7365 /* If we have an rtl, find any enclosed object. Then see if we conflict
7366 with it. */
7367 if (exp_rtl)
7369 if (GET_CODE (exp_rtl) == SUBREG)
7371 exp_rtl = SUBREG_REG (exp_rtl);
7372 if (REG_P (exp_rtl)
7373 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7374 return 0;
7377 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7378 are memory and they conflict. */
7379 return ! (rtx_equal_p (x, exp_rtl)
7380 || (MEM_P (x) && MEM_P (exp_rtl)
7381 && true_dependence (exp_rtl, VOIDmode, x)));
7384 /* If we reach here, it is safe. */
7385 return 1;
7389 /* Return the highest power of two that EXP is known to be a multiple of.
7390 This is used in updating alignment of MEMs in array references. */
7392 unsigned HOST_WIDE_INT
7393 highest_pow2_factor (const_tree exp)
7395 unsigned HOST_WIDE_INT ret;
7396 int trailing_zeros = tree_ctz (exp);
7397 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7398 return BIGGEST_ALIGNMENT;
7399 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7400 if (ret > BIGGEST_ALIGNMENT)
7401 return BIGGEST_ALIGNMENT;
7402 return ret;
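/* For example (exposition only): if EXP is the constant 24, tree_ctz
   reports 3 known trailing zero bits, so the result is 8; a value with
   no known trailing zeros yields 1.  */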
7405 /* Similar, except that the alignment requirements of TARGET are
7406 taken into account. Assume it is at least as aligned as its
7407 type, unless it is a COMPONENT_REF in which case the layout of
7408 the structure gives the alignment. */
7410 static unsigned HOST_WIDE_INT
7411 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7413 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7414 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7416 return MAX (factor, talign);
7419 #ifdef HAVE_conditional_move
7420 /* Convert the tree comparison code TCODE to the rtl one where the
7421 signedness is UNSIGNEDP. */
7423 static enum rtx_code
7424 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7426 enum rtx_code code;
7427 switch (tcode)
7429 case EQ_EXPR:
7430 code = EQ;
7431 break;
7432 case NE_EXPR:
7433 code = NE;
7434 break;
7435 case LT_EXPR:
7436 code = unsignedp ? LTU : LT;
7437 break;
7438 case LE_EXPR:
7439 code = unsignedp ? LEU : LE;
7440 break;
7441 case GT_EXPR:
7442 code = unsignedp ? GTU : GT;
7443 break;
7444 case GE_EXPR:
7445 code = unsignedp ? GEU : GE;
7446 break;
7447 case UNORDERED_EXPR:
7448 code = UNORDERED;
7449 break;
7450 case ORDERED_EXPR:
7451 code = ORDERED;
7452 break;
7453 case UNLT_EXPR:
7454 code = UNLT;
7455 break;
7456 case UNLE_EXPR:
7457 code = UNLE;
7458 break;
7459 case UNGT_EXPR:
7460 code = UNGT;
7461 break;
7462 case UNGE_EXPR:
7463 code = UNGE;
7464 break;
7465 case UNEQ_EXPR:
7466 code = UNEQ;
7467 break;
7468 case LTGT_EXPR:
7469 code = LTGT;
7470 break;
7472 default:
7473 gcc_unreachable ();
7475 return code;
7477 #endif
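/* Illustration -- a standalone sketch (not GCC API, names are hypothetical)
   of why the unsigned RTL codes exist: the same bit pattern orders
   differently under signed (LT) and unsigned (LTU) interpretation, so the
   tree type's signedness has to pick the comparison code.  */

#include <assert.h>
#include <stdint.h>

static void
signedness_demo (void)
{
  int32_t  s = -1;                /* Bit pattern 0xffffffff.  */
  uint32_t u = (uint32_t) s;      /* Same bits, value 4294967295.  */
  assert (s < 0);                 /* Signed compare: LT holds.  */
  assert (!(u < 0u));             /* Unsigned compare: LTU does not.  */
}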
7479 /* Subroutine of expand_expr. Expand the two operands of a binary
7480 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7481 The value may be stored in TARGET if TARGET is nonzero. The
7482 MODIFIER argument is as documented by expand_expr. */
7484 static void
7485 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7486 enum expand_modifier modifier)
7488 if (! safe_from_p (target, exp1, 1))
7489 target = 0;
7490 if (operand_equal_p (exp0, exp1, 0))
7492 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7493 *op1 = copy_rtx (*op0);
7495 else
7497 /* If we need to preserve evaluation order, copy exp0 into its own
7498 temporary variable so that it can't be clobbered by exp1. */
7499 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7500 exp0 = save_expr (exp0);
7501 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7502 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
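/* Illustration -- a standalone sketch (not GCC API, names are hypothetical)
   of the evaluation-order hazard the save_expr above guards against: if the
   second operand clobbers what the first operand reads, the first value has
   to be captured in its own temporary before the second is evaluated.  */

static int
ordered_sum (int *p)
{
  int first = *p;                 /* Capture the value before the clobber.  */
  int second = (*p = 7);          /* Side effect that overwrites *p.  */
  return first + second;
}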
7507 /* Return a MEM that contains constant EXP. DEFER is as for
7508 output_constant_def and MODIFIER is as for expand_expr. */
7510 static rtx
7511 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7513 rtx mem;
7515 mem = output_constant_def (exp, defer);
7516 if (modifier != EXPAND_INITIALIZER)
7517 mem = use_anchored_address (mem);
7518 return mem;
7521 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7522 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7524 static rtx
7525 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7526 enum expand_modifier modifier, addr_space_t as)
7528 rtx result, subtarget;
7529 tree inner, offset;
7530 HOST_WIDE_INT bitsize, bitpos;
7531 int volatilep, unsignedp;
7532 enum machine_mode mode1;
7534 /* If we are taking the address of a constant and are at the top level,
7535 we have to use output_constant_def since we can't call force_const_mem
7536 at top level. */
7537 /* ??? This should be considered a front-end bug. We should not be
7538 generating ADDR_EXPR of something that isn't an LVALUE. The only
7539 exception here is STRING_CST. */
7540 if (CONSTANT_CLASS_P (exp))
7542 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7543 if (modifier < EXPAND_SUM)
7544 result = force_operand (result, target);
7545 return result;
7548 /* Everything must be something allowed by is_gimple_addressable. */
7549 switch (TREE_CODE (exp))
7551 case INDIRECT_REF:
7552 /* This case will happen via recursion for &a->b. */
7553 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7555 case MEM_REF:
7557 tree tem = TREE_OPERAND (exp, 0);
7558 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7559 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7560 return expand_expr (tem, target, tmode, modifier);
7563 case CONST_DECL:
7564 /* Expand the initializer like constants above. */
7565 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7566 0, modifier), 0);
7567 if (modifier < EXPAND_SUM)
7568 result = force_operand (result, target);
7569 return result;
7571 case REALPART_EXPR:
7572 /* The real part of the complex number is always first, therefore
7573 the address is the same as the address of the parent object. */
7574 offset = 0;
7575 bitpos = 0;
7576 inner = TREE_OPERAND (exp, 0);
7577 break;
7579 case IMAGPART_EXPR:
7580 /* The imaginary part of the complex number is always second.
7581 The expression is therefore always offset by the size of the
7582 scalar type. */
7583 offset = 0;
7584 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7585 inner = TREE_OPERAND (exp, 0);
7586 break;
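/* Illustration: for a _Complex double the layout is { real, imag }, so the
   address of the imaginary part is the parent address plus sizeof (double);
   the BITPOS set above is exactly that scalar size expressed in bits.  */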
7588 case COMPOUND_LITERAL_EXPR:
7589 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7590 rtl_for_decl_init is called on DECL_INITIAL with
7591 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7592 if (modifier == EXPAND_INITIALIZER
7593 && COMPOUND_LITERAL_EXPR_DECL (exp))
7594 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7595 target, tmode, modifier, as);
7596 /* FALLTHRU */
7597 default:
7598 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7599 expand_expr, as that can have various side effects; LABEL_DECLs for
7600 example, may not have their DECL_RTL set yet. Expand the rtl of
7601 CONSTRUCTORs too, which should yield a memory reference for the
7602 constructor's contents. Assume language specific tree nodes can
7603 be expanded in some interesting way. */
7604 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7605 if (DECL_P (exp)
7606 || TREE_CODE (exp) == CONSTRUCTOR
7607 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7609 result = expand_expr (exp, target, tmode,
7610 modifier == EXPAND_INITIALIZER
7611 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7613 /* If the DECL isn't in memory, then the DECL wasn't properly
7614 marked TREE_ADDRESSABLE, which will be either a front-end
7615 or a tree optimizer bug. */
7617 if (TREE_ADDRESSABLE (exp)
7618 && ! MEM_P (result)
7619 && ! targetm.calls.allocate_stack_slots_for_args ())
7621 error ("local frame unavailable (naked function?)");
7622 return result;
7624 else
7625 gcc_assert (MEM_P (result));
7626 result = XEXP (result, 0);
7628 /* ??? Is this needed anymore? */
7629 if (DECL_P (exp))
7630 TREE_USED (exp) = 1;
7632 if (modifier != EXPAND_INITIALIZER
7633 && modifier != EXPAND_CONST_ADDRESS
7634 && modifier != EXPAND_SUM)
7635 result = force_operand (result, target);
7636 return result;
7639 /* Pass FALSE as the last argument to get_inner_reference although
7640 we are expanding to RTL. The rationale is that we know how to
7641 handle "aligning nodes" here: we can just bypass them because
7642 they won't change the final object whose address will be returned
7643 (they actually exist only for that purpose). */
7644 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7645 &mode1, &unsignedp, &volatilep, false);
7646 break;
7649 /* We must have made progress. */
7650 gcc_assert (inner != exp);
7652 subtarget = offset || bitpos ? NULL_RTX : target;
7653 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7654 inner alignment, force the inner to be sufficiently aligned. */
7655 if (CONSTANT_CLASS_P (inner)
7656 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7658 inner = copy_node (inner);
7659 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7660 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7661 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7663 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7665 if (offset)
7667 rtx tmp;
7669 if (modifier != EXPAND_NORMAL)
7670 result = force_operand (result, NULL);
7671 tmp = expand_expr (offset, NULL_RTX, tmode,
7672 modifier == EXPAND_INITIALIZER
7673 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7675 /* expand_expr is allowed to return an object in a mode other
7676 than TMODE. If it did, we need to convert. */
7677 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7678 tmp = convert_modes (tmode, GET_MODE (tmp),
7679 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7680 result = convert_memory_address_addr_space (tmode, result, as);
7681 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7683 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7684 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7685 else
7687 subtarget = bitpos ? NULL_RTX : target;
7688 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7689 1, OPTAB_LIB_WIDEN);
7693 if (bitpos)
7695 /* Someone beforehand should have rejected taking the address
7696 of such an object. */
7697 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7699 result = convert_memory_address_addr_space (tmode, result, as);
7700 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7701 if (modifier < EXPAND_SUM)
7702 result = force_operand (result, target);
7705 return result;
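/* Illustration -- a standalone sketch (not GCC API, names are hypothetical)
   of what the OFFSET/BITPOS handling above computes for a plain field
   access: the address of a component is the address of the enclosing
   object plus a byte offset (BITPOS / BITS_PER_UNIT).  */

#include <assert.h>
#include <stddef.h>

struct outer { char pad[3]; int field; };

static void
field_address_demo (struct outer *p)
{
  assert ((char *) &p->field == (char *) p + offsetof (struct outer, field));
}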
7708 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7709 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7711 static rtx
7712 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7713 enum expand_modifier modifier)
7715 addr_space_t as = ADDR_SPACE_GENERIC;
7716 enum machine_mode address_mode = Pmode;
7717 enum machine_mode pointer_mode = ptr_mode;
7718 enum machine_mode rmode;
7719 rtx result;
7721 /* Target mode of VOIDmode says "whatever's natural". */
7722 if (tmode == VOIDmode)
7723 tmode = TYPE_MODE (TREE_TYPE (exp));
7725 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7727 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7728 address_mode = targetm.addr_space.address_mode (as);
7729 pointer_mode = targetm.addr_space.pointer_mode (as);
7732 /* We can get called with some Weird Things if the user does silliness
7733 like "(short) &a". In that case, convert_memory_address won't do
7734 the right thing, so ignore the given target mode. */
7735 if (tmode != address_mode && tmode != pointer_mode)
7736 tmode = address_mode;
7738 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7739 tmode, modifier, as);
7741 /* Despite expand_expr's claims about ignoring TMODE when not
7742 strictly convenient, things break if we don't honor it. Note
7743 that combined with the above, we only do this for pointer modes. */
7744 rmode = GET_MODE (result);
7745 if (rmode == VOIDmode)
7746 rmode = tmode;
7747 if (rmode != tmode)
7748 result = convert_memory_address_addr_space (tmode, result, as);
7750 return result;
7753 /* Generate code for computing CONSTRUCTOR EXP.
7754 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7755 is TRUE, instead of creating a temporary variable in memory,
7756 NULL is returned and the caller needs to handle it differently. */
7758 static rtx
7759 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7760 bool avoid_temp_mem)
7762 tree type = TREE_TYPE (exp);
7763 enum machine_mode mode = TYPE_MODE (type);
7765 /* Try to avoid creating a temporary at all. This is possible
7766 if all of the initializer is zero.
7767 FIXME: try to handle all [0..255] initializers we can handle
7768 with memset. */
7769 if (TREE_STATIC (exp)
7770 && !TREE_ADDRESSABLE (exp)
7771 && target != 0 && mode == BLKmode
7772 && all_zeros_p (exp))
7774 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7775 return target;
7778 /* All elts simple constants => refer to a constant in memory. But
7779 if this is a non-BLKmode mode, let it store a field at a time
7780 since that should make a CONST_INT, CONST_WIDE_INT or
7781 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7782 use, it is best to store directly into the target unless the type
7783 is large enough that memcpy will be used. If we are making an
7784 initializer and all operands are constant, put it in memory as
7785 well.
7787 FIXME: Avoid trying to fill vector constructors piece-meal.
7788 Output them with output_constant_def below unless we're sure
7789 they're zeros. This should go away when vector initializers
7790 are treated like VECTOR_CST instead of arrays. */
7791 if ((TREE_STATIC (exp)
7792 && ((mode == BLKmode
7793 && ! (target != 0 && safe_from_p (target, exp, 1)))
7794 || TREE_ADDRESSABLE (exp)
7795 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7796 && (! MOVE_BY_PIECES_P
7797 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7798 TYPE_ALIGN (type)))
7799 && ! mostly_zeros_p (exp))))
7800 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7801 && TREE_CONSTANT (exp)))
7803 rtx constructor;
7805 if (avoid_temp_mem)
7806 return NULL_RTX;
7808 constructor = expand_expr_constant (exp, 1, modifier);
7810 if (modifier != EXPAND_CONST_ADDRESS
7811 && modifier != EXPAND_INITIALIZER
7812 && modifier != EXPAND_SUM)
7813 constructor = validize_mem (constructor);
7815 return constructor;
7818 /* Handle calls that pass values in multiple non-contiguous
7819 locations. The Irix 6 ABI has examples of this. */
7820 if (target == 0 || ! safe_from_p (target, exp, 1)
7821 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7823 if (avoid_temp_mem)
7824 return NULL_RTX;
7826 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7829 store_constructor (exp, target, 0, int_expr_size (exp));
7830 return target;
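/* Illustration -- a standalone sketch (not GCC API, names are hypothetical)
   of the all_zeros_p shortcut above: when every element of an aggregate
   initializer is zero, one block clear has the same effect as storing each
   field, since an all-bits-zero integer object has the value zero.  */

#include <string.h>

struct triple { int a, b, c; };

static void
zero_init_demo (struct triple *t)
{
  memset (t, 0, sizeof *t);       /* Same effect here as { 0, 0, 0 }.  */
}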
7834 /* expand_expr: generate code for computing expression EXP.
7835 An rtx for the computed value is returned. The value is never null.
7836 In the case of a void EXP, const0_rtx is returned.
7838 The value may be stored in TARGET if TARGET is nonzero.
7839 TARGET is just a suggestion; callers must assume that
7840 the rtx returned may not be the same as TARGET.
7842 If TARGET is CONST0_RTX, it means that the value will be ignored.
7844 If TMODE is not VOIDmode, it suggests generating the
7845 result in mode TMODE. But this is done only when convenient.
7846 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7847 TMODE is just a suggestion; callers must assume that
7848 the rtx returned may not have mode TMODE.
7850 Note that TARGET may have neither TMODE nor MODE. In that case, it
7851 probably will not be used.
7853 If MODIFIER is EXPAND_SUM then when EXP is an addition
7854 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7855 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7856 products as above, or REG or MEM, or constant.
7857 Ordinarily in such cases we would output mul or add instructions
7858 and then return a pseudo reg containing the sum.
7860 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7861 it also marks a label as absolutely required (it can't be dead).
7862 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7863 This is used for outputting expressions used in initializers.
7865 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7866 with a constant address even if that address is not normally legitimate.
7867 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7869 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7870 a call parameter. Such targets require special care as we haven't yet
7871 marked TARGET so that it's safe from being trashed by libcalls. We
7872 don't want to use TARGET for anything but the final result;
7873 intermediate values must go elsewhere. Additionally, calls to
7874 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7876 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7877 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7878 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7879 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7880 recursively.
7882 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7883 In this case, we don't adjust a returned MEM rtx that wouldn't be
7884 sufficiently aligned for its mode; instead, it's up to the caller
7885 to deal with it afterwards. This is used to make sure that unaligned
7886 base objects for which out-of-bounds accesses are supported, for
7887 example record types with trailing arrays, aren't realigned behind
7888 the back of the caller.
7889 The normal operating mode is to pass FALSE for this parameter. */
7892 rtx expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7893 enum expand_modifier modifier, rtx *alt_rtl,
7894 bool inner_reference_p)
7896 rtx ret;
7898 /* Handle ERROR_MARK before anybody tries to access its type. */
7899 if (TREE_CODE (exp) == ERROR_MARK
7900 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7902 ret = CONST0_RTX (tmode);
7903 return ret ? ret : const0_rtx;
7906 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7907 inner_reference_p);
7908 return ret;
7911 /* Try to expand the conditional expression which is represented by
7912 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7913 return the rtl reg which represents the result. Otherwise return
7914 NULL_RTX. */
7916 static rtx
7917 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7918 tree treeop1 ATTRIBUTE_UNUSED,
7919 tree treeop2 ATTRIBUTE_UNUSED)
7921 #ifdef HAVE_conditional_move
7922 rtx insn;
7923 rtx op00, op01, op1, op2;
7924 enum rtx_code comparison_code;
7925 enum machine_mode comparison_mode;
7926 gimple srcstmt;
7927 rtx temp;
7928 tree type = TREE_TYPE (treeop1);
7929 int unsignedp = TYPE_UNSIGNED (type);
7930 enum machine_mode mode = TYPE_MODE (type);
7931 enum machine_mode orig_mode = mode;
7933 /* If we cannot do a conditional move on the mode, try doing it
7934 with the promoted mode. */
7935 if (!can_conditionally_move_p (mode))
7937 mode = promote_mode (type, mode, &unsignedp);
7938 if (!can_conditionally_move_p (mode))
7939 return NULL_RTX;
7940 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7942 else
7943 temp = assign_temp (type, 0, 1);
7945 start_sequence ();
7946 expand_operands (treeop1, treeop2,
7947 temp, &op1, &op2, EXPAND_NORMAL);
7949 if (TREE_CODE (treeop0) == SSA_NAME
7950 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7952 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7953 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7954 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7955 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7956 comparison_mode = TYPE_MODE (type);
7957 unsignedp = TYPE_UNSIGNED (type);
7958 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7960 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7962 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7963 enum tree_code cmpcode = TREE_CODE (treeop0);
7964 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7965 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7966 unsignedp = TYPE_UNSIGNED (type);
7967 comparison_mode = TYPE_MODE (type);
7968 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7970 else
7972 op00 = expand_normal (treeop0);
7973 op01 = const0_rtx;
7974 comparison_code = NE;
7975 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7978 if (GET_MODE (op1) != mode)
7979 op1 = gen_lowpart (mode, op1);
7981 if (GET_MODE (op2) != mode)
7982 op2 = gen_lowpart (mode, op2);
7984 /* Try to emit the conditional move. */
7985 insn = emit_conditional_move (temp, comparison_code,
7986 op00, op01, comparison_mode,
7987 op1, op2, mode,
7988 unsignedp);
7990 /* If we could do the conditional move, emit the sequence,
7991 and return. */
7992 if (insn)
7994 rtx seq = get_insns ();
7995 end_sequence ();
7996 emit_insn (seq);
7997 return convert_modes (orig_mode, mode, temp, 0);
8000 /* Otherwise discard the sequence and fall back to code with
8001 branches. */
8002 end_sequence ();
8003 #endif
8004 return NULL_RTX;
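/* Illustration -- a standalone sketch (not GCC API, names are hypothetical)
   of what a conditional move buys: both arms are evaluated up front and one
   value is selected without a branch.  COND is assumed to be 0 or 1.  */

#include <stdint.h>

static uint32_t
select_without_branch (int cond, uint32_t if_true, uint32_t if_false)
{
  uint32_t mask = (uint32_t) -cond;     /* All ones if COND, else zero.  */
  return (if_true & mask) | (if_false & ~mask);
}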
8008 rtx expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8009 enum expand_modifier modifier)
8011 rtx op0, op1, op2, temp;
8012 tree type;
8013 int unsignedp;
8014 enum machine_mode mode;
8015 enum tree_code code = ops->code;
8016 optab this_optab;
8017 rtx subtarget, original_target;
8018 int ignore;
8019 bool reduce_bit_field;
8020 location_t loc = ops->location;
8021 tree treeop0, treeop1, treeop2;
8022 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8023 ? reduce_to_bit_field_precision ((expr), \
8024 target, \
8025 type) \
8026 : (expr))
8028 type = ops->type;
8029 mode = TYPE_MODE (type);
8030 unsignedp = TYPE_UNSIGNED (type);
8032 treeop0 = ops->op0;
8033 treeop1 = ops->op1;
8034 treeop2 = ops->op2;
8036 /* We should be called only on simple (binary or unary) expressions,
8037 exactly those that are valid in gimple expressions that aren't
8038 GIMPLE_SINGLE_RHS (or invalid). */
8039 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8040 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8041 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8043 ignore = (target == const0_rtx
8044 || ((CONVERT_EXPR_CODE_P (code)
8045 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8046 && TREE_CODE (type) == VOID_TYPE));
8048 /* We should be called only if we need the result. */
8049 gcc_assert (!ignore);
8051 /* An operation in what may be a bit-field type needs the
8052 result to be reduced to the precision of the bit-field type,
8053 which is narrower than that of the type's mode. */
8054 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8055 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8057 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8058 target = 0;
8060 /* Use subtarget as the target for operand 0 of a binary operation. */
8061 subtarget = get_subtarget (target);
8062 original_target = target;
8064 switch (code)
8066 case NON_LVALUE_EXPR:
8067 case PAREN_EXPR:
8068 CASE_CONVERT:
8069 if (treeop0 == error_mark_node)
8070 return const0_rtx;
8072 if (TREE_CODE (type) == UNION_TYPE)
8074 tree valtype = TREE_TYPE (treeop0);
8076 /* If both input and output are BLKmode, this conversion isn't doing
8077 anything except possibly changing memory attribute. */
8078 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8080 rtx result = expand_expr (treeop0, target, tmode,
8081 modifier);
8083 result = copy_rtx (result);
8084 set_mem_attributes (result, type, 0);
8085 return result;
8088 if (target == 0)
8090 if (TYPE_MODE (type) != BLKmode)
8091 target = gen_reg_rtx (TYPE_MODE (type));
8092 else
8093 target = assign_temp (type, 1, 1);
8096 if (MEM_P (target))
8097 /* Store data into beginning of memory target. */
8098 store_expr (treeop0,
8099 adjust_address (target, TYPE_MODE (valtype), 0),
8100 modifier == EXPAND_STACK_PARM,
8101 false);
8103 else
8105 gcc_assert (REG_P (target));
8107 /* Store this field into a union of the proper type. */
8108 store_field (target,
8109 MIN ((int_size_in_bytes (TREE_TYPE
8110 (treeop0))
8111 * BITS_PER_UNIT),
8112 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8113 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8116 /* Return the entire union. */
8117 return target;
8120 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8122 op0 = expand_expr (treeop0, target, VOIDmode,
8123 modifier);
8125 /* If the signedness of the conversion differs and OP0 is
8126 a promoted SUBREG, clear that indication since we now
8127 have to do the proper extension. */
8128 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8129 && GET_CODE (op0) == SUBREG)
8130 SUBREG_PROMOTED_VAR_P (op0) = 0;
8132 return REDUCE_BIT_FIELD (op0);
8135 op0 = expand_expr (treeop0, NULL_RTX, mode,
8136 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8137 if (GET_MODE (op0) == mode)
8140 /* If OP0 is a constant, just convert it into the proper mode. */
8141 else if (CONSTANT_P (op0))
8143 tree inner_type = TREE_TYPE (treeop0);
8144 enum machine_mode inner_mode = GET_MODE (op0);
8146 if (inner_mode == VOIDmode)
8147 inner_mode = TYPE_MODE (inner_type);
8149 if (modifier == EXPAND_INITIALIZER)
8150 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8151 subreg_lowpart_offset (mode,
8152 inner_mode));
8153 else
8154 op0= convert_modes (mode, inner_mode, op0,
8155 TYPE_UNSIGNED (inner_type));
8158 else if (modifier == EXPAND_INITIALIZER)
8159 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8161 else if (target == 0)
8162 op0 = convert_to_mode (mode, op0,
8163 TYPE_UNSIGNED (TREE_TYPE
8164 (treeop0)));
8165 else
8167 convert_move (target, op0,
8168 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8169 op0 = target;
8172 return REDUCE_BIT_FIELD (op0);
8174 case ADDR_SPACE_CONVERT_EXPR:
8176 tree treeop0_type = TREE_TYPE (treeop0);
8177 addr_space_t as_to;
8178 addr_space_t as_from;
8180 gcc_assert (POINTER_TYPE_P (type));
8181 gcc_assert (POINTER_TYPE_P (treeop0_type));
8183 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8184 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8186 /* Conversions between pointers to the same address space should
8187 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8188 gcc_assert (as_to != as_from);
8190 /* Ask target code to handle conversion between pointers
8191 to overlapping address spaces. */
8192 if (targetm.addr_space.subset_p (as_to, as_from)
8193 || targetm.addr_space.subset_p (as_from, as_to))
8195 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8196 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8197 gcc_assert (op0);
8198 return op0;
8201 /* For disjoint address spaces, converting anything but
8202 a null pointer invokes undefined behaviour. We simply
8203 always return a null pointer here. */
8204 return CONST0_RTX (mode);
8207 case POINTER_PLUS_EXPR:
8208 /* Even though the sizetype mode and the pointer's mode can be different,
8209 expand is able to handle this correctly and get the correct result out
8210 of the PLUS_EXPR code. */
8211 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8212 if sizetype precision is smaller than pointer precision. */
8213 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8214 treeop1 = fold_convert_loc (loc, type,
8215 fold_convert_loc (loc, ssizetype,
8216 treeop1));
8217 /* If sizetype precision is larger than pointer precision, truncate the
8218 offset to have matching modes. */
8219 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8220 treeop1 = fold_convert_loc (loc, type, treeop1);
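/* Fall through -- POINTER_PLUS_EXPR is expanded by the PLUS_EXPR code.  */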
8222 case PLUS_EXPR:
8223 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8224 something else, make sure we add the register to the constant and
8225 then to the other thing. This case can occur during strength
8226 reduction and doing it this way will produce better code if the
8227 frame pointer or argument pointer is eliminated.
8229 fold-const.c will ensure that the constant is always in the inner
8230 PLUS_EXPR, so the only case we need to do anything about is if
8231 sp, ap, or fp is our second argument, in which case we must swap
8232 the innermost first argument and our second argument. */
8234 if (TREE_CODE (treeop0) == PLUS_EXPR
8235 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8236 && TREE_CODE (treeop1) == VAR_DECL
8237 && (DECL_RTL (treeop1) == frame_pointer_rtx
8238 || DECL_RTL (treeop1) == stack_pointer_rtx
8239 || DECL_RTL (treeop1) == arg_pointer_rtx))
8241 gcc_unreachable ();
8244 /* If the result is to be ptr_mode and we are adding an integer to
8245 something, we might be forming a constant. So try to use
8246 plus_constant. If it produces a sum and we can't accept it,
8247 use force_operand. This allows P = &ARR[const] to generate
8248 efficient code on machines where a SYMBOL_REF is not a valid
8249 address.
8251 If this is an EXPAND_SUM call, always return the sum. */
8252 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8253 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8255 if (modifier == EXPAND_STACK_PARM)
8256 target = 0;
8257 if (TREE_CODE (treeop0) == INTEGER_CST
8258 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8259 && TREE_CONSTANT (treeop1))
8261 rtx constant_part;
8262 HOST_WIDE_INT wc;
8263 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8265 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8266 EXPAND_SUM);
8267 /* Use wi::shwi to ensure that the constant is
8268 truncated according to the mode of OP1, then sign extended
8269 to a HOST_WIDE_INT. Using the constant directly can result
8270 in non-canonical RTL in a 64x32 cross compile. */
8271 wc = TREE_INT_CST_LOW (treeop0);
8272 constant_part =
8273 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8274 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8275 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8276 op1 = force_operand (op1, target);
8277 return REDUCE_BIT_FIELD (op1);
8280 else if (TREE_CODE (treeop1) == INTEGER_CST
8281 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8282 && TREE_CONSTANT (treeop0))
8284 rtx constant_part;
8285 HOST_WIDE_INT wc;
8286 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8288 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8289 (modifier == EXPAND_INITIALIZER
8290 ? EXPAND_INITIALIZER : EXPAND_SUM));
8291 if (! CONSTANT_P (op0))
8293 op1 = expand_expr (treeop1, NULL_RTX,
8294 VOIDmode, modifier);
8295 /* Return a PLUS if modifier says it's OK. */
8296 if (modifier == EXPAND_SUM
8297 || modifier == EXPAND_INITIALIZER)
8298 return simplify_gen_binary (PLUS, mode, op0, op1);
8299 goto binop2;
8301 /* Use wi::shwi to ensure that the constant is
8302 truncated according to the mode of OP1, then sign extended
8303 to a HOST_WIDE_INT. Using the constant directly can result
8304 in non-canonical RTL in a 64x32 cross compile. */
8305 wc = TREE_INT_CST_LOW (treeop1);
8306 constant_part
8307 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8308 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8309 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8310 op0 = force_operand (op0, target);
8311 return REDUCE_BIT_FIELD (op0);
8315 /* Use TER to expand pointer addition of a negated value
8316 as pointer subtraction. */
8317 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8318 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8319 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8320 && TREE_CODE (treeop1) == SSA_NAME
8321 && TYPE_MODE (TREE_TYPE (treeop0))
8322 == TYPE_MODE (TREE_TYPE (treeop1)))
8324 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8325 if (def)
8327 treeop1 = gimple_assign_rhs1 (def);
8328 code = MINUS_EXPR;
8329 goto do_minus;
8333 /* No sense saving up arithmetic to be done
8334 if it's all in the wrong mode to form part of an address.
8335 And force_operand won't know whether to sign-extend or
8336 zero-extend. */
8337 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8338 || mode != ptr_mode)
8340 expand_operands (treeop0, treeop1,
8341 subtarget, &op0, &op1, EXPAND_NORMAL);
8342 if (op0 == const0_rtx)
8343 return op1;
8344 if (op1 == const0_rtx)
8345 return op0;
8346 goto binop2;
8349 expand_operands (treeop0, treeop1,
8350 subtarget, &op0, &op1, modifier);
8351 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8353 case MINUS_EXPR:
8354 do_minus:
8355 /* For initializers, we are allowed to return a MINUS of two
8356 symbolic constants. Here we handle all cases when both operands
8357 are constant. */
8358 /* Handle difference of two symbolic constants,
8359 for the sake of an initializer. */
8360 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8361 && really_constant_p (treeop0)
8362 && really_constant_p (treeop1))
8364 expand_operands (treeop0, treeop1,
8365 NULL_RTX, &op0, &op1, modifier);
8367 /* If the last operand is a CONST_INT, use plus_constant of
8368 the negated constant. Else make the MINUS. */
8369 if (CONST_INT_P (op1))
8370 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8371 -INTVAL (op1)));
8372 else
8373 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8376 /* No sense saving up arithmetic to be done
8377 if it's all in the wrong mode to form part of an address.
8378 And force_operand won't know whether to sign-extend or
8379 zero-extend. */
8380 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8381 || mode != ptr_mode)
8382 goto binop;
8384 expand_operands (treeop0, treeop1,
8385 subtarget, &op0, &op1, modifier);
8387 /* Convert A - const to A + (-const). */
8388 if (CONST_INT_P (op1))
8390 op1 = negate_rtx (mode, op1);
8391 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8394 goto binop2;
8396 case WIDEN_MULT_PLUS_EXPR:
8397 case WIDEN_MULT_MINUS_EXPR:
8398 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8399 op2 = expand_normal (treeop2);
8400 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8401 target, unsignedp);
8402 return target;
8404 case WIDEN_MULT_EXPR:
8405 /* If first operand is constant, swap them.
8406 Thus the following special case checks need only
8407 check the second operand. */
8408 if (TREE_CODE (treeop0) == INTEGER_CST)
8410 tree t1 = treeop0;
8411 treeop0 = treeop1;
8412 treeop1 = t1;
8415 /* First, check if we have a multiplication of one signed and one
8416 unsigned operand. */
8417 if (TREE_CODE (treeop1) != INTEGER_CST
8418 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8419 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8421 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8422 this_optab = usmul_widen_optab;
8423 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8424 != CODE_FOR_nothing)
8426 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8427 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8428 EXPAND_NORMAL);
8429 else
8430 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8431 EXPAND_NORMAL);
8432 /* op0 and op1 might still be constant, despite the above
8433 != INTEGER_CST check. Handle it. */
8434 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8436 op0 = convert_modes (innermode, mode, op0, true);
8437 op1 = convert_modes (innermode, mode, op1, false);
8438 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8439 target, unsignedp));
8441 goto binop3;
8444 /* Check for a multiplication with matching signedness. */
8445 else if ((TREE_CODE (treeop1) == INTEGER_CST
8446 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8447 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8448 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8450 tree op0type = TREE_TYPE (treeop0);
8451 enum machine_mode innermode = TYPE_MODE (op0type);
8452 bool zextend_p = TYPE_UNSIGNED (op0type);
8453 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8454 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8456 if (TREE_CODE (treeop0) != INTEGER_CST)
8458 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8459 != CODE_FOR_nothing)
8461 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8462 EXPAND_NORMAL);
8463 /* op0 and op1 might still be constant, despite the above
8464 != INTEGER_CST check. Handle it. */
8465 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8467 widen_mult_const:
8468 op0 = convert_modes (innermode, mode, op0, zextend_p);
8470 op1 = convert_modes (innermode, mode, op1,
8471 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8472 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8473 target,
8474 unsignedp));
8476 temp = expand_widening_mult (mode, op0, op1, target,
8477 unsignedp, this_optab);
8478 return REDUCE_BIT_FIELD (temp);
8480 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8481 != CODE_FOR_nothing
8482 && innermode == word_mode)
8484 rtx htem, hipart;
8485 op0 = expand_normal (treeop0);
8486 if (TREE_CODE (treeop1) == INTEGER_CST)
8487 op1 = convert_modes (innermode, mode,
8488 expand_normal (treeop1),
8489 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8490 else
8491 op1 = expand_normal (treeop1);
8492 /* op0 and op1 might still be constant, despite the above
8493 != INTEGER_CST check. Handle it. */
8494 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8495 goto widen_mult_const;
8496 temp = expand_binop (mode, other_optab, op0, op1, target,
8497 unsignedp, OPTAB_LIB_WIDEN);
8498 hipart = gen_highpart (innermode, temp);
8499 htem = expand_mult_highpart_adjust (innermode, hipart,
8500 op0, op1, hipart,
8501 zextend_p);
8502 if (htem != hipart)
8503 emit_move_insn (hipart, htem);
8504 return REDUCE_BIT_FIELD (temp);
8508 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8509 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8510 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8511 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8513 case FMA_EXPR:
8515 optab opt = fma_optab;
8516 gimple def0, def2;
8518 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8519 call. */
8520 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8522 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8523 tree call_expr;
8525 gcc_assert (fn != NULL_TREE);
8526 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8527 return expand_builtin (call_expr, target, subtarget, mode, false);
8530 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8531 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8533 op0 = op2 = NULL;
8535 if (def0 && def2
8536 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8538 opt = fnms_optab;
8539 op0 = expand_normal (gimple_assign_rhs1 (def0));
8540 op2 = expand_normal (gimple_assign_rhs1 (def2));
8542 else if (def0
8543 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8545 opt = fnma_optab;
8546 op0 = expand_normal (gimple_assign_rhs1 (def0));
8548 else if (def2
8549 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8551 opt = fms_optab;
8552 op2 = expand_normal (gimple_assign_rhs1 (def2));
8555 if (op0 == NULL)
8556 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8557 if (op2 == NULL)
8558 op2 = expand_normal (treeop2);
8559 op1 = expand_normal (treeop1);
8561 return expand_ternary_op (TYPE_MODE (type), opt,
8562 op0, op1, op2, target, 0);
8565 case MULT_EXPR:
8566 /* If this is a fixed-point operation, then we cannot use the code
8567 below because "expand_mult" doesn't support sat/no-sat fixed-point
8568 multiplications. */
8569 if (ALL_FIXED_POINT_MODE_P (mode))
8570 goto binop;
8572 /* If first operand is constant, swap them.
8573 Thus the following special case checks need only
8574 check the second operand. */
8575 if (TREE_CODE (treeop0) == INTEGER_CST)
8577 tree t1 = treeop0;
8578 treeop0 = treeop1;
8579 treeop1 = t1;
8582 /* Attempt to return something suitable for generating an
8583 indexed address, for machines that support that. */
8585 if (modifier == EXPAND_SUM && mode == ptr_mode
8586 && tree_fits_shwi_p (treeop1))
8588 tree exp1 = treeop1;
8590 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8591 EXPAND_SUM);
8593 if (!REG_P (op0))
8594 op0 = force_operand (op0, NULL_RTX);
8595 if (!REG_P (op0))
8596 op0 = copy_to_mode_reg (mode, op0);
8598 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8599 gen_int_mode (tree_to_shwi (exp1),
8600 TYPE_MODE (TREE_TYPE (exp1)))));
8603 if (modifier == EXPAND_STACK_PARM)
8604 target = 0;
8606 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8607 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8609 case TRUNC_DIV_EXPR:
8610 case FLOOR_DIV_EXPR:
8611 case CEIL_DIV_EXPR:
8612 case ROUND_DIV_EXPR:
8613 case EXACT_DIV_EXPR:
8614 /* If this is a fixed-point operation, then we cannot use the code
8615 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8616 divisions. */
8617 if (ALL_FIXED_POINT_MODE_P (mode))
8618 goto binop;
8620 if (modifier == EXPAND_STACK_PARM)
8621 target = 0;
8622 /* Possible optimization: compute the dividend with EXPAND_SUM
8623 then, if the divisor is constant, we can optimize the case
8624 where some terms of the dividend have coeffs divisible by it. */
8625 expand_operands (treeop0, treeop1,
8626 subtarget, &op0, &op1, EXPAND_NORMAL);
8627 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8629 case RDIV_EXPR:
8630 goto binop;
8632 case MULT_HIGHPART_EXPR:
8633 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8634 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8635 gcc_assert (temp);
8636 return temp;
8638 case TRUNC_MOD_EXPR:
8639 case FLOOR_MOD_EXPR:
8640 case CEIL_MOD_EXPR:
8641 case ROUND_MOD_EXPR:
8642 if (modifier == EXPAND_STACK_PARM)
8643 target = 0;
8644 expand_operands (treeop0, treeop1,
8645 subtarget, &op0, &op1, EXPAND_NORMAL);
8646 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8648 case FIXED_CONVERT_EXPR:
8649 op0 = expand_normal (treeop0);
8650 if (target == 0 || modifier == EXPAND_STACK_PARM)
8651 target = gen_reg_rtx (mode);
8653 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8654 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8655 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8656 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8657 else
8658 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8659 return target;
8661 case FIX_TRUNC_EXPR:
8662 op0 = expand_normal (treeop0);
8663 if (target == 0 || modifier == EXPAND_STACK_PARM)
8664 target = gen_reg_rtx (mode);
8665 expand_fix (target, op0, unsignedp);
8666 return target;
8668 case FLOAT_EXPR:
8669 op0 = expand_normal (treeop0);
8670 if (target == 0 || modifier == EXPAND_STACK_PARM)
8671 target = gen_reg_rtx (mode);
8672 /* expand_float can't figure out what to do if FROM has VOIDmode.
8673 So give it the correct mode. With -O, cse will optimize this. */
8674 if (GET_MODE (op0) == VOIDmode)
8675 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8676 op0);
8677 expand_float (target, op0,
8678 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8679 return target;
8681 case NEGATE_EXPR:
8682 op0 = expand_expr (treeop0, subtarget,
8683 VOIDmode, EXPAND_NORMAL);
8684 if (modifier == EXPAND_STACK_PARM)
8685 target = 0;
8686 temp = expand_unop (mode,
8687 optab_for_tree_code (NEGATE_EXPR, type,
8688 optab_default),
8689 op0, target, 0);
8690 gcc_assert (temp);
8691 return REDUCE_BIT_FIELD (temp);
8693 case ABS_EXPR:
8694 op0 = expand_expr (treeop0, subtarget,
8695 VOIDmode, EXPAND_NORMAL);
8696 if (modifier == EXPAND_STACK_PARM)
8697 target = 0;
8699 /* ABS_EXPR is not valid for complex arguments. */
8700 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8701 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8703 /* Unsigned abs is simply the operand. Testing here means we don't
8704 risk generating incorrect code below. */
8705 if (TYPE_UNSIGNED (type))
8706 return op0;
8708 return expand_abs (mode, op0, target, unsignedp,
8709 safe_from_p (target, treeop0, 1));
8711 case MAX_EXPR:
8712 case MIN_EXPR:
8713 target = original_target;
8714 if (target == 0
8715 || modifier == EXPAND_STACK_PARM
8716 || (MEM_P (target) && MEM_VOLATILE_P (target))
8717 || GET_MODE (target) != mode
8718 || (REG_P (target)
8719 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8720 target = gen_reg_rtx (mode);
8721 expand_operands (treeop0, treeop1,
8722 target, &op0, &op1, EXPAND_NORMAL);
8724 /* First try to do it with a special MIN or MAX instruction.
8725 If that does not win, use a conditional jump to select the proper
8726 value. */
8727 this_optab = optab_for_tree_code (code, type, optab_default);
8728 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8729 OPTAB_WIDEN);
8730 if (temp != 0)
8731 return temp;
8733 /* At this point, a MEM target is no longer useful; we will get better
8734 code without it. */
8736 if (! REG_P (target))
8737 target = gen_reg_rtx (mode);
8739 /* If op1 was placed in target, swap op0 and op1. */
8740 if (target != op0 && target == op1)
8742 temp = op0;
8743 op0 = op1;
8744 op1 = temp;
8747 /* We generate better code and avoid problems with op1 mentioning
8748 target by forcing op1 into a pseudo if it isn't a constant. */
8749 if (! CONSTANT_P (op1))
8750 op1 = force_reg (mode, op1);
8753 enum rtx_code comparison_code;
8754 rtx cmpop1 = op1;
8756 if (code == MAX_EXPR)
8757 comparison_code = unsignedp ? GEU : GE;
8758 else
8759 comparison_code = unsignedp ? LEU : LE;
8761 /* Canonicalize to comparisons against 0. */
8762 if (op1 == const1_rtx)
8764 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8765 or (a != 0 ? a : 1) for unsigned.
8766 For MIN we are safe converting (a <= 1 ? a : 1)
8767 into (a <= 0 ? a : 1) */
8768 cmpop1 = const0_rtx;
8769 if (code == MAX_EXPR)
8770 comparison_code = unsignedp ? NE : GT;
8772 if (op1 == constm1_rtx && !unsignedp)
8774 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8775 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8776 cmpop1 = const0_rtx;
8777 if (code == MIN_EXPR)
8778 comparison_code = LT;
8780 #ifdef HAVE_conditional_move
8781 /* Use a conditional move if possible. */
8782 if (can_conditionally_move_p (mode))
8784 rtx insn;
8786 start_sequence ();
8788 /* Try to emit the conditional move. */
8789 insn = emit_conditional_move (target, comparison_code,
8790 op0, cmpop1, mode,
8791 op0, op1, mode,
8792 unsignedp);
8794 /* If we could do the conditional move, emit the sequence,
8795 and return. */
8796 if (insn)
8798 rtx seq = get_insns ();
8799 end_sequence ();
8800 emit_insn (seq);
8801 return target;
8804 /* Otherwise discard the sequence and fall back to code with
8805 branches. */
8806 end_sequence ();
8808 #endif
8809 if (target != op0)
8810 emit_move_insn (target, op0);
8812 temp = gen_label_rtx ();
8813 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8814 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8815 -1);
8817 emit_move_insn (target, op1);
8818 emit_label (temp);
8819 return target;
8821 case BIT_NOT_EXPR:
8822 op0 = expand_expr (treeop0, subtarget,
8823 VOIDmode, EXPAND_NORMAL);
8824 if (modifier == EXPAND_STACK_PARM)
8825 target = 0;
8826 /* In case we have to reduce the result to bitfield precision
8827 for an unsigned bitfield, expand this as XOR with a proper constant
8828 instead. */
8829 if (reduce_bit_field && TYPE_UNSIGNED (type))
8831 wide_int mask = wi::mask (TYPE_PRECISION (type),
8832 false, GET_MODE_PRECISION (mode));
8834 temp = expand_binop (mode, xor_optab, op0,
8835 immed_wide_int_const (mask, mode),
8836 target, 1, OPTAB_LIB_WIDEN);
8838 else
8839 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8840 gcc_assert (temp);
8841 return temp;
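/* Illustration: for a value already reduced to an unsigned bit-field
   precision of, say, 3 bits, one's complement and the XOR above agree:
   (~x) & 7 == x ^ 7.  */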
8843 /* ??? Can optimize bitwise operations with one arg constant.
8844 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8845 and (a bitwise1 b) bitwise2 b (etc)
8846 but that is probably not worth while. */
8848 case BIT_AND_EXPR:
8849 case BIT_IOR_EXPR:
8850 case BIT_XOR_EXPR:
8851 goto binop;
8853 case LROTATE_EXPR:
8854 case RROTATE_EXPR:
8855 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8856 || (GET_MODE_PRECISION (TYPE_MODE (type))
8857 == TYPE_PRECISION (type)));
8858 /* fall through */
8860 case LSHIFT_EXPR:
8861 case RSHIFT_EXPR:
8862 /* If this is a fixed-point operation, then we cannot use the code
8863 below because "expand_shift" doesn't support sat/no-sat fixed-point
8864 shifts. */
8865 if (ALL_FIXED_POINT_MODE_P (mode))
8866 goto binop;
8868 if (! safe_from_p (subtarget, treeop1, 1))
8869 subtarget = 0;
8870 if (modifier == EXPAND_STACK_PARM)
8871 target = 0;
8872 op0 = expand_expr (treeop0, subtarget,
8873 VOIDmode, EXPAND_NORMAL);
8874 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8875 unsignedp);
8876 if (code == LSHIFT_EXPR)
8877 temp = REDUCE_BIT_FIELD (temp);
8878 return temp;
8880 /* Could determine the answer when only additive constants differ. Also,
8881 the addition of one can be handled by changing the condition. */
8882 case LT_EXPR:
8883 case LE_EXPR:
8884 case GT_EXPR:
8885 case GE_EXPR:
8886 case EQ_EXPR:
8887 case NE_EXPR:
8888 case UNORDERED_EXPR:
8889 case ORDERED_EXPR:
8890 case UNLT_EXPR:
8891 case UNLE_EXPR:
8892 case UNGT_EXPR:
8893 case UNGE_EXPR:
8894 case UNEQ_EXPR:
8895 case LTGT_EXPR:
8896 temp = do_store_flag (ops,
8897 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8898 tmode != VOIDmode ? tmode : mode);
8899 if (temp)
8900 return temp;
8902 /* Use a compare and a jump for BLKmode comparisons, or for function
8903 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8905 if ((target == 0
8906 || modifier == EXPAND_STACK_PARM
8907 || ! safe_from_p (target, treeop0, 1)
8908 || ! safe_from_p (target, treeop1, 1)
8909 /* Make sure we don't have a hard reg (such as function's return
8910 value) live across basic blocks, if not optimizing. */
8911 || (!optimize && REG_P (target)
8912 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8913 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8915 emit_move_insn (target, const0_rtx);
8917 op1 = gen_label_rtx ();
8918 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8920 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8921 emit_move_insn (target, constm1_rtx);
8922 else
8923 emit_move_insn (target, const1_rtx);
8925 emit_label (op1);
8926 return target;
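/* Illustration: in a signed 1-bit type the only values are 0 and -1, hence
   constm1_rtx rather than const1_rtx for "true" above.  */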
8928 case COMPLEX_EXPR:
8929 /* Get the rtx code of the operands. */
8930 op0 = expand_normal (treeop0);
8931 op1 = expand_normal (treeop1);
8933 if (!target)
8934 target = gen_reg_rtx (TYPE_MODE (type));
8935 else
8936 /* If target overlaps with op1, then either we need to force
8937 op1 into a pseudo (if target also overlaps with op0),
8938 or write the complex parts in reverse order. */
8939 switch (GET_CODE (target))
8941 case CONCAT:
8942 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8944 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8946 complex_expr_force_op1:
8947 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8948 emit_move_insn (temp, op1);
8949 op1 = temp;
8950 break;
8952 complex_expr_swap_order:
8953 /* Move the imaginary (op1) and real (op0) parts to their
8954 location. */
8955 write_complex_part (target, op1, true);
8956 write_complex_part (target, op0, false);
8958 return target;
8960 break;
8961 case MEM:
8962 temp = adjust_address_nv (target,
8963 GET_MODE_INNER (GET_MODE (target)), 0);
8964 if (reg_overlap_mentioned_p (temp, op1))
8966 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8967 temp = adjust_address_nv (target, imode,
8968 GET_MODE_SIZE (imode));
8969 if (reg_overlap_mentioned_p (temp, op0))
8970 goto complex_expr_force_op1;
8971 goto complex_expr_swap_order;
8973 break;
8974 default:
8975 if (reg_overlap_mentioned_p (target, op1))
8977 if (reg_overlap_mentioned_p (target, op0))
8978 goto complex_expr_force_op1;
8979 goto complex_expr_swap_order;
8981 break;
8984 /* Move the real (op0) and imaginary (op1) parts to their location. */
8985 write_complex_part (target, op0, false);
8986 write_complex_part (target, op1, true);
8988 return target;
8990 case WIDEN_SUM_EXPR:
8992 tree oprnd0 = treeop0;
8993 tree oprnd1 = treeop1;
8995 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8996 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8997 target, unsignedp);
8998 return target;
9001 case REDUC_MAX_EXPR:
9002 case REDUC_MIN_EXPR:
9003 case REDUC_PLUS_EXPR:
9005 op0 = expand_normal (treeop0);
9006 this_optab = optab_for_tree_code (code, type, optab_default);
9007 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9008 gcc_assert (temp);
9009 return temp;
9012 case VEC_LSHIFT_EXPR:
9013 case VEC_RSHIFT_EXPR:
9015 target = expand_vec_shift_expr (ops, target);
9016 return target;
9019 case VEC_UNPACK_HI_EXPR:
9020 case VEC_UNPACK_LO_EXPR:
9022 op0 = expand_normal (treeop0);
9023 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9024 target, unsignedp);
9025 gcc_assert (temp);
9026 return temp;
9029 case VEC_UNPACK_FLOAT_HI_EXPR:
9030 case VEC_UNPACK_FLOAT_LO_EXPR:
9032 op0 = expand_normal (treeop0);
9033 /* The signedness is determined from the input operand. */
9034 temp = expand_widen_pattern_expr
9035 (ops, op0, NULL_RTX, NULL_RTX,
9036 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9038 gcc_assert (temp);
9039 return temp;
9042 case VEC_WIDEN_MULT_HI_EXPR:
9043 case VEC_WIDEN_MULT_LO_EXPR:
9044 case VEC_WIDEN_MULT_EVEN_EXPR:
9045 case VEC_WIDEN_MULT_ODD_EXPR:
9046 case VEC_WIDEN_LSHIFT_HI_EXPR:
9047 case VEC_WIDEN_LSHIFT_LO_EXPR:
9048 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9049 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9050 target, unsignedp);
9051 gcc_assert (target);
9052 return target;
9054 case VEC_PACK_TRUNC_EXPR:
9055 case VEC_PACK_SAT_EXPR:
9056 case VEC_PACK_FIX_TRUNC_EXPR:
9057 mode = TYPE_MODE (TREE_TYPE (treeop0));
9058 goto binop;
9060 case VEC_PERM_EXPR:
9061 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9062 op2 = expand_normal (treeop2);
9064 /* Careful here: if the target doesn't support integral vector modes,
9065 a constant selection vector could wind up smooshed into a normal
9066 integral constant. */
9067 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9069 tree sel_type = TREE_TYPE (treeop2);
9070 enum machine_mode vmode
9071 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9072 TYPE_VECTOR_SUBPARTS (sel_type));
9073 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9074 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9075 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9077 else
9078 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9080 temp = expand_vec_perm (mode, op0, op1, op2, target);
9081 gcc_assert (temp);
9082 return temp;
9084 case DOT_PROD_EXPR:
9086 tree oprnd0 = treeop0;
9087 tree oprnd1 = treeop1;
9088 tree oprnd2 = treeop2;
9089 rtx op2;
9091 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9092 op2 = expand_normal (oprnd2);
9093 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9094 target, unsignedp);
9095 return target;
9098 case REALIGN_LOAD_EXPR:
9100 tree oprnd0 = treeop0;
9101 tree oprnd1 = treeop1;
9102 tree oprnd2 = treeop2;
9103 rtx op2;
9105 this_optab = optab_for_tree_code (code, type, optab_default);
9106 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9107 op2 = expand_normal (oprnd2);
9108 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9109 target, unsignedp);
9110 gcc_assert (temp);
9111 return temp;
9114 case COND_EXPR:
9115 /* A COND_EXPR with its type being VOID_TYPE represents a
9116 conditional jump and is handled in
9117 expand_gimple_cond_expr. */
9118 gcc_assert (!VOID_TYPE_P (type));
9120 /* Note that COND_EXPRs whose type is a structure or union
9121 are required to be constructed to contain assignments of
9122 a temporary variable, so that we can evaluate them here
9123 for side effect only. If type is void, we must do likewise. */
9125 gcc_assert (!TREE_ADDRESSABLE (type)
9126 && !ignore
9127 && TREE_TYPE (treeop1) != void_type_node
9128 && TREE_TYPE (treeop2) != void_type_node);
9130 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9131 if (temp)
9132 return temp;
9134 /* If we are not to produce a result, we have no target. Otherwise,
9135 if a target was specified use it; it will not be used as an
9136 intermediate target unless it is safe. If no target, use a
9137 temporary. */
9139 if (modifier != EXPAND_STACK_PARM
9140 && original_target
9141 && safe_from_p (original_target, treeop0, 1)
9142 && GET_MODE (original_target) == mode
9143 && !MEM_P (original_target))
9144 temp = original_target;
9145 else
9146 temp = assign_temp (type, 0, 1);
9148 do_pending_stack_adjust ();
9149 NO_DEFER_POP;
9150 op0 = gen_label_rtx ();
9151 op1 = gen_label_rtx ();
9152 jumpifnot (treeop0, op0, -1);
9153 store_expr (treeop1, temp,
9154 modifier == EXPAND_STACK_PARM,
9155 false);
9157 emit_jump_insn (gen_jump (op1));
9158 emit_barrier ();
9159 emit_label (op0);
9160 store_expr (treeop2, temp,
9161 modifier == EXPAND_STACK_PARM,
9162 false);
9164 emit_label (op1);
9165 OK_DEFER_POP;
9166 return temp;
9168 case VEC_COND_EXPR:
9169 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9170 return target;
9172 default:
9173 gcc_unreachable ();
9176 /* Here to do an ordinary binary operator. */
9177 binop:
9178 expand_operands (treeop0, treeop1,
9179 subtarget, &op0, &op1, EXPAND_NORMAL);
9180 binop2:
9181 this_optab = optab_for_tree_code (code, type, optab_default);
9182 binop3:
9183 if (modifier == EXPAND_STACK_PARM)
9184 target = 0;
9185 temp = expand_binop (mode, this_optab, op0, op1, target,
9186 unsignedp, OPTAB_LIB_WIDEN);
9187 gcc_assert (temp);
9188 /* Bitwise operations do not need bitfield reduction, as we expect their
9189 operands to be properly truncated. */
9190 if (code == BIT_XOR_EXPR
9191 || code == BIT_AND_EXPR
9192 || code == BIT_IOR_EXPR)
9193 return temp;
9194 return REDUCE_BIT_FIELD (temp);
9196 #undef REDUCE_BIT_FIELD
9199 /* Return TRUE if the statement STMT is suitable for replacement.
9200 Never consider memory loads as replaceable, because they never lead
9201 to constant expressions. */
9203 static bool
9204 stmt_is_replaceable_p (gimple stmt)
9206 if (ssa_is_replaceable_p (stmt))
9208 /* Don't move around loads. */
9209 if (!gimple_assign_single_p (stmt)
9210 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9211 return true;
9213 return false;
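/* As a sketch: a statement such as "_1 = _2 + _3" is replaceable (its
   right-hand side can simply be re-expanded at the use site), whereas
   "_1 = *p_2" is not, since moving the load could change the value read.  */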
9217 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9218 enum expand_modifier modifier, rtx *alt_rtl,
9219 bool inner_reference_p)
9221 rtx op0, op1, temp, decl_rtl;
9222 tree type;
9223 int unsignedp;
9224 enum machine_mode mode;
9225 enum tree_code code = TREE_CODE (exp);
9226 rtx subtarget, original_target;
9227 int ignore;
9228 tree context;
9229 bool reduce_bit_field;
9230 location_t loc = EXPR_LOCATION (exp);
9231 struct separate_ops ops;
9232 tree treeop0, treeop1, treeop2;
9233 tree ssa_name = NULL_TREE;
9234 gimple g;
9236 type = TREE_TYPE (exp);
9237 mode = TYPE_MODE (type);
9238 unsignedp = TYPE_UNSIGNED (type);
9240 treeop0 = treeop1 = treeop2 = NULL_TREE;
9241 if (!VL_EXP_CLASS_P (exp))
9242 switch (TREE_CODE_LENGTH (code))
9244 default:
9245 case 3: treeop2 = TREE_OPERAND (exp, 2);
9246 case 2: treeop1 = TREE_OPERAND (exp, 1);
9247 case 1: treeop0 = TREE_OPERAND (exp, 0);
9248 case 0: break;
9250 ops.code = code;
9251 ops.type = type;
9252 ops.op0 = treeop0;
9253 ops.op1 = treeop1;
9254 ops.op2 = treeop2;
9255 ops.location = loc;
9257 ignore = (target == const0_rtx
9258 || ((CONVERT_EXPR_CODE_P (code)
9259 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9260 && TREE_CODE (type) == VOID_TYPE));
9262 /* An operation in what may be a bit-field type needs the
9263 result to be reduced to the precision of the bit-field type,
9264 which is narrower than that of the type's mode. */
9265 reduce_bit_field = (!ignore
9266 && INTEGRAL_TYPE_P (type)
9267 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9269 /* If we are going to ignore this result, we need only do something
9270 if there is a side-effect somewhere in the expression. If there
9271 is, short-circuit the most common cases here. Note that we must
9272 not call expand_expr with anything but const0_rtx in case this
9273 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9275 if (ignore)
9277 if (! TREE_SIDE_EFFECTS (exp))
9278 return const0_rtx;
9280 /* Ensure we reference a volatile object even if value is ignored, but
9281 don't do this if all we are doing is taking its address. */
9282 if (TREE_THIS_VOLATILE (exp)
9283 && TREE_CODE (exp) != FUNCTION_DECL
9284 && mode != VOIDmode && mode != BLKmode
9285 && modifier != EXPAND_CONST_ADDRESS)
9287 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9288 if (MEM_P (temp))
9289 copy_to_reg (temp);
9290 return const0_rtx;
9293 if (TREE_CODE_CLASS (code) == tcc_unary
9294 || code == BIT_FIELD_REF
9295 || code == COMPONENT_REF
9296 || code == INDIRECT_REF)
9297 return expand_expr (treeop0, const0_rtx, VOIDmode,
9298 modifier);
9300 else if (TREE_CODE_CLASS (code) == tcc_binary
9301 || TREE_CODE_CLASS (code) == tcc_comparison
9302 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9304 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9305 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9306 return const0_rtx;
9309 target = 0;
9312 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9313 target = 0;
9315 /* Use subtarget as the target for operand 0 of a binary operation. */
9316 subtarget = get_subtarget (target);
9317 original_target = target;
9319 switch (code)
9321 case LABEL_DECL:
9323 tree function = decl_function_context (exp);
9325 temp = label_rtx (exp);
9326 temp = gen_rtx_LABEL_REF (Pmode, temp);
9328 if (function != current_function_decl
9329 && function != 0)
9330 LABEL_REF_NONLOCAL_P (temp) = 1;
9332 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9333 return temp;
9336 case SSA_NAME:
9337 /* ??? ivopts calls the expander without any preparation from
9338 out-of-ssa, so fake instructions as if this were an access to the
9339 base variable. This unnecessarily allocates a pseudo; see whether we
9340 can reuse it if partition base vars have it set already. */
9341 if (!currently_expanding_to_rtl)
9343 tree var = SSA_NAME_VAR (exp);
9344 if (var && DECL_RTL_SET_P (var))
9345 return DECL_RTL (var);
9346 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9347 LAST_VIRTUAL_REGISTER + 1);
9350 g = get_gimple_for_ssa_name (exp);
9351 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9352 if (g == NULL
9353 && modifier == EXPAND_INITIALIZER
9354 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9355 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9356 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9357 g = SSA_NAME_DEF_STMT (exp);
9358 if (g)
9360 rtx r;
9361 ops.code = gimple_assign_rhs_code (g);
9362 switch (get_gimple_rhs_class (ops.code))
9364 case GIMPLE_TERNARY_RHS:
9365 ops.op2 = gimple_assign_rhs3 (g);
9366 /* Fallthru */
9367 case GIMPLE_BINARY_RHS:
9368 ops.op1 = gimple_assign_rhs2 (g);
9369 /* Fallthru */
9370 case GIMPLE_UNARY_RHS:
9371 ops.op0 = gimple_assign_rhs1 (g);
9372 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9373 ops.location = gimple_location (g);
9374 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9375 break;
9376 case GIMPLE_SINGLE_RHS:
9378 location_t saved_loc = curr_insn_location ();
9379 set_curr_insn_location (gimple_location (g));
9380 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9381 tmode, modifier, NULL, inner_reference_p);
9382 set_curr_insn_location (saved_loc);
9383 break;
9385 default:
9386 gcc_unreachable ();
9388 if (REG_P (r) && !REG_EXPR (r))
9389 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9390 return r;
9393 ssa_name = exp;
9394 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9395 exp = SSA_NAME_VAR (ssa_name);
9396 goto expand_decl_rtl;
9398 case PARM_DECL:
9399 case VAR_DECL:
9400 /* If a static var's type was incomplete when the decl was written,
9401 but the type is complete now, lay out the decl now. */
9402 if (DECL_SIZE (exp) == 0
9403 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9404 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9405 layout_decl (exp, 0);
9407 /* ... fall through ... */
9409 case FUNCTION_DECL:
9410 case RESULT_DECL:
9411 decl_rtl = DECL_RTL (exp);
9412 expand_decl_rtl:
9413 gcc_assert (decl_rtl);
9414 decl_rtl = copy_rtx (decl_rtl);
9415 /* Record writes to register variables. */
9416 if (modifier == EXPAND_WRITE
9417 && REG_P (decl_rtl)
9418 && HARD_REGISTER_P (decl_rtl))
9419 add_to_hard_reg_set (&crtl->asm_clobbers,
9420 GET_MODE (decl_rtl), REGNO (decl_rtl));
9422 /* Ensure the variable is marked as used even if it doesn't go through
9423 a parser. If it hasn't been used yet, write out an external
9424 definition. */
9425 TREE_USED (exp) = 1;
9427 /* Show we haven't gotten RTL for this yet. */
9428 temp = 0;
9430 /* Variables inherited from containing functions should have
9431 been lowered by this point. */
9432 context = decl_function_context (exp);
9433 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9434 || context == current_function_decl
9435 || TREE_STATIC (exp)
9436 || DECL_EXTERNAL (exp)
9437 /* ??? C++ creates functions that are not TREE_STATIC. */
9438 || TREE_CODE (exp) == FUNCTION_DECL);
9440 /* This is the case of an array whose size is to be determined
9441 from its initializer, while the initializer is still being parsed.
9442 ??? We aren't parsing while expanding anymore. */
9444 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9445 temp = validize_mem (decl_rtl);
9447 /* If DECL_RTL is memory, we are in the normal case and the
9448 address is not valid, get the address into a register. */
9450 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9452 if (alt_rtl)
9453 *alt_rtl = decl_rtl;
9454 decl_rtl = use_anchored_address (decl_rtl);
9455 if (modifier != EXPAND_CONST_ADDRESS
9456 && modifier != EXPAND_SUM
9457 && !memory_address_addr_space_p (DECL_MODE (exp),
9458 XEXP (decl_rtl, 0),
9459 MEM_ADDR_SPACE (decl_rtl)))
9460 temp = replace_equiv_address (decl_rtl,
9461 copy_rtx (XEXP (decl_rtl, 0)));
9464 /* If we got something, return it. But first, set the alignment
9465 if the address is a register. */
9466 if (temp != 0)
9468 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9469 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9471 return temp;
9474 /* If the mode of DECL_RTL does not match that of the decl,
9475 there are two cases: we are dealing with a BLKmode value
9476 that is returned in a register, or we are dealing with
9477 a promoted value. In the latter case, return a SUBREG
9478 of the wanted mode, but mark it so that we know that it
9479 was already extended. */
9480 if (REG_P (decl_rtl)
9481 && DECL_MODE (exp) != BLKmode
9482 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9484 enum machine_mode pmode;
9486 /* Get the signedness to be used for this variable. Ensure we get
9487 the same mode we got when the variable was declared. */
9488 if (code == SSA_NAME
9489 && (g = SSA_NAME_DEF_STMT (ssa_name))
9490 && gimple_code (g) == GIMPLE_CALL
9491 && !gimple_call_internal_p (g))
9492 pmode = promote_function_mode (type, mode, &unsignedp,
9493 gimple_call_fntype (g),
9495 else
9496 pmode = promote_decl_mode (exp, &unsignedp);
9497 gcc_assert (GET_MODE (decl_rtl) == pmode);
9499 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9500 SUBREG_PROMOTED_VAR_P (temp) = 1;
9501 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9502 return temp;
9505 return decl_rtl;
9507 case INTEGER_CST:
9508 /* Given that TYPE_PRECISION (type) is not always equal to
9509 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9510 the former to the latter according to the signedness of the
9511 type. */
9512 temp = immed_wide_int_const (wide_int::from
9513 (exp,
9514 GET_MODE_PRECISION (TYPE_MODE (type)),
9515 TYPE_SIGN (type)),
9516 TYPE_MODE (type));
9517 return temp;
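/* For example (a sketch; exact precisions depend on the front end and
   target): if TYPE is a signed 3-bit bit-field type whose mode is QImode,
   the constant -1 has TYPE_PRECISION 3, and wide_int::from sign-extends it
   to the full 8-bit mode precision, yielding QImode -1 rather than 7.  */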
9519 case VECTOR_CST:
9521 tree tmp = NULL_TREE;
9522 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9523 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9524 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9525 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9526 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9527 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9528 return const_vector_from_tree (exp);
9529 if (GET_MODE_CLASS (mode) == MODE_INT)
9531 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9532 if (type_for_mode)
9533 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9535 if (!tmp)
9537 vec<constructor_elt, va_gc> *v;
9538 unsigned i;
9539 vec_alloc (v, VECTOR_CST_NELTS (exp));
9540 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9541 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9542 tmp = build_constructor (type, v);
9544 return expand_expr (tmp, ignore ? const0_rtx : target,
9545 tmode, modifier);
9548 case CONST_DECL:
9549 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9551 case REAL_CST:
9552 /* If optimized, generate immediate CONST_DOUBLE
9553 which will be turned into memory by reload if necessary.
9555 We used to force a register so that loop.c could see it. But
9556 this does not allow gen_* patterns to perform optimizations with
9557 the constants. It also produces two insns in cases like "x = 1.0;".
9558 On most machines, floating-point constants are not permitted in
9559 many insns, so we'd end up copying it to a register in any case.
9561 Now, we do the copying in expand_binop, if appropriate. */
9562 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9563 TYPE_MODE (TREE_TYPE (exp)));
9565 case FIXED_CST:
9566 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9567 TYPE_MODE (TREE_TYPE (exp)));
9569 case COMPLEX_CST:
9570 /* Handle evaluating a complex constant in a CONCAT target. */
9571 if (original_target && GET_CODE (original_target) == CONCAT)
9573 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9574 rtx rtarg, itarg;
9576 rtarg = XEXP (original_target, 0);
9577 itarg = XEXP (original_target, 1);
9579 /* Move the real and imaginary parts separately. */
9580 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9581 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9583 if (op0 != rtarg)
9584 emit_move_insn (rtarg, op0);
9585 if (op1 != itarg)
9586 emit_move_insn (itarg, op1);
9588 return original_target;
9591 /* ... fall through ... */
9593 case STRING_CST:
9594 temp = expand_expr_constant (exp, 1, modifier);
9596 /* temp contains a constant address.
9597 On RISC machines where a constant address isn't valid,
9598 make some insns to get that address into a register. */
9599 if (modifier != EXPAND_CONST_ADDRESS
9600 && modifier != EXPAND_INITIALIZER
9601 && modifier != EXPAND_SUM
9602 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9603 MEM_ADDR_SPACE (temp)))
9604 return replace_equiv_address (temp,
9605 copy_rtx (XEXP (temp, 0)));
9606 return temp;
9608 case SAVE_EXPR:
9610 tree val = treeop0;
9611 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9612 inner_reference_p);
9614 if (!SAVE_EXPR_RESOLVED_P (exp))
9616 /* We can indeed still hit this case, typically via builtin
9617 expanders calling save_expr immediately before expanding
9618 something. Assume this means that we only have to deal
9619 with non-BLKmode values. */
9620 gcc_assert (GET_MODE (ret) != BLKmode);
9622 val = build_decl (curr_insn_location (),
9623 VAR_DECL, NULL, TREE_TYPE (exp));
9624 DECL_ARTIFICIAL (val) = 1;
9625 DECL_IGNORED_P (val) = 1;
9626 treeop0 = val;
9627 TREE_OPERAND (exp, 0) = treeop0;
9628 SAVE_EXPR_RESOLVED_P (exp) = 1;
9630 if (!CONSTANT_P (ret))
9631 ret = copy_to_reg (ret);
9632 SET_DECL_RTL (val, ret);
9635 return ret;
9639 case CONSTRUCTOR:
9640 /* If we don't need the result, just ensure we evaluate any
9641 subexpressions. */
9642 if (ignore)
9644 unsigned HOST_WIDE_INT idx;
9645 tree value;
9647 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9648 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9650 return const0_rtx;
9653 return expand_constructor (exp, target, modifier, false);
9655 case TARGET_MEM_REF:
9657 addr_space_t as
9658 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9659 enum insn_code icode;
9660 unsigned int align;
9662 op0 = addr_for_mem_ref (exp, as, true);
9663 op0 = memory_address_addr_space (mode, op0, as);
9664 temp = gen_rtx_MEM (mode, op0);
9665 set_mem_attributes (temp, exp, 0);
9666 set_mem_addr_space (temp, as);
9667 align = get_object_alignment (exp);
9668 if (modifier != EXPAND_WRITE
9669 && modifier != EXPAND_MEMORY
9670 && mode != BLKmode
9671 && align < GET_MODE_ALIGNMENT (mode)
9672 /* If the target does not have special handling for unaligned
9673 loads of mode then it can use regular moves for them. */
9674 && ((icode = optab_handler (movmisalign_optab, mode))
9675 != CODE_FOR_nothing))
9677 struct expand_operand ops[2];
9679 /* We've already validated the memory, and we're creating a
9680 new pseudo destination. The predicates really can't fail,
9681 nor can the generator. */
9682 create_output_operand (&ops[0], NULL_RTX, mode);
9683 create_fixed_operand (&ops[1], temp);
9684 expand_insn (icode, 2, ops);
9685 temp = ops[0].value;
9687 return temp;
9690 case MEM_REF:
9692 addr_space_t as
9693 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9694 enum machine_mode address_mode;
9695 tree base = TREE_OPERAND (exp, 0);
9696 gimple def_stmt;
9697 enum insn_code icode;
9698 unsigned align;
9699 /* Handle expansion of non-aliased memory with non-BLKmode. That
9700 might end up in a register. */
9701 if (mem_ref_refers_to_non_mem_p (exp))
9703 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9704 base = TREE_OPERAND (base, 0);
9705 if (offset == 0
9706 && tree_fits_uhwi_p (TYPE_SIZE (type))
9707 && (GET_MODE_BITSIZE (DECL_MODE (base))
9708 == tree_to_uhwi (TYPE_SIZE (type))))
9709 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9710 target, tmode, modifier);
9711 if (TYPE_MODE (type) == BLKmode)
9713 temp = assign_stack_temp (DECL_MODE (base),
9714 GET_MODE_SIZE (DECL_MODE (base)));
9715 store_expr (base, temp, 0, false);
9716 temp = adjust_address (temp, BLKmode, offset);
9717 set_mem_size (temp, int_size_in_bytes (type));
9718 return temp;
9720 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9721 bitsize_int (offset * BITS_PER_UNIT));
9722 return expand_expr (exp, target, tmode, modifier);
9724 address_mode = targetm.addr_space.address_mode (as);
9725 base = TREE_OPERAND (exp, 0);
9726 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9728 tree mask = gimple_assign_rhs2 (def_stmt);
9729 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9730 gimple_assign_rhs1 (def_stmt), mask);
9731 TREE_OPERAND (exp, 0) = base;
9733 align = get_object_alignment (exp);
9734 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9735 op0 = memory_address_addr_space (mode, op0, as);
9736 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9738 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9739 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9740 op0 = memory_address_addr_space (mode, op0, as);
9742 temp = gen_rtx_MEM (mode, op0);
9743 set_mem_attributes (temp, exp, 0);
9744 set_mem_addr_space (temp, as);
9745 if (TREE_THIS_VOLATILE (exp))
9746 MEM_VOLATILE_P (temp) = 1;
9747 if (modifier != EXPAND_WRITE
9748 && modifier != EXPAND_MEMORY
9749 && !inner_reference_p
9750 && mode != BLKmode
9751 && align < GET_MODE_ALIGNMENT (mode))
9753 if ((icode = optab_handler (movmisalign_optab, mode))
9754 != CODE_FOR_nothing)
9756 struct expand_operand ops[2];
9758 /* We've already validated the memory, and we're creating a
9759 new pseudo destination. The predicates really can't fail,
9760 nor can the generator. */
9761 create_output_operand (&ops[0], NULL_RTX, mode);
9762 create_fixed_operand (&ops[1], temp);
9763 expand_insn (icode, 2, ops);
9764 temp = ops[0].value;
9766 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9767 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9768 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9769 (modifier == EXPAND_STACK_PARM
9770 ? NULL_RTX : target),
9771 mode, mode);
9773 return temp;
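/* A sketch of the unaligned path above: for a MEM_REF whose known
   alignment is smaller than GET_MODE_ALIGNMENT (mode), the expander
   prefers the target's movmisalign<mode> pattern when one exists;
   otherwise, when SLOW_UNALIGNED_ACCESS holds, it falls back to
   extract_bit_field, which may assemble the value from narrower,
   suitably aligned pieces.  */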
9776 case ARRAY_REF:
9779 tree array = treeop0;
9780 tree index = treeop1;
9781 tree init;
9783 /* Fold an expression like: "foo"[2].
9784 This is not done in fold so it won't happen inside &.
9785 Don't fold if this is for wide characters since it's too
9786 difficult to do correctly and this is a very rare case. */
9788 if (modifier != EXPAND_CONST_ADDRESS
9789 && modifier != EXPAND_INITIALIZER
9790 && modifier != EXPAND_MEMORY)
9792 tree t = fold_read_from_constant_string (exp);
9794 if (t)
9795 return expand_expr (t, target, tmode, modifier);
9798 /* If this is a constant index into a constant array,
9799 just get the value from the array. Handle both the cases when
9800 we have an explicit constructor and when our operand is a variable
9801 that was declared const. */
9803 if (modifier != EXPAND_CONST_ADDRESS
9804 && modifier != EXPAND_INITIALIZER
9805 && modifier != EXPAND_MEMORY
9806 && TREE_CODE (array) == CONSTRUCTOR
9807 && ! TREE_SIDE_EFFECTS (array)
9808 && TREE_CODE (index) == INTEGER_CST)
9810 unsigned HOST_WIDE_INT ix;
9811 tree field, value;
9813 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9814 field, value)
9815 if (tree_int_cst_equal (field, index))
9817 if (!TREE_SIDE_EFFECTS (value))
9818 return expand_expr (fold (value), target, tmode, modifier);
9819 break;
9823 else if (optimize >= 1
9824 && modifier != EXPAND_CONST_ADDRESS
9825 && modifier != EXPAND_INITIALIZER
9826 && modifier != EXPAND_MEMORY
9827 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9828 && TREE_CODE (index) == INTEGER_CST
9829 && (TREE_CODE (array) == VAR_DECL
9830 || TREE_CODE (array) == CONST_DECL)
9831 && (init = ctor_for_folding (array)) != error_mark_node)
9833 if (init == NULL_TREE)
9835 tree value = build_zero_cst (type);
9836 if (TREE_CODE (value) == CONSTRUCTOR)
9838 /* If VALUE is a CONSTRUCTOR, this optimization is only
9839 useful if this doesn't store the CONSTRUCTOR into
9840 memory. If it does, it is more efficient to just
9841 load the data from the array directly. */
9842 rtx ret = expand_constructor (value, target,
9843 modifier, true);
9844 if (ret == NULL_RTX)
9845 value = NULL_TREE;
9848 if (value)
9849 return expand_expr (value, target, tmode, modifier);
9851 else if (TREE_CODE (init) == CONSTRUCTOR)
9853 unsigned HOST_WIDE_INT ix;
9854 tree field, value;
9856 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9857 field, value)
9858 if (tree_int_cst_equal (field, index))
9860 if (TREE_SIDE_EFFECTS (value))
9861 break;
9863 if (TREE_CODE (value) == CONSTRUCTOR)
9865 /* If VALUE is a CONSTRUCTOR, this
9866 optimization is only useful if
9867 this doesn't store the CONSTRUCTOR
9868 into memory. If it does, it is more
9869 efficient to just load the data from
9870 the array directly. */
9871 rtx ret = expand_constructor (value, target,
9872 modifier, true);
9873 if (ret == NULL_RTX)
9874 break;
9877 return
9878 expand_expr (fold (value), target, tmode, modifier);
9881 else if (TREE_CODE (init) == STRING_CST)
9883 tree low_bound = array_ref_low_bound (exp);
9884 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9886 /* Optimize the special case of a zero lower bound.
9888 We convert the lower bound to sizetype to avoid problems
9889 with constant folding. E.g. suppose the lower bound is
9890 1 and its mode is QI. Without the conversion
9891 (ARRAY + (INDEX - (unsigned char)1))
9892 becomes
9893 (ARRAY + (-(unsigned char)1) + INDEX)
9894 which becomes
9895 (ARRAY + 255 + INDEX). Oops! */
9896 if (!integer_zerop (low_bound))
9897 index1 = size_diffop_loc (loc, index1,
9898 fold_convert_loc (loc, sizetype,
9899 low_bound));
9901 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9903 tree type = TREE_TYPE (TREE_TYPE (init));
9904 enum machine_mode mode = TYPE_MODE (type);
9906 if (GET_MODE_CLASS (mode) == MODE_INT
9907 && GET_MODE_SIZE (mode) == 1)
9908 return gen_int_mode (TREE_STRING_POINTER (init)
9909 [TREE_INT_CST_LOW (index1)],
9910 mode);
9915 goto normal_inner_ref;
9917 case COMPONENT_REF:
9918 /* If the operand is a CONSTRUCTOR, we can just extract the
9919 appropriate field if it is present. */
9920 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9922 unsigned HOST_WIDE_INT idx;
9923 tree field, value;
9925 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9926 idx, field, value)
9927 if (field == treeop1
9928 /* We can normally use the value of the field in the
9929 CONSTRUCTOR. However, if this is a bitfield in
9930 an integral mode that we can fit in a HOST_WIDE_INT,
9931 we must mask only the number of bits in the bitfield,
9932 since this is done implicitly by the constructor. If
9933 the bitfield does not meet either of those conditions,
9934 we can't do this optimization. */
9935 && (! DECL_BIT_FIELD (field)
9936 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9937 && (GET_MODE_PRECISION (DECL_MODE (field))
9938 <= HOST_BITS_PER_WIDE_INT))))
9940 if (DECL_BIT_FIELD (field)
9941 && modifier == EXPAND_STACK_PARM)
9942 target = 0;
9943 op0 = expand_expr (value, target, tmode, modifier);
9944 if (DECL_BIT_FIELD (field))
9946 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9947 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9949 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9951 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9952 imode);
9953 op0 = expand_and (imode, op0, op1, target);
9955 else
9957 int count = GET_MODE_PRECISION (imode) - bitsize;
9959 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9960 target, 0);
9961 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9962 target, 0);
9966 return op0;
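/* As an illustration (a sketch): for an unsigned 3-bit field the value is
   masked with (1 << 3) - 1 = 7; for a signed 3-bit field whose mode has
   precision P, it is shifted left by P - 3 and then arithmetically right
   by P - 3, sign-extending the value without going through memory.  */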
9969 goto normal_inner_ref;
9971 case BIT_FIELD_REF:
9972 case ARRAY_RANGE_REF:
9973 normal_inner_ref:
9975 enum machine_mode mode1, mode2;
9976 HOST_WIDE_INT bitsize, bitpos;
9977 tree offset;
9978 int volatilep = 0, must_force_mem;
9979 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9980 &mode1, &unsignedp, &volatilep, true);
9981 rtx orig_op0, memloc;
9982 bool mem_attrs_from_type = false;
9984 /* If we got back the original object, something is wrong. Perhaps
9985 we are evaluating an expression too early. In any event, don't
9986 infinitely recurse. */
9987 gcc_assert (tem != exp);
9989 /* If TEM's type is a union of variable size, pass TARGET to the inner
9990 computation, since it will need a temporary and TARGET is known
9991 to suffice for that. This occurs in unchecked conversion in Ada. */
9992 orig_op0 = op0
9993 = expand_expr_real (tem,
9994 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9995 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9996 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9997 != INTEGER_CST)
9998 && modifier != EXPAND_STACK_PARM
9999 ? target : NULL_RTX),
10000 VOIDmode,
10001 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10002 NULL, true);
10004 /* If the field has a mode, we want to access it in the
10005 field's mode, not the computed mode.
10006 If a MEM has VOIDmode (external with incomplete type),
10007 use BLKmode for it instead. */
10008 if (MEM_P (op0))
10010 if (mode1 != VOIDmode)
10011 op0 = adjust_address (op0, mode1, 0);
10012 else if (GET_MODE (op0) == VOIDmode)
10013 op0 = adjust_address (op0, BLKmode, 0);
10016 mode2
10017 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10019 /* If we have either an offset, a BLKmode result, or a reference
10020 outside the underlying object, we must force it to memory.
10021 Such a case can occur in Ada if we have unchecked conversion
10022 of an expression from a scalar type to an aggregate type or
10023 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10024 passed a partially uninitialized object or a view-conversion
10025 to a larger size. */
10026 must_force_mem = (offset
10027 || mode1 == BLKmode
10028 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10030 /* Handle CONCAT first. */
10031 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10033 if (bitpos == 0
10034 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10035 return op0;
10036 if (bitpos == 0
10037 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10038 && bitsize)
10040 op0 = XEXP (op0, 0);
10041 mode2 = GET_MODE (op0);
10043 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10044 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10045 && bitpos
10046 && bitsize)
10048 op0 = XEXP (op0, 1);
10049 bitpos = 0;
10050 mode2 = GET_MODE (op0);
10052 else
10053 /* Otherwise force into memory. */
10054 must_force_mem = 1;
10057 /* If this is a constant, put it in a register if it is a legitimate
10058 constant and we don't need a memory reference. */
10059 if (CONSTANT_P (op0)
10060 && mode2 != BLKmode
10061 && targetm.legitimate_constant_p (mode2, op0)
10062 && !must_force_mem)
10063 op0 = force_reg (mode2, op0);
10065 /* Otherwise, if this is a constant, try to force it to the constant
10066 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10067 is a legitimate constant. */
10068 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10069 op0 = validize_mem (memloc);
10071 /* Otherwise, if this is a constant, or if the object is not in memory
10072 and needs to be, put it there. */
10073 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10075 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10076 emit_move_insn (memloc, op0);
10077 op0 = memloc;
10078 mem_attrs_from_type = true;
10081 if (offset)
10083 enum machine_mode address_mode;
10084 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10085 EXPAND_SUM);
10087 gcc_assert (MEM_P (op0));
10089 address_mode = get_address_mode (op0);
10090 if (GET_MODE (offset_rtx) != address_mode)
10091 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10093 if (GET_MODE (op0) == BLKmode
10094 /* The check for a constant address in OP0 not having VOIDmode
10095 is probably no longer necessary. */
10096 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10097 && bitsize != 0
10098 && (bitpos % bitsize) == 0
10099 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10100 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10102 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10103 bitpos = 0;
10106 op0 = offset_address (op0, offset_rtx,
10107 highest_pow2_factor (offset));
10110 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10111 record its alignment as BIGGEST_ALIGNMENT. */
10112 if (MEM_P (op0) && bitpos == 0 && offset != 0
10113 && is_aligning_offset (offset, tem))
10114 set_mem_align (op0, BIGGEST_ALIGNMENT);
10116 /* Don't forget about volatility even if this is a bitfield. */
10117 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10119 if (op0 == orig_op0)
10120 op0 = copy_rtx (op0);
10122 MEM_VOLATILE_P (op0) = 1;
10125 /* In cases where an aligned union has an unaligned object
10126 as a field, we might be extracting a BLKmode value from
10127 an integer-mode (e.g., SImode) object. Handle this case
10128 by doing the extract into an object as wide as the field
10129 (which we know to be the width of a basic mode), then
10130 storing into memory, and changing the mode to BLKmode. */
10131 if (mode1 == VOIDmode
10132 || REG_P (op0) || GET_CODE (op0) == SUBREG
10133 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10134 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10135 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10136 && modifier != EXPAND_CONST_ADDRESS
10137 && modifier != EXPAND_INITIALIZER
10138 && modifier != EXPAND_MEMORY)
10139 /* If the bitfield is volatile and the bitsize
10140 is narrower than the access size of the bitfield,
10141 we need to extract bitfields from the access. */
10142 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10143 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10144 && mode1 != BLKmode
10145 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10146 /* If the field isn't aligned enough to fetch as a memref,
10147 fetch it as a bit field. */
10148 || (mode1 != BLKmode
10149 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10150 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10151 || (MEM_P (op0)
10152 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10153 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10154 && modifier != EXPAND_MEMORY
10155 && ((modifier == EXPAND_CONST_ADDRESS
10156 || modifier == EXPAND_INITIALIZER)
10157 ? STRICT_ALIGNMENT
10158 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10159 || (bitpos % BITS_PER_UNIT != 0)))
10160 /* If the type and the field are a constant size and the
10161 size of the type isn't the same size as the bitfield,
10162 we must use bitfield operations. */
10163 || (bitsize >= 0
10164 && TYPE_SIZE (TREE_TYPE (exp))
10165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10166 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10167 bitsize)))
10169 enum machine_mode ext_mode = mode;
10171 if (ext_mode == BLKmode
10172 && ! (target != 0 && MEM_P (op0)
10173 && MEM_P (target)
10174 && bitpos % BITS_PER_UNIT == 0))
10175 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10177 if (ext_mode == BLKmode)
10179 if (target == 0)
10180 target = assign_temp (type, 1, 1);
10182 /* ??? Unlike the similar test a few lines below, this one is
10183 very likely obsolete. */
10184 if (bitsize == 0)
10185 return target;
10187 /* In this case, BITPOS must start at a byte boundary and
10188 TARGET, if specified, must be a MEM. */
10189 gcc_assert (MEM_P (op0)
10190 && (!target || MEM_P (target))
10191 && !(bitpos % BITS_PER_UNIT));
10193 emit_block_move (target,
10194 adjust_address (op0, VOIDmode,
10195 bitpos / BITS_PER_UNIT),
10196 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10197 / BITS_PER_UNIT),
10198 (modifier == EXPAND_STACK_PARM
10199 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10201 return target;
10204 /* If we have nothing to extract, the result will be 0 for targets
10205 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10206 return 0 for the sake of consistency, as reading a zero-sized
10207 bitfield is valid in Ada and the value is fully specified. */
10208 if (bitsize == 0)
10209 return const0_rtx;
10211 op0 = validize_mem (op0);
10213 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10214 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10216 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10217 (modifier == EXPAND_STACK_PARM
10218 ? NULL_RTX : target),
10219 ext_mode, ext_mode);
10221 /* If the result is a record type and BITSIZE is narrower than
10222 the mode of OP0, an integral mode, and this is a big endian
10223 machine, we must put the field into the high-order bits. */
10224 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10225 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10226 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10227 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10228 GET_MODE_BITSIZE (GET_MODE (op0))
10229 - bitsize, op0, 1);
10231 /* If the result type is BLKmode, store the data into a temporary
10232 of the appropriate type, but with the mode corresponding to the
10233 mode for the data we have (op0's mode). */
10234 if (mode == BLKmode)
10236 rtx new_rtx
10237 = assign_stack_temp_for_type (ext_mode,
10238 GET_MODE_BITSIZE (ext_mode),
10239 type);
10240 emit_move_insn (new_rtx, op0);
10241 op0 = copy_rtx (new_rtx);
10242 PUT_MODE (op0, BLKmode);
10245 return op0;
10248 /* If the result is BLKmode, use that to access the object
10249 now as well. */
10250 if (mode == BLKmode)
10251 mode1 = BLKmode;
10253 /* Get a reference to just this component. */
10254 if (modifier == EXPAND_CONST_ADDRESS
10255 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10256 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10257 else
10258 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10260 if (op0 == orig_op0)
10261 op0 = copy_rtx (op0);
10263 /* If op0 is a temporary because of forcing to memory, pass only the
10264 type to set_mem_attributes so that the original expression is never
10265 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10266 if (mem_attrs_from_type)
10267 set_mem_attributes (op0, type, 0);
10268 else
10269 set_mem_attributes (op0, exp, 0);
10271 if (REG_P (XEXP (op0, 0)))
10272 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10274 MEM_VOLATILE_P (op0) |= volatilep;
10275 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10276 || modifier == EXPAND_CONST_ADDRESS
10277 || modifier == EXPAND_INITIALIZER)
10278 return op0;
10280 if (target == 0)
10281 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10283 convert_move (target, op0, unsignedp);
10284 return target;
10287 case OBJ_TYPE_REF:
10288 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10290 case CALL_EXPR:
10291 /* All valid uses of __builtin_va_arg_pack () are removed during
10292 inlining. */
10293 if (CALL_EXPR_VA_ARG_PACK (exp))
10294 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10296 tree fndecl = get_callee_fndecl (exp), attr;
10298 if (fndecl
10299 && (attr = lookup_attribute ("error",
10300 DECL_ATTRIBUTES (fndecl))) != NULL)
10301 error ("%Kcall to %qs declared with attribute error: %s",
10302 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10303 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10304 if (fndecl
10305 && (attr = lookup_attribute ("warning",
10306 DECL_ATTRIBUTES (fndecl))) != NULL)
10307 warning_at (tree_nonartificial_location (exp),
10308 0, "%Kcall to %qs declared with attribute warning: %s",
10309 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10310 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10312 /* Check for a built-in function. */
10313 if (fndecl && DECL_BUILT_IN (fndecl))
10315 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10316 return expand_builtin (exp, target, subtarget, tmode, ignore);
10319 return expand_call (exp, target, ignore);
10321 case VIEW_CONVERT_EXPR:
10322 op0 = NULL_RTX;
10324 /* If we are converting to BLKmode, try to avoid an intermediate
10325 temporary by fetching an inner memory reference. */
10326 if (mode == BLKmode
10327 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10328 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10329 && handled_component_p (treeop0))
10331 enum machine_mode mode1;
10332 HOST_WIDE_INT bitsize, bitpos;
10333 tree offset;
10334 int unsignedp;
10335 int volatilep = 0;
10336 tree tem
10337 = get_inner_reference (treeop0, &bitsize, &bitpos,
10338 &offset, &mode1, &unsignedp, &volatilep,
10339 true);
10340 rtx orig_op0;
10342 /* ??? We should work harder and deal with non-zero offsets. */
10343 if (!offset
10344 && (bitpos % BITS_PER_UNIT) == 0
10345 && bitsize >= 0
10346 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10348 /* See the normal_inner_ref case for the rationale. */
10349 orig_op0
10350 = expand_expr_real (tem,
10351 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10352 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10353 != INTEGER_CST)
10354 && modifier != EXPAND_STACK_PARM
10355 ? target : NULL_RTX),
10356 VOIDmode,
10357 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10358 NULL, true);
10360 if (MEM_P (orig_op0))
10362 op0 = orig_op0;
10364 /* Get a reference to just this component. */
10365 if (modifier == EXPAND_CONST_ADDRESS
10366 || modifier == EXPAND_SUM
10367 || modifier == EXPAND_INITIALIZER)
10368 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10369 else
10370 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10372 if (op0 == orig_op0)
10373 op0 = copy_rtx (op0);
10375 set_mem_attributes (op0, treeop0, 0);
10376 if (REG_P (XEXP (op0, 0)))
10377 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10379 MEM_VOLATILE_P (op0) |= volatilep;
10384 if (!op0)
10385 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10386 NULL, inner_reference_p);
10388 /* If the input and output modes are both the same, we are done. */
10389 if (mode == GET_MODE (op0))
10391 /* If neither mode is BLKmode, and both modes are the same size
10392 then we can use gen_lowpart. */
10393 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10394 && (GET_MODE_PRECISION (mode)
10395 == GET_MODE_PRECISION (GET_MODE (op0)))
10396 && !COMPLEX_MODE_P (GET_MODE (op0)))
10398 if (GET_CODE (op0) == SUBREG)
10399 op0 = force_reg (GET_MODE (op0), op0);
10400 temp = gen_lowpart_common (mode, op0);
10401 if (temp)
10402 op0 = temp;
10403 else
10405 if (!REG_P (op0) && !MEM_P (op0))
10406 op0 = force_reg (GET_MODE (op0), op0);
10407 op0 = gen_lowpart (mode, op0);
10410 /* If both types are integral, convert from one mode to the other. */
10411 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10412 op0 = convert_modes (mode, GET_MODE (op0), op0,
10413 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10414 /* If the output type is a bit-field type, do an extraction. */
10415 else if (reduce_bit_field)
10416 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10417 TYPE_UNSIGNED (type), NULL_RTX,
10418 mode, mode);
10419 /* As a last resort, spill op0 to memory, and reload it in a
10420 different mode. */
10421 else if (!MEM_P (op0))
10423 /* If the operand is not a MEM, force it into memory. Since we
10424 are going to be changing the mode of the MEM, don't call
10425 force_const_mem for constants because we don't allow pool
10426 constants to change mode. */
10427 tree inner_type = TREE_TYPE (treeop0);
10429 gcc_assert (!TREE_ADDRESSABLE (exp));
10431 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10432 target
10433 = assign_stack_temp_for_type
10434 (TYPE_MODE (inner_type),
10435 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10437 emit_move_insn (target, op0);
10438 op0 = target;
10441 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10442 output type is such that the operand is known to be aligned, indicate
10443 that it is. Otherwise, we need only be concerned about alignment for
10444 non-BLKmode results. */
10445 if (MEM_P (op0))
10447 enum insn_code icode;
10449 if (TYPE_ALIGN_OK (type))
10451 /* ??? Copying the MEM without substantially changing it might
10452 run afoul of the code handling volatile memory references in
10453 store_expr, which assumes that TARGET is returned unmodified
10454 if it has been used. */
10455 op0 = copy_rtx (op0);
10456 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10458 else if (modifier != EXPAND_WRITE
10459 && modifier != EXPAND_MEMORY
10460 && !inner_reference_p
10461 && mode != BLKmode
10462 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10464 /* If the target does have special handling for unaligned
10465 loads of mode then use them. */
10466 if ((icode = optab_handler (movmisalign_optab, mode))
10467 != CODE_FOR_nothing)
10469 rtx reg, insn;
10471 op0 = adjust_address (op0, mode, 0);
10472 /* We've already validated the memory, and we're creating a
10473 new pseudo destination. The predicates really can't
10474 fail. */
10475 reg = gen_reg_rtx (mode);
10477 /* Nor can the insn generator. */
10478 insn = GEN_FCN (icode) (reg, op0);
10479 emit_insn (insn);
10480 return reg;
10482 else if (STRICT_ALIGNMENT)
10484 tree inner_type = TREE_TYPE (treeop0);
10485 HOST_WIDE_INT temp_size
10486 = MAX (int_size_in_bytes (inner_type),
10487 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10488 rtx new_rtx
10489 = assign_stack_temp_for_type (mode, temp_size, type);
10490 rtx new_with_op0_mode
10491 = adjust_address (new_rtx, GET_MODE (op0), 0);
10493 gcc_assert (!TREE_ADDRESSABLE (exp));
10495 if (GET_MODE (op0) == BLKmode)
10496 emit_block_move (new_with_op0_mode, op0,
10497 GEN_INT (GET_MODE_SIZE (mode)),
10498 (modifier == EXPAND_STACK_PARM
10499 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10500 else
10501 emit_move_insn (new_with_op0_mode, op0);
10503 op0 = new_rtx;
10507 op0 = adjust_address (op0, mode, 0);
10510 return op0;
10512 case MODIFY_EXPR:
10514 tree lhs = treeop0;
10515 tree rhs = treeop1;
10516 gcc_assert (ignore);
10518 /* Check for |= or &= of a bitfield of size one into another bitfield
10519 of size one. In this case (unless we need the result of the
10520 assignment), we can do this more efficiently with a
10521 test followed by an assignment, if necessary.
10523 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10524 things change so we do, this code should be enhanced to
10525 support it. */
10526 if (TREE_CODE (lhs) == COMPONENT_REF
10527 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10528 || TREE_CODE (rhs) == BIT_AND_EXPR)
10529 && TREE_OPERAND (rhs, 0) == lhs
10530 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10531 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10532 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10534 rtx label = gen_label_rtx ();
10535 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10536 do_jump (TREE_OPERAND (rhs, 1),
10537 value ? label : 0,
10538 value ? 0 : label, -1);
10539 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10540 false);
10541 do_pending_stack_adjust ();
10542 emit_label (label);
10543 return const0_rtx;
10546 expand_assignment (lhs, rhs, false);
10547 return const0_rtx;
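/* The special case above turns, e.g., "s.a |= s.b" with two one-bit
   fields into (a sketch)

       if (s.b) s.a = 1;

   i.e. a conditional jump around a constant store, instead of a
   read-modify-write of the destination bit-field.  */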
10550 case ADDR_EXPR:
10551 return expand_expr_addr_expr (exp, target, tmode, modifier);
10553 case REALPART_EXPR:
10554 op0 = expand_normal (treeop0);
10555 return read_complex_part (op0, false);
10557 case IMAGPART_EXPR:
10558 op0 = expand_normal (treeop0);
10559 return read_complex_part (op0, true);
10561 case RETURN_EXPR:
10562 case LABEL_EXPR:
10563 case GOTO_EXPR:
10564 case SWITCH_EXPR:
10565 case ASM_EXPR:
10566 /* Expanded in cfgexpand.c. */
10567 gcc_unreachable ();
10569 case TRY_CATCH_EXPR:
10570 case CATCH_EXPR:
10571 case EH_FILTER_EXPR:
10572 case TRY_FINALLY_EXPR:
10573 /* Lowered by tree-eh.c. */
10574 gcc_unreachable ();
10576 case WITH_CLEANUP_EXPR:
10577 case CLEANUP_POINT_EXPR:
10578 case TARGET_EXPR:
10579 case CASE_LABEL_EXPR:
10580 case VA_ARG_EXPR:
10581 case BIND_EXPR:
10582 case INIT_EXPR:
10583 case CONJ_EXPR:
10584 case COMPOUND_EXPR:
10585 case PREINCREMENT_EXPR:
10586 case PREDECREMENT_EXPR:
10587 case POSTINCREMENT_EXPR:
10588 case POSTDECREMENT_EXPR:
10589 case LOOP_EXPR:
10590 case EXIT_EXPR:
10591 case COMPOUND_LITERAL_EXPR:
10592 /* Lowered by gimplify.c. */
10593 gcc_unreachable ();
10595 case FDESC_EXPR:
10596 /* Function descriptors are not valid except for as
10597 initialization constants, and should not be expanded. */
10598 gcc_unreachable ();
10600 case WITH_SIZE_EXPR:
10601 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10602 have pulled out the size to use in whatever context it needed. */
10603 return expand_expr_real (treeop0, original_target, tmode,
10604 modifier, alt_rtl, inner_reference_p);
10606 default:
10607 return expand_expr_real_2 (&ops, target, tmode, modifier);
10611 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10612 signedness of TYPE), possibly returning the result in TARGET. */
10613 static rtx
10614 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10616 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10617 if (target && GET_MODE (target) != GET_MODE (exp))
10618 target = 0;
10619 /* For constant values, reduce using build_int_cst_type. */
10620 if (CONST_INT_P (exp))
10622 HOST_WIDE_INT value = INTVAL (exp);
10623 tree t = build_int_cst_type (type, value);
10624 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10626 else if (TYPE_UNSIGNED (type))
10628 enum machine_mode mode = GET_MODE (exp);
10629 rtx mask = immed_wide_int_const
10630 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10631 return expand_and (mode, exp, mask, target);
10633 else
10635 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10636 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10637 exp, count, target, 0);
10638 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10639 exp, count, target, 0);
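/* A worked example (a sketch): reducing an SImode value to a 3-bit field
   gives PREC = 3.  For an unsigned TYPE the value is simply ANDed with the
   mask 7 (binary 111); for a signed TYPE it is shifted left by 32 - 3 = 29
   and then arithmetically right by 29, reproducing the wrap-around and
   sign-extension semantics of the narrow field.  */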
10643 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10644 when applied to the address of EXP produces an address known to be
10645 aligned more than BIGGEST_ALIGNMENT. */
10647 static int
10648 is_aligning_offset (const_tree offset, const_tree exp)
10650 /* Strip off any conversions. */
10651 while (CONVERT_EXPR_P (offset))
10652 offset = TREE_OPERAND (offset, 0);
10654 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10655 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10656 if (TREE_CODE (offset) != BIT_AND_EXPR
10657 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10658 || compare_tree_int (TREE_OPERAND (offset, 1),
10659 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10660 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10661 return 0;
10663 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10664 It must be NEGATE_EXPR. Then strip any more conversions. */
10665 offset = TREE_OPERAND (offset, 0);
10666 while (CONVERT_EXPR_P (offset))
10667 offset = TREE_OPERAND (offset, 0);
10669 if (TREE_CODE (offset) != NEGATE_EXPR)
10670 return 0;
10672 offset = TREE_OPERAND (offset, 0);
10673 while (CONVERT_EXPR_P (offset))
10674 offset = TREE_OPERAND (offset, 0);
10676 /* This must now be the address of EXP. */
10677 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
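/* The offset shape recognized above corresponds, as a sketch, to
   source-level alignment arithmetic such as

       (- (uintptr_t) &exp) & (ALIGN - 1)

   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT; adding such
   an offset to the address of EXP rounds it up to an ALIGN boundary.  */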
10680 /* Return the tree node if ARG corresponds to a string constant, or zero
10681 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10682 in bytes within the string that ARG is accessing. The type of the
10683 offset will be `sizetype'. */
10685 tree
10686 string_constant (tree arg, tree *ptr_offset)
10688 tree array, offset, lower_bound;
10689 STRIP_NOPS (arg);
10691 if (TREE_CODE (arg) == ADDR_EXPR)
10693 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10695 *ptr_offset = size_zero_node;
10696 return TREE_OPERAND (arg, 0);
10698 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10700 array = TREE_OPERAND (arg, 0);
10701 offset = size_zero_node;
10703 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10705 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10706 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10707 if (TREE_CODE (array) != STRING_CST
10708 && TREE_CODE (array) != VAR_DECL)
10709 return 0;
10711 /* Check if the array has a nonzero lower bound. */
10712 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10713 if (!integer_zerop (lower_bound))
10715 /* If the offset and base aren't both constants, return 0. */
10716 if (TREE_CODE (lower_bound) != INTEGER_CST)
10717 return 0;
10718 if (TREE_CODE (offset) != INTEGER_CST)
10719 return 0;
10720 /* Adjust offset by the lower bound. */
10721 offset = size_diffop (fold_convert (sizetype, offset),
10722 fold_convert (sizetype, lower_bound));
10725 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10727 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10728 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10729 if (TREE_CODE (array) != ADDR_EXPR)
10730 return 0;
10731 array = TREE_OPERAND (array, 0);
10732 if (TREE_CODE (array) != STRING_CST
10733 && TREE_CODE (array) != VAR_DECL)
10734 return 0;
10736 else
10737 return 0;
10739 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10741 tree arg0 = TREE_OPERAND (arg, 0);
10742 tree arg1 = TREE_OPERAND (arg, 1);
10744 STRIP_NOPS (arg0);
10745 STRIP_NOPS (arg1);
10747 if (TREE_CODE (arg0) == ADDR_EXPR
10748 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10749 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10751 array = TREE_OPERAND (arg0, 0);
10752 offset = arg1;
10754 else if (TREE_CODE (arg1) == ADDR_EXPR
10755 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10756 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10758 array = TREE_OPERAND (arg1, 0);
10759 offset = arg0;
10761 else
10762 return 0;
10764 else
10765 return 0;
10767 if (TREE_CODE (array) == STRING_CST)
10769 *ptr_offset = fold_convert (sizetype, offset);
10770 return array;
10772 else if (TREE_CODE (array) == VAR_DECL
10773 || TREE_CODE (array) == CONST_DECL)
10775 int length;
10776 tree init = ctor_for_folding (array);
10778 /* Variables initialized to string literals can be handled too. */
10779 if (init == error_mark_node
10780 || !init
10781 || TREE_CODE (init) != STRING_CST)
10782 return 0;
10784 /* Avoid const char foo[4] = "abcde"; */
10785 if (DECL_SIZE_UNIT (array) == NULL_TREE
10786 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10787 || (length = TREE_STRING_LENGTH (init)) <= 0
10788 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10789 return 0;
10791 /* If variable is bigger than the string literal, OFFSET must be constant
10792 and inside of the bounds of the string literal. */
10793 offset = fold_convert (sizetype, offset);
10794 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10795 && (! tree_fits_uhwi_p (offset)
10796 || compare_tree_int (offset, length) >= 0))
10797 return 0;
10799 *ptr_offset = offset;
10800 return init;
10803 return 0;
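/* A typical use (a sketch; the variable names are hypothetical): for an
   argument tree representing &"hello"[1], a caller might do

       tree off;
       tree str = string_constant (arg, &off);

   and, if STR is non-null, read the character data starting at
   TREE_STRING_POINTER (str) + tree_to_uhwi (off), here "ello".  */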
10806 /* Generate code to calculate OPS, an exploded expression,
10807 using a store-flag instruction, and return an rtx for the result.
10808 OPS reflects a comparison.
10810 If TARGET is nonzero, store the result there if convenient.
10812 Return zero if there is no suitable set-flag instruction
10813 available on this machine.
10815 Once expand_expr has been called on the arguments of the comparison,
10816 we are committed to doing the store flag, since it is not safe to
10817 re-evaluate the expression. We emit the store-flag insn by calling
10818 emit_store_flag, but only expand the arguments if we have a reason
10819 to believe that emit_store_flag will be successful. If we think that
10820 it will, but it isn't, we have to simulate the store-flag with a
10821 set/jump/set sequence. */
10823 static rtx
10824 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10826 enum rtx_code code;
10827 tree arg0, arg1, type;
10828 tree tem;
10829 enum machine_mode operand_mode;
10830 int unsignedp;
10831 rtx op0, op1;
10832 rtx subtarget = target;
10833 location_t loc = ops->location;
10835 arg0 = ops->op0;
10836 arg1 = ops->op1;
10838 /* Don't crash if the comparison was erroneous. */
10839 if (arg0 == error_mark_node || arg1 == error_mark_node)
10840 return const0_rtx;
10842 type = TREE_TYPE (arg0);
10843 operand_mode = TYPE_MODE (type);
10844 unsignedp = TYPE_UNSIGNED (type);
10846 /* We won't bother with BLKmode store-flag operations because it would mean
10847 passing a lot of information to emit_store_flag. */
10848 if (operand_mode == BLKmode)
10849 return 0;
10851 /* We won't bother with store-flag operations involving function pointers
10852 when function pointers must be canonicalized before comparisons. */
10853 #ifdef HAVE_canonicalize_funcptr_for_compare
10854 if (HAVE_canonicalize_funcptr_for_compare
10855 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10856 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10857 == FUNCTION_TYPE))
10858 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10859 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10860 == FUNCTION_TYPE))))
10861 return 0;
10862 #endif
10864 STRIP_NOPS (arg0);
10865 STRIP_NOPS (arg1);
10867 /* For vector typed comparisons emit code to generate the desired
10868 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10869 expander for this. */
10870 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10872 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10873 tree if_true = constant_boolean_node (true, ops->type);
10874 tree if_false = constant_boolean_node (false, ops->type);
10875 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10878 /* Get the rtx comparison code to use. We know that EXP is a comparison
10879 operation of some type. Some comparisons against 1 and -1 can be
10880 converted to comparisons with zero. Do so here so that the tests
10881 below will be aware that we have a comparison with zero. These
10882 tests will not catch constants in the first operand, but constants
10883 are rarely passed as the first operand. */
10885 switch (ops->code)
10887 case EQ_EXPR:
10888 code = EQ;
10889 break;
10890 case NE_EXPR:
10891 code = NE;
10892 break;
10893 case LT_EXPR:
10894 if (integer_onep (arg1))
10895 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10896 else
10897 code = unsignedp ? LTU : LT;
10898 break;
10899 case LE_EXPR:
10900 if (! unsignedp && integer_all_onesp (arg1))
10901 arg1 = integer_zero_node, code = LT;
10902 else
10903 code = unsignedp ? LEU : LE;
10904 break;
10905 case GT_EXPR:
10906 if (! unsignedp && integer_all_onesp (arg1))
10907 arg1 = integer_zero_node, code = GE;
10908 else
10909 code = unsignedp ? GTU : GT;
10910 break;
10911 case GE_EXPR:
10912 if (integer_onep (arg1))
10913 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10914 else
10915 code = unsignedp ? GEU : GE;
10916 break;
10918 case UNORDERED_EXPR:
10919 code = UNORDERED;
10920 break;
10921 case ORDERED_EXPR:
10922 code = ORDERED;
10923 break;
10924 case UNLT_EXPR:
10925 code = UNLT;
10926 break;
10927 case UNLE_EXPR:
10928 code = UNLE;
10929 break;
10930 case UNGT_EXPR:
10931 code = UNGT;
10932 break;
10933 case UNGE_EXPR:
10934 code = UNGE;
10935 break;
10936 case UNEQ_EXPR:
10937 code = UNEQ;
10938 break;
10939 case LTGT_EXPR:
10940 code = LTGT;
10941 break;
10943 default:
10944 gcc_unreachable ();
10947 /* Put a constant second. */
10948 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10949 || TREE_CODE (arg0) == FIXED_CST)
10951 tem = arg0; arg0 = arg1; arg1 = tem;
10952 code = swap_condition (code);
10955 /* If this is an equality or inequality test of a single bit, we can
10956 do this by shifting the bit being tested to the low-order bit and
10957 masking the result with the constant 1. If the condition was EQ,
10958 we xor it with 1. This does not require an scc insn and is faster
10959 than an scc insn even if we have it.
10961 The code to make this transformation was moved into fold_single_bit_test,
10962 so we just call into the folder and expand its result. */
10964 if ((code == NE || code == EQ)
10965 && integer_zerop (arg1)
10966 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10967 {
10968 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10969 if (srcstmt
10970 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10971 {
10972 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10973 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10974 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10975 gimple_assign_rhs1 (srcstmt),
10976 gimple_assign_rhs2 (srcstmt));
10977 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10978 if (temp)
10979 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10980 }
10981 }
10983 if (! get_subtarget (target)
10984 || GET_MODE (subtarget) != operand_mode)
10985 subtarget = 0;
10987 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10989 if (target == 0)
10990 target = gen_reg_rtx (mode);
10992 /* Try a cstore if possible. */
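/* The last argument below selects the value stored for "true": -1 when the
   result type is a 1-bit signed type (whose only nonzero value is all-ones),
   and 1 otherwise.  */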
10993 return emit_store_flag_force (target, code, op0, op1,
10994 operand_mode, unsignedp,
10995 (TYPE_PRECISION (ops->type) == 1
10996 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10997 }
11000 /* Stubs in case we haven't got a casesi insn. */
11001 #ifndef HAVE_casesi
11002 # define HAVE_casesi 0
11003 # define gen_casesi(a, b, c, d, e) (0)
11004 # define CODE_FOR_casesi CODE_FOR_nothing
11005 #endif
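/* With the stubs above, a target that provides no casesi pattern gets
   HAVE_casesi == 0, so try_casesi below simply returns 0 and the caller
   has to fall back to another dispatch strategy (e.g. a tablejump).  */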
11007 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11008 0 otherwise (i.e. if there is no casesi instruction).
11010 DEFAULT_PROBABILITY is the probability of jumping to the default
11011 label. */
11012 int
11013 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11014 rtx table_label, rtx default_label, rtx fallback_label,
11015 int default_probability)
11016 {
11017 struct expand_operand ops[5];
11018 enum machine_mode index_mode = SImode;
11019 rtx op1, op2, index;
11021 if (! HAVE_casesi)
11022 return 0;
11024 /* Convert the index to SImode. */
11025 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11026 {
11027 enum machine_mode omode = TYPE_MODE (index_type);
11028 rtx rangertx = expand_normal (range);
11030 /* We must handle the endpoints in the original mode. */
11031 index_expr = build2 (MINUS_EXPR, index_type,
11032 index_expr, minval);
11033 minval = integer_zero_node;
11034 index = expand_normal (index_expr);
11035 if (default_label)
11036 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11037 omode, 1, default_label,
11038 default_probability);
11039 /* Now we can safely truncate. */
11040 index = convert_to_mode (index_mode, index, 0);
11041 }
11042 else
11043 {
11044 if (TYPE_MODE (index_type) != index_mode)
11045 {
11046 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11047 index_expr = fold_convert (index_type, index_expr);
11048 }
11050 index = expand_normal (index_expr);
11051 }
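/* Whichever branch was taken, INDEX is now an SImode (index_mode) value
   ready to be handed to the casesi pattern below.  */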
11053 do_pending_stack_adjust ();
11055 op1 = expand_normal (minval);
11056 op2 = expand_normal (range);
11058 create_input_operand (&ops[0], index, index_mode);
11059 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11060 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11061 create_fixed_operand (&ops[3], table_label);
11062 create_fixed_operand (&ops[4], (default_label
11063 ? default_label
11064 : fallback_label));
11065 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11066 return 1;
11067 }
11069 /* Attempt to generate a tablejump instruction; same concept. */
11070 #ifndef HAVE_tablejump
11071 #define HAVE_tablejump 0
11072 #define gen_tablejump(x, y) (0)
11073 #endif
11075 /* Subroutine of the next function.
11077 INDEX is the value being switched on, with the lowest value
11078 in the table already subtracted.
11079 MODE is its expected mode (needed if INDEX is constant).
11080 RANGE is the length of the jump table.
11081 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11083 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11084 index value is out of range.
11085 DEFAULT_PROBABILITY is the probability of jumping to
11086 the default label. */
11088 static void
11089 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11090 rtx default_label, int default_probability)
11091 {
11092 rtx temp, vector;
11094 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11095 cfun->cfg->max_jumptable_ents = INTVAL (range);
11097 /* Do an unsigned comparison (in the proper mode) between the index
11098 expression and the value which represents the length of the range.
11099 Since we just finished subtracting the lower bound of the range
11100 from the index expression, this comparison allows us to simultaneously
11101 check that the original index expression value is both greater than
11102 or equal to the minimum value of the range and less than or equal to
11103 the maximum value of the range. */
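/* Illustrative example (case values not from the original source): for case
   values 3..10 the lower bound 3 has already been subtracted, so the single
   unsigned test (unsigned) (i - 3) > 7 sends both i < 3 (which wraps to a
   large value) and i > 10 to the default label.  */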
11105 if (default_label)
11106 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11107 default_label, default_probability);
11110 /* If index is in range, it must fit in Pmode.
11111 Convert to Pmode so we can index with it. */
11112 if (mode != Pmode)
11113 index = convert_to_mode (Pmode, index, 1);
11115 /* Don't let a MEM slip through, because then INDEX that comes
11116 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11117 and break_out_memory_refs will go to work on it and mess it up. */
11118 #ifdef PIC_CASE_VECTOR_ADDRESS
11119 if (flag_pic && !REG_P (index))
11120 index = copy_to_mode_reg (Pmode, index);
11121 #endif
11123 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11124 GET_MODE_SIZE, because this indicates how large insns are. The other
11125 uses should all be Pmode, because they are addresses. This code
11126 could fail if addresses and insns are not the same size. */
11127 index = simplify_gen_binary (MULT, Pmode, index,
11128 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11129 Pmode));
11130 index = simplify_gen_binary (PLUS, Pmode, index,
11131 gen_rtx_LABEL_REF (Pmode, table_label));
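/* At this point INDEX is the address of the dispatch-table entry: e.g. with
   a HImode case vector each entry is 2 bytes, so the entry address is
   table_label + index * 2 (the HImode width is only an illustration).  */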
11133 #ifdef PIC_CASE_VECTOR_ADDRESS
11134 if (flag_pic)
11135 index = PIC_CASE_VECTOR_ADDRESS (index);
11136 else
11137 #endif
11138 index = memory_address (CASE_VECTOR_MODE, index);
11139 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11140 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11141 convert_move (temp, vector, 0);
11143 emit_jump_insn (gen_tablejump (temp, table_label));
11145 /* If we are generating PIC code or if the table is PC-relative, the
11146 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11147 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11148 emit_barrier ();
11149 }
11151 int
11152 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11153 rtx table_label, rtx default_label, int default_probability)
11154 {
11155 rtx index;
11157 if (! HAVE_tablejump)
11158 return 0;
11160 index_expr = fold_build2 (MINUS_EXPR, index_type,
11161 fold_convert (index_type, index_expr),
11162 fold_convert (index_type, minval));
11163 index = expand_normal (index_expr);
11164 do_pending_stack_adjust ();
11166 do_tablejump (index, TYPE_MODE (index_type),
11167 convert_modes (TYPE_MODE (index_type),
11168 TYPE_MODE (TREE_TYPE (range)),
11169 expand_normal (range),
11170 TYPE_UNSIGNED (TREE_TYPE (range))),
11171 table_label, default_label, default_probability);
11172 return 1;
11173 }
11175 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
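/* For example (illustrative): a V4SI constant {1, 2, 3, 4} becomes a
   CONST_VECTOR of SImode integer constants, while an all-zero initializer
   is returned directly as CONST0_RTX of the vector mode.  */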
11176 static rtx
11177 const_vector_from_tree (tree exp)
11178 {
11179 rtvec v;
11180 unsigned i;
11181 int units;
11182 tree elt;
11183 enum machine_mode inner, mode;
11185 mode = TYPE_MODE (TREE_TYPE (exp));
11187 if (initializer_zerop (exp))
11188 return CONST0_RTX (mode);
11190 units = GET_MODE_NUNITS (mode);
11191 inner = GET_MODE_INNER (mode);
11193 v = rtvec_alloc (units);
11195 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11196 {
11197 elt = VECTOR_CST_ELT (exp, i);
11199 if (TREE_CODE (elt) == REAL_CST)
11200 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11201 inner);
11202 else if (TREE_CODE (elt) == FIXED_CST)
11203 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11204 inner);
11205 else
11206 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11207 }
11209 return gen_rtx_CONST_VECTOR (mode, v);
11210 }
11212 /* Build a decl for a personality function given a language prefix. */
11214 tree
11215 build_personality_function (const char *lang)
11216 {
11217 const char *unwind_and_version;
11218 tree decl, type;
11219 char *name;
11221 switch (targetm_common.except_unwind_info (&global_options))
11222 {
11223 case UI_NONE:
11224 return NULL;
11225 case UI_SJLJ:
11226 unwind_and_version = "_sj0";
11227 break;
11228 case UI_DWARF2:
11229 case UI_TARGET:
11230 unwind_and_version = "_v0";
11231 break;
11232 case UI_SEH:
11233 unwind_and_version = "_seh0";
11234 break;
11235 default:
11236 gcc_unreachable ();
11237 }
11239 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11241 type = build_function_type_list (integer_type_node, integer_type_node,
11242 long_long_unsigned_type_node,
11243 ptr_type_node, ptr_type_node, NULL_TREE);
11244 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11245 get_identifier (name), type);
11246 DECL_ARTIFICIAL (decl) = 1;
11247 DECL_EXTERNAL (decl) = 1;
11248 TREE_PUBLIC (decl) = 1;
11250 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11251 are the flags assigned by targetm.encode_section_info. */
11252 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11254 return decl;
11255 }
11257 /* Extracts the personality function of DECL and returns the corresponding
11258 libfunc. */
11260 rtx
11261 get_personality_function (tree decl)
11262 {
11263 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11264 enum eh_personality_kind pk;
11266 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11267 if (pk == eh_personality_none)
11268 return NULL;
11270 if (!personality
11271 && pk == eh_personality_any)
11272 personality = lang_hooks.eh_personality ();
11274 if (pk == eh_personality_lang)
11275 gcc_assert (personality != NULL_TREE);
11277 return XEXP (DECL_RTL (personality), 0);
11278 }
11280 #include "gt-expr.h"