1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
71 /* Decide whether a function's arguments should be processed
72 from first to last or from last to first.
74 They should if the stack and args grow in opposite directions, but
75 only if we have push insns. */
77 #ifdef PUSH_ROUNDING
79 #ifndef PUSH_ARGS_REVERSED
80 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
81 #define PUSH_ARGS_REVERSED /* If it's last to first. */
82 #endif
83 #endif
85 #endif
87 #ifndef STACK_PUSH_CODE
88 #ifdef STACK_GROWS_DOWNWARD
89 #define STACK_PUSH_CODE PRE_DEC
90 #else
91 #define STACK_PUSH_CODE PRE_INC
92 #endif
93 #endif
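/* Example (illustrative sketch, kept out of the build): how a push
   address is typically built from STACK_PUSH_CODE; the resulting MEM
   is then the destination of the push.  The variable names are only
   for illustration.  */
#if 0
rtx push_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
rtx push_dest = gen_rtx_MEM (word_mode, push_addr);
#endif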
96 /* If this is nonzero, we do not bother generating VOLATILE
97 around volatile memory references, and we are willing to
98 output indirect addresses. If cse is to follow, we reject
99 indirect addresses so a useful potential cse is generated;
100 if it is used only once, instruction combination will produce
101 the same indirect address eventually. */
102 int cse_not_expected;
104 /* This structure is used by move_by_pieces to describe the move to
105 be performed. */
106 struct move_by_pieces_d
108 rtx to;
109 rtx to_addr;
110 int autinc_to;
111 int explicit_inc_to;
112 rtx from;
113 rtx from_addr;
114 int autinc_from;
115 int explicit_inc_from;
116 unsigned HOST_WIDE_INT len;
117 HOST_WIDE_INT offset;
118 int reverse;
121 /* This structure is used by store_by_pieces to describe the clear to
122 be performed. */
124 struct store_by_pieces_d
126 rtx to;
127 rtx to_addr;
128 int autinc_to;
129 int explicit_inc_to;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
133 void *constfundata;
134 int reverse;
137 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
138 struct move_by_pieces_d *);
139 static bool block_move_libcall_safe_for_call_parm (void);
140 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
141 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
142 unsigned HOST_WIDE_INT);
143 static tree emit_block_move_libcall_fn (int);
144 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
145 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
146 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
147 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
148 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
149 struct store_by_pieces_d *);
150 static tree clear_storage_libcall_fn (int);
151 static rtx compress_float_constant (rtx, rtx);
152 static rtx get_subtarget (rtx);
153 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
154 HOST_WIDE_INT, enum machine_mode,
155 tree, int, alias_set_type);
156 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
157 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
158 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
159 enum machine_mode, tree, alias_set_type, bool);
161 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
163 static int is_aligning_offset (const_tree, const_tree);
164 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
165 enum expand_modifier);
166 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
167 static rtx do_store_flag (sepops, rtx, enum machine_mode);
168 #ifdef PUSH_ROUNDING
169 static void emit_single_push_insn (enum machine_mode, rtx, tree);
170 #endif
171 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
172 static rtx const_vector_from_tree (tree);
173 static void write_complex_part (rtx, rtx, bool);
175 /* This macro is used to determine whether move_by_pieces should be called
176 to perform a structure copy. */
177 #ifndef MOVE_BY_PIECES_P
178 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
180 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
181 #endif
183 /* This macro is used to determine whether clear_by_pieces should be
184 called to clear storage. */
185 #ifndef CLEAR_BY_PIECES_P
186 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
189 #endif
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memset" storage with byte values other than zero. */
193 #ifndef SET_BY_PIECES_P
194 #define SET_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
197 #endif
199 /* This macro is used to determine whether store_by_pieces should be
200 called to "memcpy" storage when the source is a constant string. */
201 #ifndef STORE_BY_PIECES_P
202 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
203 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
204 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
205 #endif
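/* Example (illustrative sketch, kept out of the build): how the
   *_BY_PIECES_P heuristics are meant to be consulted.  The concrete
   numbers assume a hypothetical 64-bit target with MOVE_MAX_PIECES of 8
   and a MOVE_RATIO of 4, so a word-aligned 16-byte copy needs two
   DImode moves and is expanded inline; DST_MEM and SRC_MEM are
   hypothetical BLKmode MEMs.  */
#if 0
if (MOVE_BY_PIECES_P (16, 64))
  move_by_pieces (dst_mem, src_mem, 16, 64, 0);
else
  emit_block_move (dst_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);
#endif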
207 /* This is run to set up which modes can be used
208 directly in memory and to initialize the block move optab. It is run
209 at the beginning of compilation and when the target is reinitialized. */
211 void
212 init_expr_target (void)
214 rtx insn, pat;
215 enum machine_mode mode;
216 int num_clobbers;
217 rtx mem, mem1;
218 rtx reg;
220 /* Try indexing by frame ptr and try by stack ptr.
221 It is known that on the Convex the stack ptr isn't a valid index.
222 With luck, one or the other is valid on any machine. */
223 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
224 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
226 /* A scratch register we can modify in-place below to avoid
227 useless RTL allocations. */
228 reg = gen_rtx_REG (VOIDmode, -1);
230 insn = rtx_alloc (INSN);
231 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
232 PATTERN (insn) = pat;
234 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
235 mode = (enum machine_mode) ((int) mode + 1))
237 int regno;
239 direct_load[(int) mode] = direct_store[(int) mode] = 0;
240 PUT_MODE (mem, mode);
241 PUT_MODE (mem1, mode);
242 PUT_MODE (reg, mode);
244 /* See if there is some register that can be used in this mode and
245 directly loaded or stored from memory. */
247 if (mode != VOIDmode && mode != BLKmode)
248 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
249 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
250 regno++)
252 if (! HARD_REGNO_MODE_OK (regno, mode))
253 continue;
255 SET_REGNO (reg, regno);
257 SET_SRC (pat) = mem;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
262 SET_SRC (pat) = mem1;
263 SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
272 SET_SRC (pat) = reg;
273 SET_DEST (pat) = mem1;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
279 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
281 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
282 mode = GET_MODE_WIDER_MODE (mode))
284 enum machine_mode srcmode;
285 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
286 srcmode = GET_MODE_WIDER_MODE (srcmode))
288 enum insn_code ic;
290 ic = can_extend_p (mode, srcmode, 0);
291 if (ic == CODE_FOR_nothing)
292 continue;
294 PUT_MODE (mem, srcmode);
296 if (insn_operand_matches (ic, 1, mem))
297 float_extend_from_mem[mode][srcmode] = true;
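/* Example (illustrative sketch, kept out of the build): the kind of
   question the tables filled in above answer later in this file.  The
   modes named are only examples.  */
#if 0
if (direct_load[(int) SImode])
  ; /* An SImode value can be loaded from memory straight into some
       hard register.  */
if (float_extend_from_mem[DFmode][SFmode])
  ; /* The DFmode<-SFmode extend pattern accepts a MEM source.  */
#endif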
302 /* This is run at the start of compiling a function. */
304 void
305 init_expr (void)
307 memset (&crtl->expr, 0, sizeof (crtl->expr));
310 /* Copy data from FROM to TO, where the machine modes are not the same.
311 Both modes may be integer, or both may be floating, or both may be
312 fixed-point.
313 UNSIGNEDP should be nonzero if FROM is an unsigned type.
314 This causes zero-extension instead of sign-extension. */
316 void
317 convert_move (rtx to, rtx from, int unsignedp)
319 enum machine_mode to_mode = GET_MODE (to);
320 enum machine_mode from_mode = GET_MODE (from);
321 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
322 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
323 enum insn_code code;
324 rtx libcall;
326 /* rtx code for making an equivalent value. */
327 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
328 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
331 gcc_assert (to_real == from_real);
332 gcc_assert (to_mode != BLKmode);
333 gcc_assert (from_mode != BLKmode);
335 /* If the source and destination are already the same, then there's
336 nothing to do. */
337 if (to == from)
338 return;
340 /* If FROM is a SUBREG that indicates that we have already done at least
341 the required extension, strip it. We don't handle such SUBREGs as
342 TO here. */
344 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
345 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
346 >= GET_MODE_PRECISION (to_mode))
347 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
348 from = gen_lowpart (to_mode, from), from_mode = to_mode;
350 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
352 if (to_mode == from_mode
353 || (from_mode == VOIDmode && CONSTANT_P (from)))
355 emit_move_insn (to, from);
356 return;
359 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
361 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
363 if (VECTOR_MODE_P (to_mode))
364 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
365 else
366 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
368 emit_move_insn (to, from);
369 return;
372 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
374 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
375 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
376 return;
379 if (to_real)
381 rtx value, insns;
382 convert_optab tab;
384 gcc_assert ((GET_MODE_PRECISION (from_mode)
385 != GET_MODE_PRECISION (to_mode))
386 || (DECIMAL_FLOAT_MODE_P (from_mode)
387 != DECIMAL_FLOAT_MODE_P (to_mode)));
389 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
390 /* Conversion between decimal float and binary float, same size. */
391 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
392 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
393 tab = sext_optab;
394 else
395 tab = trunc_optab;
397 /* Try converting directly if the insn is supported. */
399 code = convert_optab_handler (tab, to_mode, from_mode);
400 if (code != CODE_FOR_nothing)
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
404 return;
407 /* Otherwise use a libcall. */
408 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
413 start_sequence ();
414 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
415 1, from, from_mode);
416 insns = get_insns ();
417 end_sequence ();
418 emit_libcall_block (insns, to, value,
419 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
420 from)
421 : gen_rtx_FLOAT_EXTEND (to_mode, from));
422 return;
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
433 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
434 != CODE_FOR_nothing);
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
438 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
439 to, from, UNKNOWN);
440 return;
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 rtx new_from;
445 enum machine_mode full_mode
446 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
448 enum insn_code icode;
450 icode = convert_optab_handler (ctab, full_mode, from_mode);
451 gcc_assert (icode != CODE_FOR_nothing);
453 if (to_mode == full_mode)
455 emit_unop_insn (icode, to, from, UNKNOWN);
456 return;
459 new_from = gen_reg_rtx (full_mode);
460 emit_unop_insn (icode, new_from, from, UNKNOWN);
462 /* else proceed to integer conversions below. */
463 from_mode = full_mode;
464 from = new_from;
467 /* Make sure both are fixed-point modes or both are not. */
468 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
469 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
470 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
472 /* If we widen from_mode to to_mode and they are in the same class,
473 we won't saturate the result.
474 Otherwise, always saturate the result to play safe. */
475 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
476 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
477 expand_fixed_convert (to, from, 0, 0);
478 else
479 expand_fixed_convert (to, from, 0, 1);
480 return;
483 /* Now both modes are integers. */
485 /* Handle expanding beyond a word. */
486 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
487 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
489 rtx insns;
490 rtx lowpart;
491 rtx fill_value;
492 rtx lowfrom;
493 int i;
494 enum machine_mode lowpart_mode;
495 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
497 /* Try converting directly if the insn is supported. */
498 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
499 != CODE_FOR_nothing)
501 /* If FROM is a SUBREG, put it into a register. Do this
502 so that we always generate the same set of insns for
503 better cse'ing; if an intermediate assignment occurred,
504 we won't be doing the operation directly on the SUBREG. */
505 if (optimize > 0 && GET_CODE (from) == SUBREG)
506 from = force_reg (from_mode, from);
507 emit_unop_insn (code, to, from, equiv_code);
508 return;
510 /* Next, try converting via full word. */
511 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
512 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
513 != CODE_FOR_nothing))
515 rtx word_to = gen_reg_rtx (word_mode);
516 if (REG_P (to))
518 if (reg_overlap_mentioned_p (to, from))
519 from = force_reg (from_mode, from);
520 emit_clobber (to);
522 convert_move (word_to, from, unsignedp);
523 emit_unop_insn (code, to, word_to, equiv_code);
524 return;
527 /* No special multiword conversion insn; do it by hand. */
528 start_sequence ();
530 /* Since we will turn this into a no conflict block, we must ensure that
531 the source does not overlap the target, so force it into an isolated
532 register when it might do so. Likewise for any MEM input, since the
533 conversion sequence might require several references to it and we
534 must ensure we're getting the same value every time. */
536 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
537 from = force_reg (from_mode, from);
539 /* Get a copy of FROM widened to a word, if necessary. */
540 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
541 lowpart_mode = word_mode;
542 else
543 lowpart_mode = from_mode;
545 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
547 lowpart = gen_lowpart (lowpart_mode, to);
548 emit_move_insn (lowpart, lowfrom);
550 /* Compute the value to put in each remaining word. */
551 if (unsignedp)
552 fill_value = const0_rtx;
553 else
554 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
555 LT, lowfrom, const0_rtx,
556 lowpart_mode, 0, -1);
558 /* Fill the remaining words. */
559 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
561 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
562 rtx subword = operand_subword (to, index, 1, to_mode);
564 gcc_assert (subword);
566 if (fill_value != subword)
567 emit_move_insn (subword, fill_value);
570 insns = get_insns ();
571 end_sequence ();
573 emit_insn (insns);
574 return;
577 /* Truncating multi-word to a word or less. */
578 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
579 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
581 if (!((MEM_P (from)
582 && ! MEM_VOLATILE_P (from)
583 && direct_load[(int) to_mode]
584 && ! mode_dependent_address_p (XEXP (from, 0),
585 MEM_ADDR_SPACE (from)))
586 || REG_P (from)
587 || GET_CODE (from) == SUBREG))
588 from = force_reg (from_mode, from);
589 convert_move (to, gen_lowpart (word_mode, from), 0);
590 return;
593 /* Now follow all the conversions between integers
594 no more than a word long. */
596 /* For truncation, usually we can just refer to FROM in a narrower mode. */
597 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
598 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
600 if (!((MEM_P (from)
601 && ! MEM_VOLATILE_P (from)
602 && direct_load[(int) to_mode]
603 && ! mode_dependent_address_p (XEXP (from, 0),
604 MEM_ADDR_SPACE (from)))
605 || REG_P (from)
606 || GET_CODE (from) == SUBREG))
607 from = force_reg (from_mode, from);
608 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
609 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
610 from = copy_to_reg (from);
611 emit_move_insn (to, gen_lowpart (to_mode, from));
612 return;
615 /* Handle extension. */
616 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
618 /* Convert directly if that works. */
619 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, equiv_code);
623 return;
625 else
627 enum machine_mode intermediate;
628 rtx tmp;
629 int shift_amount;
631 /* Search for a mode to convert via. */
632 for (intermediate = from_mode; intermediate != VOIDmode;
633 intermediate = GET_MODE_WIDER_MODE (intermediate))
634 if (((can_extend_p (to_mode, intermediate, unsignedp)
635 != CODE_FOR_nothing)
636 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
637 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
638 && (can_extend_p (intermediate, from_mode, unsignedp)
639 != CODE_FOR_nothing))
641 convert_move (to, convert_to_mode (intermediate, from,
642 unsignedp), unsignedp);
643 return;
646 /* No suitable intermediate mode.
647 Generate what we need with shifts. */
648 shift_amount = (GET_MODE_PRECISION (to_mode)
649 - GET_MODE_PRECISION (from_mode));
650 from = gen_lowpart (to_mode, force_reg (from_mode, from));
651 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
652 to, unsignedp);
653 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
654 to, unsignedp);
655 if (tmp != to)
656 emit_move_insn (to, tmp);
657 return;
661 /* Support special truncate insns for certain modes. */
662 if (convert_optab_handler (trunc_optab, to_mode,
663 from_mode) != CODE_FOR_nothing)
665 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
666 to, from, UNKNOWN);
667 return;
670 /* Handle truncation of volatile memrefs, and so on;
671 the things that couldn't be truncated directly,
672 and for which there was no special instruction.
674 ??? Code above formerly short-circuited this, for most integer
675 mode pairs, with a force_reg in from_mode followed by a recursive
676 call to this routine. Appears always to have been wrong. */
677 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
679 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
680 emit_move_insn (to, temp);
681 return;
684 /* Mode combination is not recognized. */
685 gcc_unreachable ();
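/* Example (illustrative sketch, kept out of the build): a typical call,
   sign-extending an SImode pseudo into a fresh DImode pseudo.  The
   pseudos here are hypothetical.  */
#if 0
rtx narrow = gen_reg_rtx (SImode);
rtx wide = gen_reg_rtx (DImode);
convert_move (wide, narrow, /*unsignedp=*/0);
#endif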
688 /* Return an rtx for a value that would result
689 from converting X to mode MODE.
690 Both X and MODE may be floating, or both integer.
691 UNSIGNEDP is nonzero if X is an unsigned value.
692 This can be done by referring to a part of X in place
693 or by copying to a new temporary with conversion. */
696 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
698 return convert_modes (mode, VOIDmode, x, unsignedp);
701 /* Return an rtx for a value that would result
702 from converting X from mode OLDMODE to mode MODE.
703 Both modes may be floating, or both integer.
704 UNSIGNEDP is nonzero if X is an unsigned value.
706 This can be done by referring to a part of X in place
707 or by copying to a new temporary with conversion.
709 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
712 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
714 rtx temp;
716 /* If FROM is a SUBREG that indicates that we have already done at least
717 the required extension, strip it. */
719 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
720 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
721 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
722 x = gen_lowpart (mode, SUBREG_REG (x));
724 if (GET_MODE (x) != VOIDmode)
725 oldmode = GET_MODE (x);
727 if (mode == oldmode)
728 return x;
730 /* There is one case that we must handle specially: If we are converting
731 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
732 we are to interpret the constant as unsigned, gen_lowpart will do
733 the wrong thing if the constant appears negative. What we want to do is
734 make the high-order word of the constant zero, not all ones. */
736 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
737 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
738 && CONST_INT_P (x) && INTVAL (x) < 0)
740 double_int val = double_int::from_uhwi (INTVAL (x));
742 /* We need to zero extend VAL. */
743 if (oldmode != VOIDmode)
744 val = val.zext (GET_MODE_BITSIZE (oldmode));
746 return immed_double_int_const (val, mode);
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((CONST_INT_P (x)
755 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (CONST_DOUBLE_AS_INT_P (x)
759 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
762 || (REG_P (x)
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
766 GET_MODE (x))))))))
768 /* ??? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (CONST_INT_P (x) && oldmode != VOIDmode
772 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
776 /* We must sign or zero-extend in this case. Start by
777 zero-extending, then sign extend if we need to. */
778 val &= GET_MODE_MASK (oldmode);
779 if (! unsignedp
780 && val_signbit_known_set_p (oldmode, val))
781 val |= ~GET_MODE_MASK (oldmode);
783 return gen_int_mode (val, mode);
786 return gen_lowpart (mode, x);
789 /* Converting an integer constant into MODE is always equivalent to a
790 subreg operation. */
791 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
793 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
794 return simplify_gen_subreg (mode, x, oldmode, 0);
797 temp = gen_reg_rtx (mode);
798 convert_move (temp, x, unsignedp);
799 return temp;
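/* Example (illustrative sketch, kept out of the build): narrowing a
   constant with convert_modes.  For a CONST_INT the result is computed
   directly and no insns are emitted; 0x12345678 truncated to QImode
   yields the constant 0x78.  */
#if 0
rtx narrowed = convert_modes (QImode, SImode, GEN_INT (0x12345678), 1);
#endif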
802 /* Return the largest alignment we can use for doing a move (or store)
803 of MAX_PIECES. ALIGN is the largest alignment we could use. */
805 static unsigned int
806 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
808 enum machine_mode tmode;
810 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
811 if (align >= GET_MODE_ALIGNMENT (tmode))
812 align = GET_MODE_ALIGNMENT (tmode);
813 else
815 enum machine_mode tmode, xmode;
817 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
818 tmode != VOIDmode;
819 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
820 if (GET_MODE_SIZE (tmode) > max_pieces
821 || SLOW_UNALIGNED_ACCESS (tmode, align))
822 break;
824 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
827 return align;
830 /* Return the widest integer mode no wider than SIZE. If no such mode
831 can be found, return VOIDmode. */
833 static enum machine_mode
834 widest_int_mode_for_size (unsigned int size)
836 enum machine_mode tmode, mode = VOIDmode;
838 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
839 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
840 if (GET_MODE_SIZE (tmode) < size)
841 mode = tmode;
843 return mode;
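/* Example (illustrative sketch, kept out of the build): the mode
   returned is the widest integer mode strictly narrower than SIZE.
   Assuming the usual QI/HI/SI/DI integer modes:  */
#if 0
gcc_assert (widest_int_mode_for_size (5) == SImode);  /* 4 < 5.  */
gcc_assert (widest_int_mode_for_size (8) == SImode);  /* DImode is not < 8.  */
gcc_assert (widest_int_mode_for_size (9) == DImode);
#endif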
846 /* STORE_MAX_PIECES is the number of bytes at a time that we can
847 store efficiently. Due to internal GCC limitations, this is
848 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
849 for an immediate constant. */
851 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
853 /* Determine whether the LEN bytes can be moved by using several move
854 instructions. Return nonzero if a call to move_by_pieces should
855 succeed. */
858 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
859 unsigned int align ATTRIBUTE_UNUSED)
861 return MOVE_BY_PIECES_P (len, align);
864 /* Generate several move instructions to copy LEN bytes from block FROM to
865 block TO. (These are MEM rtx's with BLKmode).
867 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
868 used to push FROM to the stack.
870 ALIGN is maximum stack alignment we can assume.
872 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
873 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
874 stpcpy. */
877 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
878 unsigned int align, int endp)
880 struct move_by_pieces_d data;
881 enum machine_mode to_addr_mode;
882 enum machine_mode from_addr_mode = get_address_mode (from);
883 rtx to_addr, from_addr = XEXP (from, 0);
884 unsigned int max_size = MOVE_MAX_PIECES + 1;
885 enum insn_code icode;
887 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
889 data.offset = 0;
890 data.from_addr = from_addr;
891 if (to)
893 to_addr_mode = get_address_mode (to);
894 to_addr = XEXP (to, 0);
895 data.to = to;
896 data.autinc_to
897 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899 data.reverse
900 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
902 else
904 to_addr_mode = VOIDmode;
905 to_addr = NULL_RTX;
906 data.to = NULL_RTX;
907 data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909 data.reverse = 1;
910 #else
911 data.reverse = 0;
912 #endif
914 data.to_addr = to_addr;
915 data.from = from;
916 data.autinc_from
917 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918 || GET_CODE (from_addr) == POST_INC
919 || GET_CODE (from_addr) == POST_DEC);
921 data.explicit_inc_from = 0;
922 data.explicit_inc_to = 0;
923 if (data.reverse) data.offset = len;
924 data.len = len;
926 /* If copying requires more than two move insns,
927 copy addresses to registers (to make displacements shorter)
928 and use post-increment if available. */
929 if (!(data.autinc_from && data.autinc_to)
930 && move_by_pieces_ninsns (len, align, max_size) > 2)
932 /* Find the mode of the largest move...
933 MODE might not be used depending on the definitions of the
934 USE_* macros below. */
935 enum machine_mode mode ATTRIBUTE_UNUSED
936 = widest_int_mode_for_size (max_size);
938 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
940 data.from_addr = copy_to_mode_reg (from_addr_mode,
941 plus_constant (from_addr_mode,
942 from_addr, len));
943 data.autinc_from = 1;
944 data.explicit_inc_from = -1;
946 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
948 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
949 data.autinc_from = 1;
950 data.explicit_inc_from = 1;
952 if (!data.autinc_from && CONSTANT_P (from_addr))
953 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
954 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 to_addr, len));
959 data.autinc_to = 1;
960 data.explicit_inc_to = -1;
962 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
964 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
965 data.autinc_to = 1;
966 data.explicit_inc_to = 1;
968 if (!data.autinc_to && CONSTANT_P (to_addr))
969 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
972 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
974 /* First move what we can in the largest integer mode, then go to
975 successively smaller modes. */
977 while (max_size > 1 && data.len > 0)
979 enum machine_mode mode = widest_int_mode_for_size (max_size);
981 if (mode == VOIDmode)
982 break;
984 icode = optab_handler (mov_optab, mode);
985 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
986 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
988 max_size = GET_MODE_SIZE (mode);
991 /* The code above should have handled everything. */
992 gcc_assert (!data.len);
994 if (endp)
996 rtx to1;
998 gcc_assert (!data.reverse);
999 if (data.autinc_to)
1001 if (endp == 2)
1003 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1004 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1005 else
1006 data.to_addr = copy_to_mode_reg (to_addr_mode,
1007 plus_constant (to_addr_mode,
1008 data.to_addr,
1009 -1));
1011 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1012 data.offset);
1014 else
1016 if (endp == 2)
1017 --data.offset;
1018 to1 = adjust_address (data.to, QImode, data.offset);
1020 return to1;
1022 else
1023 return data.to;
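/* Example (illustrative sketch, kept out of the build): inline-copying
   32 bytes and getting back the address one past the copied block, as
   for mempcpy (ENDP == 1).  DST_MEM and SRC_MEM are hypothetical
   BLKmode MEMs.  */
#if 0
unsigned int example_align = MIN (MEM_ALIGN (dst_mem), MEM_ALIGN (src_mem));
if (can_move_by_pieces (32, example_align))
  {
    rtx end = move_by_pieces (dst_mem, src_mem, 32, example_align,
                              /*endp=*/1);
  }
#endif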
1026 /* Return number of insns required to move L bytes by pieces.
1027 ALIGN (in bits) is maximum alignment we can assume. */
1029 unsigned HOST_WIDE_INT
1030 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1031 unsigned int max_size)
1033 unsigned HOST_WIDE_INT n_insns = 0;
1035 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1037 while (max_size > 1 && l > 0)
1039 enum machine_mode mode;
1040 enum insn_code icode;
1042 mode = widest_int_mode_for_size (max_size);
1044 if (mode == VOIDmode)
1045 break;
1047 icode = optab_handler (mov_optab, mode);
1048 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1049 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1051 max_size = GET_MODE_SIZE (mode);
1054 gcc_assert (!l);
1055 return n_insns;
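/* Example (illustrative sketch, kept out of the build): assuming a
   64-bit target with MOVE_MAX_PIECES of 8, full alignment and no
   slow-unaligned-access penalty, 10 bytes are covered by one DImode
   move plus one HImode move.  */
#if 0
gcc_assert (move_by_pieces_ninsns (10, 64, MOVE_MAX_PIECES + 1) == 2);
#endif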
1058 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1059 with move instructions for mode MODE. GENFUN is the gen_... function
1060 to make a move insn for that mode. DATA has all the other info. */
1062 static void
1063 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1064 struct move_by_pieces_d *data)
1066 unsigned int size = GET_MODE_SIZE (mode);
1067 rtx to1 = NULL_RTX, from1;
1069 while (data->len >= size)
1071 if (data->reverse)
1072 data->offset -= size;
1074 if (data->to)
1076 if (data->autinc_to)
1077 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1078 data->offset);
1079 else
1080 to1 = adjust_address (data->to, mode, data->offset);
1083 if (data->autinc_from)
1084 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1085 data->offset);
1086 else
1087 from1 = adjust_address (data->from, mode, data->offset);
1089 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1090 emit_insn (gen_add2_insn (data->to_addr,
1091 gen_int_mode (-(HOST_WIDE_INT) size,
1092 GET_MODE (data->to_addr))));
1093 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1094 emit_insn (gen_add2_insn (data->from_addr,
1095 gen_int_mode (-(HOST_WIDE_INT) size,
1096 GET_MODE (data->from_addr))));
1098 if (data->to)
1099 emit_insn ((*genfun) (to1, from1));
1100 else
1102 #ifdef PUSH_ROUNDING
1103 emit_single_push_insn (mode, from1, NULL);
1104 #else
1105 gcc_unreachable ();
1106 #endif
1109 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1110 emit_insn (gen_add2_insn (data->to_addr,
1111 gen_int_mode (size,
1112 GET_MODE (data->to_addr))));
1113 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1114 emit_insn (gen_add2_insn (data->from_addr,
1115 gen_int_mode (size,
1116 GET_MODE (data->from_addr))));
1118 if (! data->reverse)
1119 data->offset += size;
1121 data->len -= size;
1125 /* Emit code to move a block Y to a block X. This may be done with
1126 string-move instructions, with multiple scalar move instructions,
1127 or with a library call.
1129 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1130 SIZE is an rtx that says how long they are.
1131 ALIGN is the maximum alignment we can assume they have.
1132 METHOD describes what kind of copy this is, and what mechanisms may be used.
1133 MIN_SIZE is the minimal size of block to move
1134 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1135 in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1137 Return the address of the new block, if memcpy is called and returns it,
1138 0 otherwise. */
1141 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1142 unsigned int expected_align, HOST_WIDE_INT expected_size,
1143 unsigned HOST_WIDE_INT min_size,
1144 unsigned HOST_WIDE_INT max_size,
1145 unsigned HOST_WIDE_INT probable_max_size)
1147 bool may_use_call;
1148 rtx retval = 0;
1149 unsigned int align;
1151 gcc_assert (size);
1152 if (CONST_INT_P (size)
1153 && INTVAL (size) == 0)
1154 return 0;
1156 switch (method)
1158 case BLOCK_OP_NORMAL:
1159 case BLOCK_OP_TAILCALL:
1160 may_use_call = true;
1161 break;
1163 case BLOCK_OP_CALL_PARM:
1164 may_use_call = block_move_libcall_safe_for_call_parm ();
1166 /* Make inhibit_defer_pop nonzero around the library call
1167 to force it to pop the arguments right away. */
1168 NO_DEFER_POP;
1169 break;
1171 case BLOCK_OP_NO_LIBCALL:
1172 may_use_call = false;
1173 break;
1175 default:
1176 gcc_unreachable ();
1179 gcc_assert (MEM_P (x) && MEM_P (y));
1180 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1181 gcc_assert (align >= BITS_PER_UNIT);
1183 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1184 block copy is more efficient for other large modes, e.g. DCmode. */
1185 x = adjust_address (x, BLKmode, 0);
1186 y = adjust_address (y, BLKmode, 0);
1188 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1189 can be incorrect is coming from __builtin_memcpy. */
1190 if (CONST_INT_P (size))
1192 x = shallow_copy_rtx (x);
1193 y = shallow_copy_rtx (y);
1194 set_mem_size (x, INTVAL (size));
1195 set_mem_size (y, INTVAL (size));
1198 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1199 move_by_pieces (x, y, INTVAL (size), align, 0);
1200 else if (emit_block_move_via_movmem (x, y, size, align,
1201 expected_align, expected_size,
1202 min_size, max_size, probable_max_size))
1204 else if (may_use_call
1205 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1206 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1208 /* Since x and y are passed to a libcall, mark the corresponding
1209 tree EXPR as addressable. */
1210 tree y_expr = MEM_EXPR (y);
1211 tree x_expr = MEM_EXPR (x);
1212 if (y_expr)
1213 mark_addressable (y_expr);
1214 if (x_expr)
1215 mark_addressable (x_expr);
1216 retval = emit_block_move_via_libcall (x, y, size,
1217 method == BLOCK_OP_TAILCALL);
1220 else
1221 emit_block_move_via_loop (x, y, size, align);
1223 if (method == BLOCK_OP_CALL_PARM)
1224 OK_DEFER_POP;
1226 return retval;
1230 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1232 unsigned HOST_WIDE_INT max, min = 0;
1233 if (GET_CODE (size) == CONST_INT)
1234 min = max = UINTVAL (size);
1235 else
1236 max = GET_MODE_MASK (GET_MODE (size));
1237 return emit_block_move_hints (x, y, size, method, 0, -1,
1238 min, max, max);
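/* Example (illustrative sketch, kept out of the build): copying a
   128-byte block, letting the expander choose between inline moves, a
   movmem pattern, or a memcpy libcall.  DST_MEM and SRC_MEM are
   hypothetical BLKmode MEMs.  */
#if 0
emit_block_move (dst_mem, src_mem, GEN_INT (128), BLOCK_OP_NORMAL);
#endif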
1241 /* A subroutine of emit_block_move. Returns true if calling the
1242 block move libcall will not clobber any parameters which may have
1243 already been placed on the stack. */
1245 static bool
1246 block_move_libcall_safe_for_call_parm (void)
1248 #if defined (REG_PARM_STACK_SPACE)
1249 tree fn;
1250 #endif
1252 /* If arguments are pushed on the stack, then they're safe. */
1253 if (PUSH_ARGS)
1254 return true;
1256 /* If registers go on the stack anyway, any argument is sure to clobber
1257 an outgoing argument. */
1258 #if defined (REG_PARM_STACK_SPACE)
1259 fn = emit_block_move_libcall_fn (false);
1260 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1261 depend on its argument. */
1262 (void) fn;
1263 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1264 && REG_PARM_STACK_SPACE (fn) != 0)
1265 return false;
1266 #endif
1268 /* If any argument goes in memory, then it might clobber an outgoing
1269 argument. */
1271 CUMULATIVE_ARGS args_so_far_v;
1272 cumulative_args_t args_so_far;
1273 tree fn, arg;
1275 fn = emit_block_move_libcall_fn (false);
1276 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1277 args_so_far = pack_cumulative_args (&args_so_far_v);
1279 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1282 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1284 NULL_TREE, true);
1285 if (!tmp || !REG_P (tmp))
1286 return false;
1287 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1288 return false;
1289 targetm.calls.function_arg_advance (args_so_far, mode,
1290 NULL_TREE, true);
1293 return true;
1296 /* A subroutine of emit_block_move. Expand a movmem pattern;
1297 return true if successful. */
1299 static bool
1300 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1301 unsigned int expected_align, HOST_WIDE_INT expected_size,
1302 unsigned HOST_WIDE_INT min_size,
1303 unsigned HOST_WIDE_INT max_size,
1304 unsigned HOST_WIDE_INT probable_max_size)
1306 int save_volatile_ok = volatile_ok;
1307 enum machine_mode mode;
1309 if (expected_align < align)
1310 expected_align = align;
1311 if (expected_size != -1)
1313 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1314 expected_size = probable_max_size;
1315 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1316 expected_size = min_size;
1319 /* Since this is a move insn, we don't care about volatility. */
1320 volatile_ok = 1;
1322 /* Try the most limited insn first, because there's no point
1323 including more than one in the machine description unless
1324 the more limited one has some advantage. */
1326 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1327 mode = GET_MODE_WIDER_MODE (mode))
1329 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1331 if (code != CODE_FOR_nothing
1332 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1333 here because if SIZE is less than the mode mask, as it is
1334 returned by the macro, it will definitely be less than the
1335 actual mode mask. Since SIZE is within the Pmode address
1336 space, we limit MODE to Pmode. */
1337 && ((CONST_INT_P (size)
1338 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1339 <= (GET_MODE_MASK (mode) >> 1)))
1340 || max_size <= (GET_MODE_MASK (mode) >> 1)
1341 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1343 struct expand_operand ops[9];
1344 unsigned int nops;
1346 /* ??? When called via emit_block_move_for_call, it'd be
1347 nice if there were some way to inform the backend, so
1348 that it doesn't fail the expansion because it thinks
1349 emitting the libcall would be more efficient. */
1350 nops = insn_data[(int) code].n_generator_args;
1351 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1353 create_fixed_operand (&ops[0], x);
1354 create_fixed_operand (&ops[1], y);
1355 /* The check above guarantees that this size conversion is valid. */
1356 create_convert_operand_to (&ops[2], size, mode, true);
1357 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1358 if (nops >= 6)
1360 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1361 create_integer_operand (&ops[5], expected_size);
1363 if (nops >= 8)
1365 create_integer_operand (&ops[6], min_size);
1366 /* If we can not represent the maximal size,
1367 make parameter NULL. */
1368 if ((HOST_WIDE_INT) max_size != -1)
1369 create_integer_operand (&ops[7], max_size);
1370 else
1371 create_fixed_operand (&ops[7], NULL);
1373 if (nops == 9)
1375 /* If we can not represent the maximal size,
1376 make parameter NULL. */
1377 if ((HOST_WIDE_INT) probable_max_size != -1)
1378 create_integer_operand (&ops[8], probable_max_size);
1379 else
1380 create_fixed_operand (&ops[8], NULL);
1382 if (maybe_expand_insn (code, nops, ops))
1384 volatile_ok = save_volatile_ok;
1385 return true;
1390 volatile_ok = save_volatile_ok;
1391 return false;
1394 /* A subroutine of emit_block_move. Expand a call to memcpy.
1395 Return the return value from memcpy, 0 otherwise. */
1398 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1400 rtx dst_addr, src_addr;
1401 tree call_expr, fn, src_tree, dst_tree, size_tree;
1402 enum machine_mode size_mode;
1403 rtx retval;
1405 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1406 pseudos. We can then place those new pseudos into a VAR_DECL and
1407 use them later. */
1409 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1410 src_addr = copy_addr_to_reg (XEXP (src, 0));
1412 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1413 src_addr = convert_memory_address (ptr_mode, src_addr);
1415 dst_tree = make_tree (ptr_type_node, dst_addr);
1416 src_tree = make_tree (ptr_type_node, src_addr);
1418 size_mode = TYPE_MODE (sizetype);
1420 size = convert_to_mode (size_mode, size, 1);
1421 size = copy_to_mode_reg (size_mode, size);
1423 /* It is incorrect to use the libcall calling conventions to call
1424 memcpy in this context. This could be a user call to memcpy and
1425 the user may wish to examine the return value from memcpy. For
1426 targets where libcalls and normal calls have different conventions
1427 for returning pointers, we could end up generating incorrect code. */
1429 size_tree = make_tree (sizetype, size);
1431 fn = emit_block_move_libcall_fn (true);
1432 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1433 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1435 retval = expand_normal (call_expr);
1437 return retval;
1440 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1441 for the function we use for block copies. */
1443 static GTY(()) tree block_move_fn;
1445 void
1446 init_block_move_fn (const char *asmspec)
1448 if (!block_move_fn)
1450 tree args, fn, attrs, attr_args;
1452 fn = get_identifier ("memcpy");
1453 args = build_function_type_list (ptr_type_node, ptr_type_node,
1454 const_ptr_type_node, sizetype,
1455 NULL_TREE);
1457 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1458 DECL_EXTERNAL (fn) = 1;
1459 TREE_PUBLIC (fn) = 1;
1460 DECL_ARTIFICIAL (fn) = 1;
1461 TREE_NOTHROW (fn) = 1;
1462 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1463 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1465 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1466 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1468 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1470 block_move_fn = fn;
1473 if (asmspec)
1474 set_user_assembler_name (block_move_fn, asmspec);
1477 static tree
1478 emit_block_move_libcall_fn (int for_call)
1480 static bool emitted_extern;
1482 if (!block_move_fn)
1483 init_block_move_fn (NULL);
1485 if (for_call && !emitted_extern)
1487 emitted_extern = true;
1488 make_decl_rtl (block_move_fn);
1491 return block_move_fn;
1494 /* A subroutine of emit_block_move. Copy the data via an explicit
1495 loop. This is used only when libcalls are forbidden. */
1496 /* ??? It'd be nice to copy in hunks larger than QImode. */
1498 static void
1499 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1500 unsigned int align ATTRIBUTE_UNUSED)
1502 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1503 enum machine_mode x_addr_mode = get_address_mode (x);
1504 enum machine_mode y_addr_mode = get_address_mode (y);
1505 enum machine_mode iter_mode;
1507 iter_mode = GET_MODE (size);
1508 if (iter_mode == VOIDmode)
1509 iter_mode = word_mode;
1511 top_label = gen_label_rtx ();
1512 cmp_label = gen_label_rtx ();
1513 iter = gen_reg_rtx (iter_mode);
1515 emit_move_insn (iter, const0_rtx);
1517 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1518 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1519 do_pending_stack_adjust ();
1521 emit_jump (cmp_label);
1522 emit_label (top_label);
1524 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1525 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1527 if (x_addr_mode != y_addr_mode)
1528 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1529 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1531 x = change_address (x, QImode, x_addr);
1532 y = change_address (y, QImode, y_addr);
1534 emit_move_insn (x, y);
1536 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1537 true, OPTAB_LIB_WIDEN);
1538 if (tmp != iter)
1539 emit_move_insn (iter, tmp);
1541 emit_label (cmp_label);
1543 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1544 true, top_label, REG_BR_PROB_BASE * 90 / 100);
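/* The RTL emitted above corresponds roughly to the byte-copy loop
   below (illustrative C only, kept out of the build; X_ADDR, Y_ADDR
   and SIZE stand for the run-time values of the corresponding
   rtxes).  */
#if 0
unsigned long iter = 0;
goto cmp;
top:
((char *) x_addr)[iter] = ((char *) y_addr)[iter];
iter++;
cmp:
if (iter < size)
  goto top;
#endif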
1547 /* Copy all or part of a value X into registers starting at REGNO.
1548 The number of registers to be filled is NREGS. */
1550 void
1551 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1553 int i;
1554 #ifdef HAVE_load_multiple
1555 rtx pat;
1556 rtx last;
1557 #endif
1559 if (nregs == 0)
1560 return;
1562 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1563 x = validize_mem (force_const_mem (mode, x));
1565 /* See if the machine can do this with a load multiple insn. */
1566 #ifdef HAVE_load_multiple
1567 if (HAVE_load_multiple)
1569 last = get_last_insn ();
1570 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1571 GEN_INT (nregs));
1572 if (pat)
1574 emit_insn (pat);
1575 return;
1577 else
1578 delete_insns_since (last);
1580 #endif
1582 for (i = 0; i < nregs; i++)
1583 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1584 operand_subword_force (x, i, mode));
1587 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1588 The number of registers to be filled is NREGS. */
1590 void
1591 move_block_from_reg (int regno, rtx x, int nregs)
1593 int i;
1595 if (nregs == 0)
1596 return;
1598 /* See if the machine can do this with a store multiple insn. */
1599 #ifdef HAVE_store_multiple
1600 if (HAVE_store_multiple)
1602 rtx last = get_last_insn ();
1603 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1604 GEN_INT (nregs));
1605 if (pat)
1607 emit_insn (pat);
1608 return;
1610 else
1611 delete_insns_since (last);
1613 #endif
1615 for (i = 0; i < nregs; i++)
1617 rtx tem = operand_subword (x, i, 1, BLKmode);
1619 gcc_assert (tem);
1621 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1625 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1626 ORIG, where ORIG is a non-consecutive group of registers represented by
1627 a PARALLEL. The clone is identical to the original except in that the
1628 original set of registers is replaced by a new set of pseudo registers.
1629 The new set has the same modes as the original set. */
1632 gen_group_rtx (rtx orig)
1634 int i, length;
1635 rtx *tmps;
1637 gcc_assert (GET_CODE (orig) == PARALLEL);
1639 length = XVECLEN (orig, 0);
1640 tmps = XALLOCAVEC (rtx, length);
1642 /* Skip a NULL entry in first slot. */
1643 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1645 if (i)
1646 tmps[0] = 0;
1648 for (; i < length; i++)
1650 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1651 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1653 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1656 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1659 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1660 except that values are placed in TMPS[i], and must later be moved
1661 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1663 static void
1664 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1666 rtx src;
1667 int start, i;
1668 enum machine_mode m = GET_MODE (orig_src);
1670 gcc_assert (GET_CODE (dst) == PARALLEL);
1672 if (m != VOIDmode
1673 && !SCALAR_INT_MODE_P (m)
1674 && !MEM_P (orig_src)
1675 && GET_CODE (orig_src) != CONCAT)
1677 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1678 if (imode == BLKmode)
1679 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1680 else
1681 src = gen_reg_rtx (imode);
1682 if (imode != BLKmode)
1683 src = gen_lowpart (GET_MODE (orig_src), src);
1684 emit_move_insn (src, orig_src);
1685 /* ...and back again. */
1686 if (imode != BLKmode)
1687 src = gen_lowpart (imode, src);
1688 emit_group_load_1 (tmps, dst, src, type, ssize);
1689 return;
1692 /* Check for a NULL entry, used to indicate that the parameter goes
1693 both on the stack and in registers. */
1694 if (XEXP (XVECEXP (dst, 0, 0), 0))
1695 start = 0;
1696 else
1697 start = 1;
1699 /* Process the pieces. */
1700 for (i = start; i < XVECLEN (dst, 0); i++)
1702 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1703 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1704 unsigned int bytelen = GET_MODE_SIZE (mode);
1705 int shift = 0;
1707 /* Handle trailing fragments that run over the size of the struct. */
1708 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1710 /* Arrange to shift the fragment to where it belongs.
1711 extract_bit_field loads to the lsb of the reg. */
1712 if (
1713 #ifdef BLOCK_REG_PADDING
1714 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1715 == (BYTES_BIG_ENDIAN ? upward : downward)
1716 #else
1717 BYTES_BIG_ENDIAN
1718 #endif
1720 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1721 bytelen = ssize - bytepos;
1722 gcc_assert (bytelen > 0);
1725 /* If we won't be loading directly from memory, protect the real source
1726 from strange tricks we might play; but make sure that the source can
1727 be loaded directly into the destination. */
1728 src = orig_src;
1729 if (!MEM_P (orig_src)
1730 && (!CONSTANT_P (orig_src)
1731 || (GET_MODE (orig_src) != mode
1732 && GET_MODE (orig_src) != VOIDmode)))
1734 if (GET_MODE (orig_src) == VOIDmode)
1735 src = gen_reg_rtx (mode);
1736 else
1737 src = gen_reg_rtx (GET_MODE (orig_src));
1739 emit_move_insn (src, orig_src);
1742 /* Optimize the access just a bit. */
1743 if (MEM_P (src)
1744 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1745 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1746 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1747 && bytelen == GET_MODE_SIZE (mode))
1749 tmps[i] = gen_reg_rtx (mode);
1750 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1752 else if (COMPLEX_MODE_P (mode)
1753 && GET_MODE (src) == mode
1754 && bytelen == GET_MODE_SIZE (mode))
1755 /* Let emit_move_complex do the bulk of the work. */
1756 tmps[i] = src;
1757 else if (GET_CODE (src) == CONCAT)
1759 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1760 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1762 if ((bytepos == 0 && bytelen == slen0)
1763 || (bytepos != 0 && bytepos + bytelen <= slen))
1765 /* The following assumes that the concatenated objects all
1766 have the same size. In this case, a simple calculation
1767 can be used to determine the object and the bit field
1768 to be extracted. */
1769 tmps[i] = XEXP (src, bytepos / slen0);
1770 if (! CONSTANT_P (tmps[i])
1771 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1772 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1773 (bytepos % slen0) * BITS_PER_UNIT,
1774 1, NULL_RTX, mode, mode);
1776 else
1778 rtx mem;
1780 gcc_assert (!bytepos);
1781 mem = assign_stack_temp (GET_MODE (src), slen);
1782 emit_move_insn (mem, src);
1783 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1784 0, 1, NULL_RTX, mode, mode);
1787 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1788 SIMD register, which is currently broken. Until we get GCC
1789 to emit proper RTL for these cases, let's dump to memory. */
1790 else if (VECTOR_MODE_P (GET_MODE (dst))
1791 && REG_P (src))
1793 int slen = GET_MODE_SIZE (GET_MODE (src));
1794 rtx mem;
1796 mem = assign_stack_temp (GET_MODE (src), slen);
1797 emit_move_insn (mem, src);
1798 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1800 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1801 && XVECLEN (dst, 0) > 1)
1802 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1803 else if (CONSTANT_P (src))
1805 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1807 if (len == ssize)
1808 tmps[i] = src;
1809 else
1811 rtx first, second;
1813 gcc_assert (2 * len == ssize);
1814 split_double (src, &first, &second);
1815 if (i)
1816 tmps[i] = second;
1817 else
1818 tmps[i] = first;
1821 else if (REG_P (src) && GET_MODE (src) == mode)
1822 tmps[i] = src;
1823 else
1824 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1825 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1826 mode, mode);
1828 if (shift)
1829 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1830 shift, tmps[i], 0);
1834 /* Emit code to move a block SRC of type TYPE to a block DST,
1835 where DST is non-consecutive registers represented by a PARALLEL.
1836 SSIZE represents the total size of block SRC in bytes, or -1
1837 if not known. */
1839 void
1840 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1842 rtx *tmps;
1843 int i;
1845 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1846 emit_group_load_1 (tmps, dst, src, type, ssize);
1848 /* Copy the extracted pieces into the proper (probable) hard regs. */
1849 for (i = 0; i < XVECLEN (dst, 0); i++)
1851 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1852 if (d == NULL)
1853 continue;
1854 emit_move_insn (d, tmps[i]);
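/* Example (illustrative sketch, kept out of the build): loading a
   16-byte structure argument into the registers described by a
   PARALLEL.  DST_REGS would come from the target's function_arg hook,
   SRC_MEM is the BLKmode MEM holding the structure, and STRUCT_TYPE is
   its tree type; all three are hypothetical here.  */
#if 0
emit_group_load (dst_regs, src_mem, struct_type, 16);
#endif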
1858 /* Similar, but load SRC into new pseudos in a format that looks like
1859 PARALLEL. This can later be fed to emit_group_move to get things
1860 in the right place. */
1863 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1865 rtvec vec;
1866 int i;
1868 vec = rtvec_alloc (XVECLEN (parallel, 0));
1869 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1871 /* Convert the vector to look just like the original PARALLEL, except
1872 with the computed values. */
1873 for (i = 0; i < XVECLEN (parallel, 0); i++)
1875 rtx e = XVECEXP (parallel, 0, i);
1876 rtx d = XEXP (e, 0);
1878 if (d)
1880 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1881 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1883 RTVEC_ELT (vec, i) = e;
1886 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1889 /* Emit code to move a block SRC to block DST, where SRC and DST are
1890 non-consecutive groups of registers, each represented by a PARALLEL. */
1892 void
1893 emit_group_move (rtx dst, rtx src)
1895 int i;
1897 gcc_assert (GET_CODE (src) == PARALLEL
1898 && GET_CODE (dst) == PARALLEL
1899 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1901 /* Skip first entry if NULL. */
1902 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1903 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1904 XEXP (XVECEXP (src, 0, i), 0));
1907 /* Move a group of registers represented by a PARALLEL into pseudos. */
1910 emit_group_move_into_temps (rtx src)
1912 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1913 int i;
1915 for (i = 0; i < XVECLEN (src, 0); i++)
1917 rtx e = XVECEXP (src, 0, i);
1918 rtx d = XEXP (e, 0);
1920 if (d)
1921 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1922 RTVEC_ELT (vec, i) = e;
1925 return gen_rtx_PARALLEL (GET_MODE (src), vec);
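/* Illustrative sketch (not compiled): the two-step pattern used when the
   final hard registers must not be clobbered too early, e.g. while other
   arguments are still being evaluated.  ARG_PARALLEL and ARG_MEM are
   placeholders for whatever the caller already has.  */
#if 0
static void
sketch_deferred_group_move (rtx arg_parallel, rtx arg_mem, tree type)
{
  /* First extract the pieces into fresh pseudos...  */
  rtx temps = emit_group_load_into_temps (arg_parallel, arg_mem, type,
                                          int_size_in_bytes (type));

  /* ... possibly emit unrelated code here ...  */

  /* ... and only then copy them into the real registers.  */
  emit_group_move (arg_parallel, temps);
}
#endif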
1928 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1929 where SRC is non-consecutive registers represented by a PARALLEL.
1930 SSIZE represents the total size of block ORIG_DST, or -1 if not
1931 known. */
1933 void
1934 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1936 rtx *tmps, dst;
1937 int start, finish, i;
1938 enum machine_mode m = GET_MODE (orig_dst);
1940 gcc_assert (GET_CODE (src) == PARALLEL);
1942 if (!SCALAR_INT_MODE_P (m)
1943 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1945 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1946 if (imode == BLKmode)
1947 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1948 else
1949 dst = gen_reg_rtx (imode);
1950 emit_group_store (dst, src, type, ssize);
1951 if (imode != BLKmode)
1952 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1953 emit_move_insn (orig_dst, dst);
1954 return;
1957 /* Check for a NULL entry, used to indicate that the parameter goes
1958 both on the stack and in registers. */
1959 if (XEXP (XVECEXP (src, 0, 0), 0))
1960 start = 0;
1961 else
1962 start = 1;
1963 finish = XVECLEN (src, 0);
1965 tmps = XALLOCAVEC (rtx, finish);
1967 /* Copy the (probable) hard regs into pseudos. */
1968 for (i = start; i < finish; i++)
1970 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1971 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1973 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1974 emit_move_insn (tmps[i], reg);
1976 else
1977 tmps[i] = reg;
1980 /* If we won't be storing directly into memory, protect the real destination
1981 from strange tricks we might play. */
1982 dst = orig_dst;
1983 if (GET_CODE (dst) == PARALLEL)
1985 rtx temp;
1987 /* We can get a PARALLEL dst if there is a conditional expression in
1988 a return statement. In that case, the dst and src are the same,
1989 so no action is necessary. */
1990 if (rtx_equal_p (dst, src))
1991 return;
1993 /* It is unclear if we can ever reach here, but we may as well handle
1994 it. Allocate a temporary, and split this into a store/load to/from
1995 the temporary. */
1996 temp = assign_stack_temp (GET_MODE (dst), ssize);
1997 emit_group_store (temp, src, type, ssize);
1998 emit_group_load (dst, temp, type, ssize);
1999 return;
2001 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2003 enum machine_mode outer = GET_MODE (dst);
2004 enum machine_mode inner;
2005 HOST_WIDE_INT bytepos;
2006 bool done = false;
2007 rtx temp;
2009 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2010 dst = gen_reg_rtx (outer);
2012 /* Make life a bit easier for combine. */
2013 /* If the first element of the vector is the low part
2014 of the destination mode, use a paradoxical subreg to
2015 initialize the destination. */
2016 if (start < finish)
2018 inner = GET_MODE (tmps[start]);
2019 bytepos = subreg_lowpart_offset (inner, outer);
2020 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2022 temp = simplify_gen_subreg (outer, tmps[start],
2023 inner, 0);
2024 if (temp)
2026 emit_move_insn (dst, temp);
2027 done = true;
2028 start++;
2033 /* If the first element wasn't the low part, try the last. */
2034 if (!done
2035 && start < finish - 1)
2037 inner = GET_MODE (tmps[finish - 1]);
2038 bytepos = subreg_lowpart_offset (inner, outer);
2039 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2041 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2042 inner, 0);
2043 if (temp)
2045 emit_move_insn (dst, temp);
2046 done = true;
2047 finish--;
2052 /* Otherwise, simply initialize the result to zero. */
2053 if (!done)
2054 emit_move_insn (dst, CONST0_RTX (outer));
2057 /* Process the pieces. */
2058 for (i = start; i < finish; i++)
2060 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2061 enum machine_mode mode = GET_MODE (tmps[i]);
2062 unsigned int bytelen = GET_MODE_SIZE (mode);
2063 unsigned int adj_bytelen;
2064 rtx dest = dst;
2066 /* Handle trailing fragments that run over the size of the struct. */
2067 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2068 adj_bytelen = ssize - bytepos;
2069 else
2070 adj_bytelen = bytelen;
2072 if (GET_CODE (dst) == CONCAT)
2074 if (bytepos + adj_bytelen
2075 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2076 dest = XEXP (dst, 0);
2077 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2079 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2080 dest = XEXP (dst, 1);
2082 else
2084 enum machine_mode dest_mode = GET_MODE (dest);
2085 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2087 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2089 if (GET_MODE_ALIGNMENT (dest_mode)
2090 >= GET_MODE_ALIGNMENT (tmp_mode))
2092 dest = assign_stack_temp (dest_mode,
2093 GET_MODE_SIZE (dest_mode));
2094 emit_move_insn (adjust_address (dest,
2095 tmp_mode,
2096 bytepos),
2097 tmps[i]);
2098 dst = dest;
2100 else
2102 dest = assign_stack_temp (tmp_mode,
2103 GET_MODE_SIZE (tmp_mode));
2104 emit_move_insn (dest, tmps[i]);
2105 dst = adjust_address (dest, dest_mode, bytepos);
2107 break;
2111 /* Handle trailing fragments that run over the size of the struct. */
2112 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2114 /* store_bit_field always takes its value from the lsb.
2115 Move the fragment to the lsb if it's not already there. */
2116 if (
2117 #ifdef BLOCK_REG_PADDING
2118 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2119 == (BYTES_BIG_ENDIAN ? upward : downward)
2120 #else
2121 BYTES_BIG_ENDIAN
2122 #endif
2125 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2126 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2127 shift, tmps[i], 0);
2130 /* Make sure not to write past the end of the struct. */
2131 store_bit_field (dest,
2132 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2133 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2134 VOIDmode, tmps[i]);
2137 /* Optimize the access just a bit. */
2138 else if (MEM_P (dest)
2139 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2140 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2141 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2142 && bytelen == GET_MODE_SIZE (mode))
2143 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2145 else
2146 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2147 0, 0, mode, tmps[i]);
2150 /* Copy from the pseudo into the (probable) hard reg. */
2151 if (orig_dst != dst)
2152 emit_move_insn (orig_dst, dst);
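/* Illustrative sketch (not compiled): spilling a value returned in a
   register PARALLEL into a stack temporary so that it can be addressed as
   memory.  VALUE_PARALLEL is a placeholder for the rtx produced by the
   call expander.  */
#if 0
static rtx
sketch_spill_group_return (rtx value_parallel, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx mem = assign_stack_temp (TYPE_MODE (type), size);

  emit_group_store (mem, value_parallel, type, size);
  return mem;
}
#endif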
2155 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2156 of the value stored in X. */
2159 maybe_emit_group_store (rtx x, tree type)
2161 enum machine_mode mode = TYPE_MODE (type);
2162 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2163 if (GET_CODE (x) == PARALLEL)
2165 rtx result = gen_reg_rtx (mode);
2166 emit_group_store (result, x, type, int_size_in_bytes (type));
2167 return result;
2169 return x;
2172 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2174 This is used on targets that return BLKmode values in registers. */
2176 void
2177 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2179 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2180 rtx src = NULL, dst = NULL;
2181 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2182 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2183 enum machine_mode mode = GET_MODE (srcreg);
2184 enum machine_mode tmode = GET_MODE (target);
2185 enum machine_mode copy_mode;
2187 /* BLKmode registers created in the back-end shouldn't have survived. */
2188 gcc_assert (mode != BLKmode);
2190 /* If the structure doesn't take up a whole number of words, see whether
2191 SRCREG is padded on the left or on the right. If it's on the left,
2192 set PADDING_CORRECTION to the number of bits to skip.
2194 In most ABIs, the structure will be returned at the least significant end of
2195 the register, which translates to right padding on little-endian
2196 targets and left padding on big-endian targets. The opposite
2197 holds if the structure is returned at the most significant
2198 end of the register. */
2199 if (bytes % UNITS_PER_WORD != 0
2200 && (targetm.calls.return_in_msb (type)
2201 ? !BYTES_BIG_ENDIAN
2202 : BYTES_BIG_ENDIAN))
2203 padding_correction
2204 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2206 /* We can use a single move if we have an exact mode for the size. */
2207 else if (MEM_P (target)
2208 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2209 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2210 && bytes == GET_MODE_SIZE (mode))
2212 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2213 return;
2216 /* And if we additionally have the same mode for a register. */
2217 else if (REG_P (target)
2218 && GET_MODE (target) == mode
2219 && bytes == GET_MODE_SIZE (mode))
2221 emit_move_insn (target, srcreg);
2222 return;
2225 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2226 into a new pseudo which is a full word. */
2227 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2229 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2230 mode = word_mode;
2233 /* Copy the structure BITSIZE bits at a time. If the target lives in
2234 memory, take care of not reading/writing past its end by selecting
2235 a copy mode suited to BITSIZE. This should always be possible given
2236 how it is computed.
2238 If the target lives in a register, make sure not to select a copy mode
2239 larger than the mode of the register.
2241 We could probably emit more efficient code for machines which do not use
2242 strict alignment, but it doesn't seem worth the effort at the current
2243 time. */
2245 copy_mode = word_mode;
2246 if (MEM_P (target))
2248 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2249 if (mem_mode != BLKmode)
2250 copy_mode = mem_mode;
2252 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2253 copy_mode = tmode;
2255 for (bitpos = 0, xbitpos = padding_correction;
2256 bitpos < bytes * BITS_PER_UNIT;
2257 bitpos += bitsize, xbitpos += bitsize)
2259 /* We need a new source operand each time xbitpos is on a
2260 word boundary and when xbitpos == padding_correction
2261 (the first time through). */
2262 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2263 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2265 /* We need a new destination operand each time bitpos is on
2266 a word boundary. */
2267 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2268 dst = target;
2269 else if (bitpos % BITS_PER_WORD == 0)
2270 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2272 /* Use xbitpos for the source extraction (right justified) and
2273 bitpos for the destination store (left justified). */
2274 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2275 extract_bit_field (src, bitsize,
2276 xbitpos % BITS_PER_WORD, 1,
2277 NULL_RTX, copy_mode, copy_mode));
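/* Illustrative sketch (not compiled): a caller that has just expanded a
   call returning a small BLKmode aggregate in SRCREG might land it in a
   stack slot like this.  SRCREG is a placeholder.  */
#if 0
static rtx
sketch_copy_blkmode_return (rtx srcreg, tree type)
{
  rtx target_mem = assign_stack_temp (BLKmode, int_size_in_bytes (type));

  copy_blkmode_from_reg (target_mem, srcreg, type);
  return target_mem;
}
#endif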
2281 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2282 register if it contains any data, otherwise return null.
2284 This is used on targets that return BLKmode values in registers. */
2287 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2289 int i, n_regs;
2290 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2291 unsigned int bitsize;
2292 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2293 enum machine_mode dst_mode;
2295 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2297 x = expand_normal (src);
2299 bytes = int_size_in_bytes (TREE_TYPE (src));
2300 if (bytes == 0)
2301 return NULL_RTX;
2303 /* If the structure doesn't take up a whole number of words, see
2304 whether the register value should be padded on the left or on
2305 the right. Set PADDING_CORRECTION to the number of padding
2306 bits needed on the left side.
2308 In most ABIs, the structure will be returned at the least significant end of
2309 the register, which translates to right padding on little-endian
2310 targets and left padding on big-endian targets. The opposite
2311 holds if the structure is returned at the most significant
2312 end of the register. */
2313 if (bytes % UNITS_PER_WORD != 0
2314 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2315 ? !BYTES_BIG_ENDIAN
2316 : BYTES_BIG_ENDIAN))
2317 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2318 * BITS_PER_UNIT));
2320 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2321 dst_words = XALLOCAVEC (rtx, n_regs);
2322 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2324 /* Copy the structure BITSIZE bits at a time. */
2325 for (bitpos = 0, xbitpos = padding_correction;
2326 bitpos < bytes * BITS_PER_UNIT;
2327 bitpos += bitsize, xbitpos += bitsize)
2329 /* We need a new destination pseudo each time xbitpos is
2330 on a word boundary and when xbitpos == padding_correction
2331 (the first time through). */
2332 if (xbitpos % BITS_PER_WORD == 0
2333 || xbitpos == padding_correction)
2335 /* Generate an appropriate register. */
2336 dst_word = gen_reg_rtx (word_mode);
2337 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2339 /* Clear the destination before we move anything into it. */
2340 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2343 /* We need a new source operand each time bitpos is on a word
2344 boundary. */
2345 if (bitpos % BITS_PER_WORD == 0)
2346 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2348 /* Use bitpos for the source extraction (left justified) and
2349 xbitpos for the destination store (right justified). */
2350 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2351 0, 0, word_mode,
2352 extract_bit_field (src_word, bitsize,
2353 bitpos % BITS_PER_WORD, 1,
2354 NULL_RTX, word_mode, word_mode));
2357 if (mode == BLKmode)
2359 /* Find the smallest integer mode large enough to hold the
2360 entire structure. */
2361 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2362 mode != VOIDmode;
2363 mode = GET_MODE_WIDER_MODE (mode))
2364 /* Have we found a large enough mode? */
2365 if (GET_MODE_SIZE (mode) >= bytes)
2366 break;
2368 /* A suitable mode should have been found. */
2369 gcc_assert (mode != VOIDmode);
2372 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2373 dst_mode = word_mode;
2374 else
2375 dst_mode = mode;
2376 dst = gen_reg_rtx (dst_mode);
2378 for (i = 0; i < n_regs; i++)
2379 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2381 if (mode != dst_mode)
2382 dst = gen_lowpart (mode, dst);
2384 return dst;
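/* Illustrative sketch (not compiled): the typical use is when expanding a
   return statement for a function whose BLKmode result is nevertheless
   passed back in a register.  RESULT_RTL is a placeholder for DECL_RTL of
   the RESULT_DECL.  */
#if 0
static void
sketch_return_blkmode_in_reg (rtx result_rtl, tree retval_expr)
{
  /* RESULT_RTL is assumed to be a hard register with a scalar mode;
     copy_blkmode_to_reg then hands back a pseudo of that same mode.  */
  rtx val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_expr);

  if (val)
    emit_move_insn (result_rtl, val);
}
#endif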
2387 /* Add a USE expression for REG to the (possibly empty) list pointed
2388 to by CALL_FUSAGE. REG must denote a hard register. */
2390 void
2391 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2393 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2395 *call_fusage
2396 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2399 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2400 to by CALL_FUSAGE. REG must denote a hard register. */
2402 void
2403 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2405 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2407 *call_fusage
2408 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2411 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2412 starting at REGNO. All of these registers must be hard registers. */
2414 void
2415 use_regs (rtx *call_fusage, int regno, int nregs)
2417 int i;
2419 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2421 for (i = 0; i < nregs; i++)
2422 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2425 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2426 PARALLEL REGS. This is for calls that pass values in multiple
2427 non-contiguous locations. The Irix 6 ABI has examples of this. */
2429 void
2430 use_group_regs (rtx *call_fusage, rtx regs)
2432 int i;
2434 for (i = 0; i < XVECLEN (regs, 0); i++)
2436 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2438 /* A NULL entry means the parameter goes both on the stack and in
2439 registers. This can also be a MEM for targets that pass values
2440 partially on the stack and partially in registers. */
2441 if (reg != 0 && REG_P (reg))
2442 use_reg (call_fusage, reg);
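/* Illustrative sketch (not compiled): building the CALL_INSN_FUNCTION_USAGE
   list for a call whose arguments live in hard registers.  Register number
   4 and ARG_PARALLEL are hypothetical.  */
#if 0
static rtx
sketch_build_call_fusage (rtx arg_parallel)
{
  rtx call_fusage = NULL_RTX;

  /* An argument passed in two consecutive hard registers.  */
  use_regs (&call_fusage, 4, 2);

  /* An argument passed in non-contiguous registers described by a
     PARALLEL.  */
  use_group_regs (&call_fusage, arg_parallel);

  /* The result is later attached to the CALL_INSN as its
     CALL_INSN_FUNCTION_USAGE.  */
  return call_fusage;
}
#endif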
2446 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2447 assignment and the code of the expression on the RHS is CODE. Return
2448 NULL otherwise. */
2450 static gimple
2451 get_def_for_expr (tree name, enum tree_code code)
2453 gimple def_stmt;
2455 if (TREE_CODE (name) != SSA_NAME)
2456 return NULL;
2458 def_stmt = get_gimple_for_ssa_name (name);
2459 if (!def_stmt
2460 || gimple_assign_rhs_code (def_stmt) != code)
2461 return NULL;
2463 return def_stmt;
2466 #ifdef HAVE_conditional_move
2467 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2468 assignment and the class of the expression on the RHS is CLASS. Return
2469 NULL otherwise. */
2471 static gimple
2472 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2474 gimple def_stmt;
2476 if (TREE_CODE (name) != SSA_NAME)
2477 return NULL;
2479 def_stmt = get_gimple_for_ssa_name (name);
2480 if (!def_stmt
2481 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2482 return NULL;
2484 return def_stmt;
2486 #endif
2489 /* Determine whether the LEN bytes generated by CONSTFUN can be
2490 stored to memory using several move instructions. CONSTFUNDATA is
2491 a pointer which will be passed as argument in every CONSTFUN call.
2492 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2493 a memset operation and false if it's a copy of a constant string.
2494 Return nonzero if a call to store_by_pieces should succeed. */
2497 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2498 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2499 void *constfundata, unsigned int align, bool memsetp)
2501 unsigned HOST_WIDE_INT l;
2502 unsigned int max_size;
2503 HOST_WIDE_INT offset = 0;
2504 enum machine_mode mode;
2505 enum insn_code icode;
2506 int reverse;
2507 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2508 rtx cst ATTRIBUTE_UNUSED;
2510 if (len == 0)
2511 return 1;
2513 if (! (memsetp
2514 ? SET_BY_PIECES_P (len, align)
2515 : STORE_BY_PIECES_P (len, align)))
2516 return 0;
2518 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2520 /* We would first store what we can in the largest integer mode, then go to
2521 successively smaller modes. */
2523 for (reverse = 0;
2524 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2525 reverse++)
2527 l = len;
2528 max_size = STORE_MAX_PIECES + 1;
2529 while (max_size > 1 && l > 0)
2531 mode = widest_int_mode_for_size (max_size);
2533 if (mode == VOIDmode)
2534 break;
2536 icode = optab_handler (mov_optab, mode);
2537 if (icode != CODE_FOR_nothing
2538 && align >= GET_MODE_ALIGNMENT (mode))
2540 unsigned int size = GET_MODE_SIZE (mode);
2542 while (l >= size)
2544 if (reverse)
2545 offset -= size;
2547 cst = (*constfun) (constfundata, offset, mode);
2548 if (!targetm.legitimate_constant_p (mode, cst))
2549 return 0;
2551 if (!reverse)
2552 offset += size;
2554 l -= size;
2558 max_size = GET_MODE_SIZE (mode);
2561 /* The code above should have handled everything. */
2562 gcc_assert (!l);
2565 return 1;
2568 /* Generate several move instructions to store LEN bytes generated by
2569 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2570 pointer which will be passed as argument in every CONSTFUN call.
2571 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2572 a memset operation and false if it's a copy of a constant string.
2573 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2574 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2575 stpcpy. */
2578 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2579 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2580 void *constfundata, unsigned int align, bool memsetp, int endp)
2582 enum machine_mode to_addr_mode = get_address_mode (to);
2583 struct store_by_pieces_d data;
2585 if (len == 0)
2587 gcc_assert (endp != 2);
2588 return to;
2591 gcc_assert (memsetp
2592 ? SET_BY_PIECES_P (len, align)
2593 : STORE_BY_PIECES_P (len, align));
2594 data.constfun = constfun;
2595 data.constfundata = constfundata;
2596 data.len = len;
2597 data.to = to;
2598 store_by_pieces_1 (&data, align);
2599 if (endp)
2601 rtx to1;
2603 gcc_assert (!data.reverse);
2604 if (data.autinc_to)
2606 if (endp == 2)
2608 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2609 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2610 else
2611 data.to_addr = copy_to_mode_reg (to_addr_mode,
2612 plus_constant (to_addr_mode,
2613 data.to_addr,
2614 -1));
2616 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2617 data.offset);
2619 else
2621 if (endp == 2)
2622 --data.offset;
2623 to1 = adjust_address (data.to, QImode, data.offset);
2625 return to1;
2627 else
2628 return data.to;
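/* Illustrative sketch (not compiled): a memset-style use of
   can_store_by_pieces / store_by_pieces with a callback that always yields
   zero, mirroring clear_by_pieces_1 below.  DEST_MEM is a placeholder for
   a BLKmode MEM whose alignment is already recorded.  */
#if 0
static rtx
sketch_zero_constfun (void *data ATTRIBUTE_UNUSED,
                      HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                      enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
sketch_clear_small_block (rtx dest_mem, unsigned HOST_WIDE_INT len)
{
  unsigned int align = MEM_ALIGN (dest_mem);

  if (can_store_by_pieces (len, sketch_zero_constfun, NULL, align, true))
    store_by_pieces (dest_mem, len, sketch_zero_constfun, NULL, align,
                     true, 0);
}
#endif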
2631 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2632 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2634 static void
2635 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2637 struct store_by_pieces_d data;
2639 if (len == 0)
2640 return;
2642 data.constfun = clear_by_pieces_1;
2643 data.constfundata = NULL;
2644 data.len = len;
2645 data.to = to;
2646 store_by_pieces_1 (&data, align);
2649 /* Callback routine for clear_by_pieces.
2650 Return const0_rtx unconditionally. */
2652 static rtx
2653 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2654 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2655 enum machine_mode mode ATTRIBUTE_UNUSED)
2657 return const0_rtx;
2660 /* Subroutine of clear_by_pieces and store_by_pieces.
2661 Generate several move instructions to store LEN bytes of block TO. (A MEM
2662 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2664 static void
2665 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2666 unsigned int align ATTRIBUTE_UNUSED)
2668 enum machine_mode to_addr_mode = get_address_mode (data->to);
2669 rtx to_addr = XEXP (data->to, 0);
2670 unsigned int max_size = STORE_MAX_PIECES + 1;
2671 enum insn_code icode;
2673 data->offset = 0;
2674 data->to_addr = to_addr;
2675 data->autinc_to
2676 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2677 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2679 data->explicit_inc_to = 0;
2680 data->reverse
2681 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2682 if (data->reverse)
2683 data->offset = data->len;
2685 /* If storing requires more than two move insns,
2686 copy addresses to registers (to make displacements shorter)
2687 and use post-increment if available. */
2688 if (!data->autinc_to
2689 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2691 /* Determine the main mode we'll be using.
2692 MODE might not be used depending on the definitions of the
2693 USE_* macros below. */
2694 enum machine_mode mode ATTRIBUTE_UNUSED
2695 = widest_int_mode_for_size (max_size);
2697 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2699 data->to_addr = copy_to_mode_reg (to_addr_mode,
2700 plus_constant (to_addr_mode,
2701 to_addr,
2702 data->len));
2703 data->autinc_to = 1;
2704 data->explicit_inc_to = -1;
2707 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2708 && ! data->autinc_to)
2710 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2711 data->autinc_to = 1;
2712 data->explicit_inc_to = 1;
2715 if ( !data->autinc_to && CONSTANT_P (to_addr))
2716 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2719 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2721 /* First store what we can in the largest integer mode, then go to
2722 successively smaller modes. */
2724 while (max_size > 1 && data->len > 0)
2726 enum machine_mode mode = widest_int_mode_for_size (max_size);
2728 if (mode == VOIDmode)
2729 break;
2731 icode = optab_handler (mov_optab, mode);
2732 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2733 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2735 max_size = GET_MODE_SIZE (mode);
2738 /* The code above should have handled everything. */
2739 gcc_assert (!data->len);
2742 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2743 with move instructions for mode MODE. GENFUN is the gen_... function
2744 to make a move insn for that mode. DATA has all the other info. */
2746 static void
2747 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2748 struct store_by_pieces_d *data)
2750 unsigned int size = GET_MODE_SIZE (mode);
2751 rtx to1, cst;
2753 while (data->len >= size)
2755 if (data->reverse)
2756 data->offset -= size;
2758 if (data->autinc_to)
2759 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2760 data->offset);
2761 else
2762 to1 = adjust_address (data->to, mode, data->offset);
2764 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2765 emit_insn (gen_add2_insn (data->to_addr,
2766 gen_int_mode (-(HOST_WIDE_INT) size,
2767 GET_MODE (data->to_addr))));
2769 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2770 emit_insn ((*genfun) (to1, cst));
2772 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2773 emit_insn (gen_add2_insn (data->to_addr,
2774 gen_int_mode (size,
2775 GET_MODE (data->to_addr))));
2777 if (! data->reverse)
2778 data->offset += size;
2780 data->len -= size;
2784 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2785 its length in bytes. */
2788 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2789 unsigned int expected_align, HOST_WIDE_INT expected_size,
2790 unsigned HOST_WIDE_INT min_size,
2791 unsigned HOST_WIDE_INT max_size,
2792 unsigned HOST_WIDE_INT probable_max_size)
2794 enum machine_mode mode = GET_MODE (object);
2795 unsigned int align;
2797 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2799 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2800 just move a zero. Otherwise, do this a piece at a time. */
2801 if (mode != BLKmode
2802 && CONST_INT_P (size)
2803 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2805 rtx zero = CONST0_RTX (mode);
2806 if (zero != NULL)
2808 emit_move_insn (object, zero);
2809 return NULL;
2812 if (COMPLEX_MODE_P (mode))
2814 zero = CONST0_RTX (GET_MODE_INNER (mode));
2815 if (zero != NULL)
2817 write_complex_part (object, zero, 0);
2818 write_complex_part (object, zero, 1);
2819 return NULL;
2824 if (size == const0_rtx)
2825 return NULL;
2827 align = MEM_ALIGN (object);
2829 if (CONST_INT_P (size)
2830 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2831 clear_by_pieces (object, INTVAL (size), align);
2832 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2833 expected_align, expected_size,
2834 min_size, max_size, probable_max_size))
2836 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2837 return set_storage_via_libcall (object, size, const0_rtx,
2838 method == BLOCK_OP_TAILCALL);
2839 else
2840 gcc_unreachable ();
2842 return NULL;
2846 clear_storage (rtx object, rtx size, enum block_op_methods method)
2848 unsigned HOST_WIDE_INT max, min = 0;
2849 if (GET_CODE (size) == CONST_INT)
2850 min = max = UINTVAL (size);
2851 else
2852 max = GET_MODE_MASK (GET_MODE (size));
2853 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
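/* Illustrative sketch (not compiled): zeroing an aggregate object given
   its BLKmode MEM.  DEST_MEM is a placeholder.  */
#if 0
static void
sketch_zero_object (rtx dest_mem, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);

  if (size > 0)
    clear_storage (dest_mem, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif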
2857 /* A subroutine of clear_storage. Expand a call to memset.
2858 Return the return value of memset, 0 otherwise. */
2861 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2863 tree call_expr, fn, object_tree, size_tree, val_tree;
2864 enum machine_mode size_mode;
2865 rtx retval;
2867 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2868 place those new pseudos into a VAR_DECL and use them later. */
2870 object = copy_addr_to_reg (XEXP (object, 0));
2872 size_mode = TYPE_MODE (sizetype);
2873 size = convert_to_mode (size_mode, size, 1);
2874 size = copy_to_mode_reg (size_mode, size);
2876 /* It is incorrect to use the libcall calling conventions to call
2877 memset in this context. This could be a user call to memset and
2878 the user may wish to examine the return value from memset. For
2879 targets where libcalls and normal calls have different conventions
2880 for returning pointers, we could end up generating incorrect code. */
2882 object_tree = make_tree (ptr_type_node, object);
2883 if (!CONST_INT_P (val))
2884 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2885 size_tree = make_tree (sizetype, size);
2886 val_tree = make_tree (integer_type_node, val);
2888 fn = clear_storage_libcall_fn (true);
2889 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2890 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2892 retval = expand_normal (call_expr);
2894 return retval;
2897 /* A subroutine of set_storage_via_libcall. Create the tree node
2898 for the function we use for block clears. */
2900 tree block_clear_fn;
2902 void
2903 init_block_clear_fn (const char *asmspec)
2905 if (!block_clear_fn)
2907 tree fn, args;
2909 fn = get_identifier ("memset");
2910 args = build_function_type_list (ptr_type_node, ptr_type_node,
2911 integer_type_node, sizetype,
2912 NULL_TREE);
2914 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2915 DECL_EXTERNAL (fn) = 1;
2916 TREE_PUBLIC (fn) = 1;
2917 DECL_ARTIFICIAL (fn) = 1;
2918 TREE_NOTHROW (fn) = 1;
2919 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2920 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2922 block_clear_fn = fn;
2925 if (asmspec)
2926 set_user_assembler_name (block_clear_fn, asmspec);
2929 static tree
2930 clear_storage_libcall_fn (int for_call)
2932 static bool emitted_extern;
2934 if (!block_clear_fn)
2935 init_block_clear_fn (NULL);
2937 if (for_call && !emitted_extern)
2939 emitted_extern = true;
2940 make_decl_rtl (block_clear_fn);
2943 return block_clear_fn;
2946 /* Expand a setmem pattern; return true if successful. */
2948 bool
2949 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2950 unsigned int expected_align, HOST_WIDE_INT expected_size,
2951 unsigned HOST_WIDE_INT min_size,
2952 unsigned HOST_WIDE_INT max_size,
2953 unsigned HOST_WIDE_INT probable_max_size)
2955 /* Try the most limited insn first, because there's no point
2956 including more than one in the machine description unless
2957 the more limited one has some advantage. */
2959 enum machine_mode mode;
2961 if (expected_align < align)
2962 expected_align = align;
2963 if (expected_size != -1)
2965 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2966 expected_size = max_size;
2967 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2968 expected_size = min_size;
2971 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2972 mode = GET_MODE_WIDER_MODE (mode))
2974 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2976 if (code != CODE_FOR_nothing
2977 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2978 here because if SIZE is less than the mode mask, as it is
2979 returned by the macro, it will definitely be less than the
2980 actual mode mask. Since SIZE is within the Pmode address
2981 space, we limit MODE to Pmode. */
2982 && ((CONST_INT_P (size)
2983 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2984 <= (GET_MODE_MASK (mode) >> 1)))
2985 || max_size <= (GET_MODE_MASK (mode) >> 1)
2986 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2988 struct expand_operand ops[9];
2989 unsigned int nops;
2991 nops = insn_data[(int) code].n_generator_args;
2992 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2994 create_fixed_operand (&ops[0], object);
2995 /* The check above guarantees that this size conversion is valid. */
2996 create_convert_operand_to (&ops[1], size, mode, true);
2997 create_convert_operand_from (&ops[2], val, byte_mode, true);
2998 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2999 if (nops >= 6)
3001 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3002 create_integer_operand (&ops[5], expected_size);
3004 if (nops >= 8)
3006 create_integer_operand (&ops[6], min_size);
3007 /* If we cannot represent the maximal size,
3008 make the parameter NULL. */
3009 if ((HOST_WIDE_INT) max_size != -1)
3010 create_integer_operand (&ops[7], max_size);
3011 else
3012 create_fixed_operand (&ops[7], NULL);
3014 if (nops == 9)
3016 /* If we cannot represent the maximal size,
3017 make the parameter NULL. */
3018 if ((HOST_WIDE_INT) probable_max_size != -1)
3019 create_integer_operand (&ops[8], probable_max_size);
3020 else
3021 create_fixed_operand (&ops[8], NULL);
3023 if (maybe_expand_insn (code, nops, ops))
3024 return true;
3028 return false;
3032 /* Write to one of the components of the complex value CPLX. Write VAL to
3033 the real part if IMAG_P is false, and the imaginary part if it's true. */
3035 static void
3036 write_complex_part (rtx cplx, rtx val, bool imag_p)
3038 enum machine_mode cmode;
3039 enum machine_mode imode;
3040 unsigned ibitsize;
3042 if (GET_CODE (cplx) == CONCAT)
3044 emit_move_insn (XEXP (cplx, imag_p), val);
3045 return;
3048 cmode = GET_MODE (cplx);
3049 imode = GET_MODE_INNER (cmode);
3050 ibitsize = GET_MODE_BITSIZE (imode);
3052 /* For MEMs simplify_gen_subreg may generate an invalid new address
3053 because, e.g., the original address is considered mode-dependent
3054 by the target, which restricts simplify_subreg from invoking
3055 adjust_address_nv. Instead of preparing fallback support for an
3056 invalid address, we call adjust_address_nv directly. */
3057 if (MEM_P (cplx))
3059 emit_move_insn (adjust_address_nv (cplx, imode,
3060 imag_p ? GET_MODE_SIZE (imode) : 0),
3061 val);
3062 return;
3065 /* If the sub-object is at least word sized, then we know that subregging
3066 will work. This special case is important, since store_bit_field
3067 wants to operate on integer modes, and there's rarely an OImode to
3068 correspond to TCmode. */
3069 if (ibitsize >= BITS_PER_WORD
3070 /* For hard regs we have exact predicates. Assume we can split
3071 the original object if it spans an even number of hard regs.
3072 This special case is important for SCmode on 64-bit platforms
3073 where the natural size of floating-point regs is 32-bit. */
3074 || (REG_P (cplx)
3075 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3076 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3078 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3079 imag_p ? GET_MODE_SIZE (imode) : 0);
3080 if (part)
3082 emit_move_insn (part, val);
3083 return;
3085 else
3086 /* simplify_gen_subreg may fail for sub-word MEMs. */
3087 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3090 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3093 /* Extract one of the components of the complex value CPLX. Extract the
3094 real part if IMAG_P is false, and the imaginary part if it's true. */
3096 static rtx
3097 read_complex_part (rtx cplx, bool imag_p)
3099 enum machine_mode cmode, imode;
3100 unsigned ibitsize;
3102 if (GET_CODE (cplx) == CONCAT)
3103 return XEXP (cplx, imag_p);
3105 cmode = GET_MODE (cplx);
3106 imode = GET_MODE_INNER (cmode);
3107 ibitsize = GET_MODE_BITSIZE (imode);
3109 /* Special case reads from complex constants that got spilled to memory. */
3110 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3112 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3113 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3115 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3116 if (CONSTANT_CLASS_P (part))
3117 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3121 /* For MEMs simplify_gen_subreg may generate an invalid new address
3122 because, e.g., the original address is considered mode-dependent
3123 by the target, which restricts simplify_subreg from invoking
3124 adjust_address_nv. Instead of preparing fallback support for an
3125 invalid address, we call adjust_address_nv directly. */
3126 if (MEM_P (cplx))
3127 return adjust_address_nv (cplx, imode,
3128 imag_p ? GET_MODE_SIZE (imode) : 0);
3130 /* If the sub-object is at least word sized, then we know that subregging
3131 will work. This special case is important, since extract_bit_field
3132 wants to operate on integer modes, and there's rarely an OImode to
3133 correspond to TCmode. */
3134 if (ibitsize >= BITS_PER_WORD
3135 /* For hard regs we have exact predicates. Assume we can split
3136 the original object if it spans an even number of hard regs.
3137 This special case is important for SCmode on 64-bit platforms
3138 where the natural size of floating-point regs is 32-bit. */
3139 || (REG_P (cplx)
3140 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3141 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3143 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3144 imag_p ? GET_MODE_SIZE (imode) : 0);
3145 if (ret)
3146 return ret;
3147 else
3148 /* simplify_gen_subreg may fail for sub-word MEMs. */
3149 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3152 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3153 true, NULL_RTX, imode, imode);
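/* Illustrative sketch (not compiled): write_complex_part and
   read_complex_part are static to this file; within it they make
   operations on complex values mode-agnostic.  For example, a helper that
   copies a complex value with its parts swapped could look like this
   (DEST and SRC are placeholders).  */
#if 0
static void
sketch_copy_complex_swapped (rtx dest, rtx src)
{
  rtx re = read_complex_part (src, false);
  rtx im = read_complex_part (src, true);

  /* Write the parts crosswise into DEST.  */
  write_complex_part (dest, im, false);
  write_complex_part (dest, re, true);
}
#endif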
3156 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3157 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3158 represented in NEW_MODE. If FORCE is true, this will never happen, as
3159 we'll force-create a SUBREG if needed. */
3161 static rtx
3162 emit_move_change_mode (enum machine_mode new_mode,
3163 enum machine_mode old_mode, rtx x, bool force)
3165 rtx ret;
3167 if (push_operand (x, GET_MODE (x)))
3169 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3170 MEM_COPY_ATTRIBUTES (ret, x);
3172 else if (MEM_P (x))
3174 /* We don't have to worry about changing the address since the
3175 size in bytes is supposed to be the same. */
3176 if (reload_in_progress)
3178 /* Copy the MEM to change the mode and move any
3179 substitutions from the old MEM to the new one. */
3180 ret = adjust_address_nv (x, new_mode, 0);
3181 copy_replacements (x, ret);
3183 else
3184 ret = adjust_address (x, new_mode, 0);
3186 else
3188 /* Note that we do want simplify_subreg's behavior of validating
3189 that the new mode is ok for a hard register. If we were to use
3190 simplify_gen_subreg, we would create the subreg, but would
3191 probably run into the target not being able to implement it. */
3192 /* Except, of course, when FORCE is true, when this is exactly what
3193 we want. Which is needed for CCmodes on some targets. */
3194 if (force)
3195 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3196 else
3197 ret = simplify_subreg (new_mode, x, old_mode, 0);
3200 return ret;
3203 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3204 an integer mode of the same size as MODE. Returns the instruction
3205 emitted, or NULL if such a move could not be generated. */
3207 static rtx
3208 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3210 enum machine_mode imode;
3211 enum insn_code code;
3213 /* There must exist a mode of the exact size we require. */
3214 imode = int_mode_for_mode (mode);
3215 if (imode == BLKmode)
3216 return NULL_RTX;
3218 /* The target must support moves in this mode. */
3219 code = optab_handler (mov_optab, imode);
3220 if (code == CODE_FOR_nothing)
3221 return NULL_RTX;
3223 x = emit_move_change_mode (imode, mode, x, force);
3224 if (x == NULL_RTX)
3225 return NULL_RTX;
3226 y = emit_move_change_mode (imode, mode, y, force);
3227 if (y == NULL_RTX)
3228 return NULL_RTX;
3229 return emit_insn (GEN_FCN (code) (x, y));
3232 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3233 Return an equivalent MEM that does not use an auto-increment. */
3236 emit_move_resolve_push (enum machine_mode mode, rtx x)
3238 enum rtx_code code = GET_CODE (XEXP (x, 0));
3239 HOST_WIDE_INT adjust;
3240 rtx temp;
3242 adjust = GET_MODE_SIZE (mode);
3243 #ifdef PUSH_ROUNDING
3244 adjust = PUSH_ROUNDING (adjust);
3245 #endif
3246 if (code == PRE_DEC || code == POST_DEC)
3247 adjust = -adjust;
3248 else if (code == PRE_MODIFY || code == POST_MODIFY)
3250 rtx expr = XEXP (XEXP (x, 0), 1);
3251 HOST_WIDE_INT val;
3253 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3254 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3255 val = INTVAL (XEXP (expr, 1));
3256 if (GET_CODE (expr) == MINUS)
3257 val = -val;
3258 gcc_assert (adjust == val || adjust == -val);
3259 adjust = val;
3262 /* Do not use anti_adjust_stack, since we don't want to update
3263 stack_pointer_delta. */
3264 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3265 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3266 0, OPTAB_LIB_WIDEN);
3267 if (temp != stack_pointer_rtx)
3268 emit_move_insn (stack_pointer_rtx, temp);
3270 switch (code)
3272 case PRE_INC:
3273 case PRE_DEC:
3274 case PRE_MODIFY:
3275 temp = stack_pointer_rtx;
3276 break;
3277 case POST_INC:
3278 case POST_DEC:
3279 case POST_MODIFY:
3280 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3281 break;
3282 default:
3283 gcc_unreachable ();
3286 return replace_equiv_address (x, temp);
3289 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3290 X is known to satisfy push_operand, and MODE is known to be complex.
3291 Returns the last instruction emitted. */
3294 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3296 enum machine_mode submode = GET_MODE_INNER (mode);
3297 bool imag_first;
3299 #ifdef PUSH_ROUNDING
3300 unsigned int submodesize = GET_MODE_SIZE (submode);
3302 /* In case we output to the stack, but the size is smaller than the
3303 machine can push exactly, we need to use move instructions. */
3304 if (PUSH_ROUNDING (submodesize) != submodesize)
3306 x = emit_move_resolve_push (mode, x);
3307 return emit_move_insn (x, y);
3309 #endif
3311 /* Note that the real part always precedes the imag part in memory
3312 regardless of machine's endianness. */
3313 switch (GET_CODE (XEXP (x, 0)))
3315 case PRE_DEC:
3316 case POST_DEC:
3317 imag_first = true;
3318 break;
3319 case PRE_INC:
3320 case POST_INC:
3321 imag_first = false;
3322 break;
3323 default:
3324 gcc_unreachable ();
3327 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3328 read_complex_part (y, imag_first));
3329 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3330 read_complex_part (y, !imag_first));
3333 /* A subroutine of emit_move_complex. Perform the move from Y to X
3334 via two moves of the parts. Returns the last instruction emitted. */
3337 emit_move_complex_parts (rtx x, rtx y)
3339 /* Show the output dies here. This is necessary for SUBREGs
3340 of pseudos since we cannot track their lifetimes correctly;
3341 hard regs shouldn't appear here except as return values. */
3342 if (!reload_completed && !reload_in_progress
3343 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3344 emit_clobber (x);
3346 write_complex_part (x, read_complex_part (y, false), false);
3347 write_complex_part (x, read_complex_part (y, true), true);
3349 return get_last_insn ();
3352 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3353 MODE is known to be complex. Returns the last instruction emitted. */
3355 static rtx
3356 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3358 bool try_int;
3360 /* Need to take special care for pushes, to maintain proper ordering
3361 of the data, and possibly extra padding. */
3362 if (push_operand (x, mode))
3363 return emit_move_complex_push (mode, x, y);
3365 /* See if we can coerce the target into moving both values at once, except
3366 for floating point where we favor moving as parts if this is easy. */
3367 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3368 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3369 && !(REG_P (x)
3370 && HARD_REGISTER_P (x)
3371 && hard_regno_nregs[REGNO (x)][mode] == 1)
3372 && !(REG_P (y)
3373 && HARD_REGISTER_P (y)
3374 && hard_regno_nregs[REGNO (y)][mode] == 1))
3375 try_int = false;
3376 /* Not possible if the values are inherently not adjacent. */
3377 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3378 try_int = false;
3379 /* Is possible if both are registers (or subregs of registers). */
3380 else if (register_operand (x, mode) && register_operand (y, mode))
3381 try_int = true;
3382 /* If one of the operands is a memory, and alignment constraints
3383 are friendly enough, we may be able to do combined memory operations.
3384 We do not attempt this if Y is a constant because that combination is
3385 usually better with the by-parts thing below. */
3386 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3387 && (!STRICT_ALIGNMENT
3388 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3389 try_int = true;
3390 else
3391 try_int = false;
3393 if (try_int)
3395 rtx ret;
3397 /* For memory to memory moves, optimal behavior can be had with the
3398 existing block move logic. */
3399 if (MEM_P (x) && MEM_P (y))
3401 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3402 BLOCK_OP_NO_LIBCALL);
3403 return get_last_insn ();
3406 ret = emit_move_via_integer (mode, x, y, true);
3407 if (ret)
3408 return ret;
3411 return emit_move_complex_parts (x, y);
3414 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3415 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3417 static rtx
3418 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3420 rtx ret;
3422 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3423 if (mode != CCmode)
3425 enum insn_code code = optab_handler (mov_optab, CCmode);
3426 if (code != CODE_FOR_nothing)
3428 x = emit_move_change_mode (CCmode, mode, x, true);
3429 y = emit_move_change_mode (CCmode, mode, y, true);
3430 return emit_insn (GEN_FCN (code) (x, y));
3434 /* Otherwise, find the MODE_INT mode of the same width. */
3435 ret = emit_move_via_integer (mode, x, y, false);
3436 gcc_assert (ret != NULL);
3437 return ret;
3440 /* Return true if word I of OP lies entirely in the
3441 undefined bits of a paradoxical subreg. */
3443 static bool
3444 undefined_operand_subword_p (const_rtx op, int i)
3446 enum machine_mode innermode, innermostmode;
3447 int offset;
3448 if (GET_CODE (op) != SUBREG)
3449 return false;
3450 innermode = GET_MODE (op);
3451 innermostmode = GET_MODE (SUBREG_REG (op));
3452 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3453 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3454 memory, except for a paradoxical subreg where we define
3455 SUBREG_BYTE to be 0; undo this exception as in
3456 simplify_subreg. */
3457 if (SUBREG_BYTE (op) == 0
3458 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3460 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3461 if (WORDS_BIG_ENDIAN)
3462 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3463 if (BYTES_BIG_ENDIAN)
3464 offset += difference % UNITS_PER_WORD;
3466 if (offset >= GET_MODE_SIZE (innermostmode)
3467 || offset <= -GET_MODE_SIZE (word_mode))
3468 return true;
3469 return false;
3472 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3473 MODE is any multi-word or full-word mode that lacks a move_insn
3474 pattern. Note that you will get better code if you define such
3475 patterns, even if they must turn into multiple assembler instructions. */
3477 static rtx
3478 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3480 rtx last_insn = 0;
3481 rtx seq, inner;
3482 bool need_clobber;
3483 int i;
3485 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3487 /* If X is a push on the stack, do the push now and replace
3488 X with a reference to the stack pointer. */
3489 if (push_operand (x, mode))
3490 x = emit_move_resolve_push (mode, x);
3492 /* If we are in reload, see if either operand is a MEM whose address
3493 is scheduled for replacement. */
3494 if (reload_in_progress && MEM_P (x)
3495 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3496 x = replace_equiv_address_nv (x, inner);
3497 if (reload_in_progress && MEM_P (y)
3498 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3499 y = replace_equiv_address_nv (y, inner);
3501 start_sequence ();
3503 need_clobber = false;
3504 for (i = 0;
3505 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3506 i++)
3508 rtx xpart = operand_subword (x, i, 1, mode);
3509 rtx ypart;
3511 /* Do not generate code for a move if it would come entirely
3512 from the undefined bits of a paradoxical subreg. */
3513 if (undefined_operand_subword_p (y, i))
3514 continue;
3516 ypart = operand_subword (y, i, 1, mode);
3518 /* If we can't get a part of Y, put Y into memory if it is a
3519 constant. Otherwise, force it into a register. Then we must
3520 be able to get a part of Y. */
3521 if (ypart == 0 && CONSTANT_P (y))
3523 y = use_anchored_address (force_const_mem (mode, y));
3524 ypart = operand_subword (y, i, 1, mode);
3526 else if (ypart == 0)
3527 ypart = operand_subword_force (y, i, mode);
3529 gcc_assert (xpart && ypart);
3531 need_clobber |= (GET_CODE (xpart) == SUBREG);
3533 last_insn = emit_move_insn (xpart, ypart);
3536 seq = get_insns ();
3537 end_sequence ();
3539 /* Show the output dies here. This is necessary for SUBREGs
3540 of pseudos since we cannot track their lifetimes correctly;
3541 hard regs shouldn't appear here except as return values.
3542 We never want to emit such a clobber after reload. */
3543 if (x != y
3544 && ! (reload_in_progress || reload_completed)
3545 && need_clobber != 0)
3546 emit_clobber (x);
3548 emit_insn (seq);
3550 return last_insn;
3553 /* Low level part of emit_move_insn.
3554 Called just like emit_move_insn, but assumes X and Y
3555 are basically valid. */
3558 emit_move_insn_1 (rtx x, rtx y)
3560 enum machine_mode mode = GET_MODE (x);
3561 enum insn_code code;
3563 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3565 code = optab_handler (mov_optab, mode);
3566 if (code != CODE_FOR_nothing)
3567 return emit_insn (GEN_FCN (code) (x, y));
3569 /* Expand complex moves by moving real part and imag part. */
3570 if (COMPLEX_MODE_P (mode))
3571 return emit_move_complex (mode, x, y);
3573 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3574 || ALL_FIXED_POINT_MODE_P (mode))
3576 rtx result = emit_move_via_integer (mode, x, y, true);
3578 /* If we can't find an integer mode, use multi words. */
3579 if (result)
3580 return result;
3581 else
3582 return emit_move_multi_word (mode, x, y);
3585 if (GET_MODE_CLASS (mode) == MODE_CC)
3586 return emit_move_ccmode (mode, x, y);
3588 /* Try using a move pattern for the corresponding integer mode. This is
3589 only safe when simplify_subreg can convert MODE constants into integer
3590 constants. At present, it can only do this reliably if the value
3591 fits within a HOST_WIDE_INT. */
3592 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3594 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3596 if (ret)
3598 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3599 return ret;
3603 return emit_move_multi_word (mode, x, y);
3606 /* Generate code to copy Y into X.
3607 Both Y and X must have the same mode, except that
3608 Y can be a constant with VOIDmode.
3609 This mode cannot be BLKmode; use emit_block_move for that.
3611 Return the last instruction emitted. */
3614 emit_move_insn (rtx x, rtx y)
3616 enum machine_mode mode = GET_MODE (x);
3617 rtx y_cst = NULL_RTX;
3618 rtx last_insn, set;
3620 gcc_assert (mode != BLKmode
3621 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3623 if (CONSTANT_P (y))
3625 if (optimize
3626 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3627 && (last_insn = compress_float_constant (x, y)))
3628 return last_insn;
3630 y_cst = y;
3632 if (!targetm.legitimate_constant_p (mode, y))
3634 y = force_const_mem (mode, y);
3636 /* If the target's cannot_force_const_mem prevented the spill,
3637 assume that the target's move expanders will also take care
3638 of the non-legitimate constant. */
3639 if (!y)
3640 y = y_cst;
3641 else
3642 y = use_anchored_address (y);
3646 /* If X or Y are memory references, verify that their addresses are valid
3647 for the machine. */
3648 if (MEM_P (x)
3649 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3650 MEM_ADDR_SPACE (x))
3651 && ! push_operand (x, GET_MODE (x))))
3652 x = validize_mem (x);
3654 if (MEM_P (y)
3655 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3656 MEM_ADDR_SPACE (y)))
3657 y = validize_mem (y);
3659 gcc_assert (mode != BLKmode);
3661 last_insn = emit_move_insn_1 (x, y);
3663 if (y_cst && REG_P (x)
3664 && (set = single_set (last_insn)) != NULL_RTX
3665 && SET_DEST (set) == x
3666 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3667 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3669 return last_insn;
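/* Illustrative sketch (not compiled): the most common pattern during
   expansion is simply moving a constant or another operand into a fresh
   pseudo; this is roughly what copy_to_mode_reg already provides.  */
#if 0
static rtx
sketch_force_into_pseudo (rtx y, enum machine_mode mode)
{
  rtx tmp = gen_reg_rtx (mode);

  /* Y may be a VOIDmode constant or any rtx of MODE.  */
  emit_move_insn (tmp, y);
  return tmp;
}
#endif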
3672 /* If Y is representable exactly in a narrower mode, and the target can
3673 perform the extension directly from constant or memory, then emit the
3674 move as an extension. */
3676 static rtx
3677 compress_float_constant (rtx x, rtx y)
3679 enum machine_mode dstmode = GET_MODE (x);
3680 enum machine_mode orig_srcmode = GET_MODE (y);
3681 enum machine_mode srcmode;
3682 REAL_VALUE_TYPE r;
3683 int oldcost, newcost;
3684 bool speed = optimize_insn_for_speed_p ();
3686 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3688 if (targetm.legitimate_constant_p (dstmode, y))
3689 oldcost = set_src_cost (y, speed);
3690 else
3691 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3693 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3694 srcmode != orig_srcmode;
3695 srcmode = GET_MODE_WIDER_MODE (srcmode))
3697 enum insn_code ic;
3698 rtx trunc_y, last_insn;
3700 /* Skip if the target can't extend this way. */
3701 ic = can_extend_p (dstmode, srcmode, 0);
3702 if (ic == CODE_FOR_nothing)
3703 continue;
3705 /* Skip if the narrowed value isn't exact. */
3706 if (! exact_real_truncate (srcmode, &r))
3707 continue;
3709 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3711 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3713 /* Skip if the target needs extra instructions to perform
3714 the extension. */
3715 if (!insn_operand_matches (ic, 1, trunc_y))
3716 continue;
3717 /* This is valid, but may not be cheaper than the original. */
3718 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3719 speed);
3720 if (oldcost < newcost)
3721 continue;
3723 else if (float_extend_from_mem[dstmode][srcmode])
3725 trunc_y = force_const_mem (srcmode, trunc_y);
3726 /* This is valid, but may not be cheaper than the original. */
3727 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3728 speed);
3729 if (oldcost < newcost)
3730 continue;
3731 trunc_y = validize_mem (trunc_y);
3733 else
3734 continue;
3736 /* For CSE's benefit, force the compressed constant pool entry
3737 into a new pseudo. This constant may be used in different modes,
3738 and if not, combine will put things back together for us. */
3739 trunc_y = force_reg (srcmode, trunc_y);
3741 /* If x is a hard register, perform the extension into a pseudo,
3742 so that e.g. stack realignment code is aware of it. */
3743 rtx target = x;
3744 if (REG_P (x) && HARD_REGISTER_P (x))
3745 target = gen_reg_rtx (dstmode);
3747 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3748 last_insn = get_last_insn ();
3750 if (REG_P (target))
3751 set_unique_reg_note (last_insn, REG_EQUAL, y);
3753 if (target != x)
3754 return emit_move_insn (x, target);
3755 return last_insn;
3758 return NULL_RTX;
3761 /* Pushing data onto the stack. */
3763 /* Push a block of length SIZE (perhaps variable)
3764 and return an rtx to address the beginning of the block.
3765 The value may be virtual_outgoing_args_rtx.
3767 EXTRA is the number of bytes of padding to push in addition to SIZE.
3768 BELOW nonzero means this padding comes at low addresses;
3769 otherwise, the padding comes at high addresses. */
3771 rtx
3772 push_block (rtx size, int extra, int below)
3774 rtx temp;
3776 size = convert_modes (Pmode, ptr_mode, size, 1);
3777 if (CONSTANT_P (size))
3778 anti_adjust_stack (plus_constant (Pmode, size, extra));
3779 else if (REG_P (size) && extra == 0)
3780 anti_adjust_stack (size);
3781 else
3783 temp = copy_to_mode_reg (Pmode, size);
3784 if (extra != 0)
3785 temp = expand_binop (Pmode, add_optab, temp,
3786 gen_int_mode (extra, Pmode),
3787 temp, 0, OPTAB_LIB_WIDEN);
3788 anti_adjust_stack (temp);
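/* The space has been allocated; now compute its address.  When the stack
   grows downward the new block starts at virtual_outgoing_args_rtx (offset
   by EXTRA if the padding lies below); otherwise it lies SIZE bytes (plus
   EXTRA, if the padding lies above) below that address.  */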
3791 #ifndef STACK_GROWS_DOWNWARD
3792 if (0)
3793 #else
3794 if (1)
3795 #endif
3797 temp = virtual_outgoing_args_rtx;
3798 if (extra != 0 && below)
3799 temp = plus_constant (Pmode, temp, extra);
3801 else
3803 if (CONST_INT_P (size))
3804 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3805 -INTVAL (size) - (below ? 0 : extra));
3806 else if (extra != 0 && !below)
3807 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3808 negate_rtx (Pmode, plus_constant (Pmode, size,
3809 extra)));
3810 else
3811 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3812 negate_rtx (Pmode, size));
3815 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3818 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3820 static rtx
3821 mem_autoinc_base (rtx mem)
3823 if (MEM_P (mem))
3825 rtx addr = XEXP (mem, 0);
3826 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3827 return XEXP (addr, 0);
3829 return NULL;
3832 /* A utility routine used here, in reload, and in try_split. The insns
3833 after PREV up to and including LAST are known to adjust the stack,
3834 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3835 placing notes as appropriate. PREV may be NULL, indicating the
3836 entire insn sequence prior to LAST should be scanned.
3838 The set of allowed stack pointer modifications is small:
3839 (1) One or more auto-inc style memory references (aka pushes),
3840 (2) One or more addition/subtraction with the SP as destination,
3841 (3) A single move insn with the SP as destination,
3842 (4) A call_pop insn,
3843 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3845 Insns in the sequence that do not modify the SP are ignored,
3846 except for noreturn calls.
3848 The return value is the amount of adjustment that can be trivially
3849 verified, via immediate operand or auto-inc. If the adjustment
3850 cannot be trivially extracted, the return value is INT_MIN. */
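/* For instance (illustrative only), a word-sized push on a target where the
   stack grows downward usually looks like

     (set (mem:SI (pre_dec (reg sp))) (reg:SI ...))

   which case (1) above covers; the adjustment extracted for it is
   -GET_MODE_SIZE (SImode).  */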
3852 HOST_WIDE_INT
3853 find_args_size_adjust (rtx insn)
3855 rtx dest, set, pat;
3856 int i;
3858 pat = PATTERN (insn);
3859 set = NULL;
3861 /* Look for a call_pop pattern. */
3862 if (CALL_P (insn))
3864 /* We have to allow non-call_pop patterns for the case
3865 of emit_single_push_insn of a TLS address. */
3866 if (GET_CODE (pat) != PARALLEL)
3867 return 0;
3869 /* All call_pop have a stack pointer adjust in the parallel.
3870 The call itself is always first, and the stack adjust is
3871 usually last, so search from the end. */
3872 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3874 set = XVECEXP (pat, 0, i);
3875 if (GET_CODE (set) != SET)
3876 continue;
3877 dest = SET_DEST (set);
3878 if (dest == stack_pointer_rtx)
3879 break;
3881 /* We'd better have found the stack pointer adjust. */
3882 if (i == 0)
3883 return 0;
3884 /* Fall through to process the extracted SET and DEST
3885 as if it was a standalone insn. */
3887 else if (GET_CODE (pat) == SET)
3888 set = pat;
3889 else if ((set = single_set (insn)) != NULL)
3891 else if (GET_CODE (pat) == PARALLEL)
3893 /* ??? Some older ports use a parallel with a stack adjust
3894 and a store for a PUSH_ROUNDING pattern, rather than a
3895 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3896 /* ??? See h8300 and m68k, pushqi1. */
3897 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3899 set = XVECEXP (pat, 0, i);
3900 if (GET_CODE (set) != SET)
3901 continue;
3902 dest = SET_DEST (set);
3903 if (dest == stack_pointer_rtx)
3904 break;
3906 /* We do not expect an auto-inc of the sp in the parallel. */
3907 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3908 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3909 != stack_pointer_rtx);
3911 if (i < 0)
3912 return 0;
3914 else
3915 return 0;
3917 dest = SET_DEST (set);
3919 /* Look for direct modifications of the stack pointer. */
3920 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3922 /* Look for a trivial adjustment, otherwise assume nothing. */
3923 /* Note that the SPU restore_stack_block pattern refers to
3924 the stack pointer in V4SImode. Consider that non-trivial. */
3925 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3926 && GET_CODE (SET_SRC (set)) == PLUS
3927 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3928 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3929 return INTVAL (XEXP (SET_SRC (set), 1));
3930 /* ??? Reload can generate no-op moves, which will be cleaned
3931 up later. Recognize it and continue searching. */
3932 else if (rtx_equal_p (dest, SET_SRC (set)))
3933 return 0;
3934 else
3935 return HOST_WIDE_INT_MIN;
3937 else
3939 rtx mem, addr;
3941 /* Otherwise only think about autoinc patterns. */
3942 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3944 mem = dest;
3945 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3946 != stack_pointer_rtx);
3948 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3949 mem = SET_SRC (set);
3950 else
3951 return 0;
3953 addr = XEXP (mem, 0);
3954 switch (GET_CODE (addr))
3956 case PRE_INC:
3957 case POST_INC:
3958 return GET_MODE_SIZE (GET_MODE (mem));
3959 case PRE_DEC:
3960 case POST_DEC:
3961 return -GET_MODE_SIZE (GET_MODE (mem));
3962 case PRE_MODIFY:
3963 case POST_MODIFY:
3964 addr = XEXP (addr, 1);
3965 gcc_assert (GET_CODE (addr) == PLUS);
3966 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3967 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3968 return INTVAL (XEXP (addr, 1));
3969 default:
3970 gcc_unreachable ();
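/* Walk backward from LAST to just after PREV, using find_args_size_adjust on
   each insn and attaching a REG_ARGS_SIZE note that records the argument-area
   size after that insn, working backward from END_ARGS_SIZE.  The return
   value is the args size at the start of the sequence, or INT_MIN if some
   adjustment could not be determined.  */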
3975 int
3976 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3978 int args_size = end_args_size;
3979 bool saw_unknown = false;
3980 rtx insn;
3982 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3984 HOST_WIDE_INT this_delta;
3986 if (!NONDEBUG_INSN_P (insn))
3987 continue;
3989 this_delta = find_args_size_adjust (insn);
3990 if (this_delta == 0)
3992 if (!CALL_P (insn)
3993 || ACCUMULATE_OUTGOING_ARGS
3994 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3995 continue;
3998 gcc_assert (!saw_unknown);
3999 if (this_delta == HOST_WIDE_INT_MIN)
4000 saw_unknown = true;
4002 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4003 #ifdef STACK_GROWS_DOWNWARD
4004 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4005 #endif
4006 args_size -= this_delta;
4009 return saw_unknown ? INT_MIN : args_size;
4012 #ifdef PUSH_ROUNDING
4013 /* Emit single push insn. */
4015 static void
4016 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
4018 rtx dest_addr;
4019 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4020 rtx dest;
4021 enum insn_code icode;
4023 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4024 /* If there is a push pattern, use it. Otherwise fall back to the old way
4025 of handing the move expander a MEM that represents the push operation. */
4026 icode = optab_handler (push_optab, mode);
4027 if (icode != CODE_FOR_nothing)
4029 struct expand_operand ops[1];
4031 create_input_operand (&ops[0], x, mode);
4032 if (maybe_expand_insn (icode, 1, ops))
4033 return;
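/* No usable push pattern (or it failed); build the destination MEM ourselves.
   If no rounding is needed, a single STACK_PUSH_CODE auto-modification of the
   stack pointer addresses the slot directly.  */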
4035 if (GET_MODE_SIZE (mode) == rounded_size)
4036 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4037 /* If we are to pad downward, adjust the stack pointer first and
4038 then store X into the stack location using an offset. This is
4039 because emit_move_insn does not know how to pad; it does not have
4040 access to type. */
4041 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4043 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4044 HOST_WIDE_INT offset;
4046 emit_move_insn (stack_pointer_rtx,
4047 expand_binop (Pmode,
4048 #ifdef STACK_GROWS_DOWNWARD
4049 sub_optab,
4050 #else
4051 add_optab,
4052 #endif
4053 stack_pointer_rtx,
4054 gen_int_mode (rounded_size, Pmode),
4055 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4057 offset = (HOST_WIDE_INT) padding_size;
4058 #ifdef STACK_GROWS_DOWNWARD
4059 if (STACK_PUSH_CODE == POST_DEC)
4060 /* We have already decremented the stack pointer, so get the
4061 previous value. */
4062 offset += (HOST_WIDE_INT) rounded_size;
4063 #else
4064 if (STACK_PUSH_CODE == POST_INC)
4065 /* We have already incremented the stack pointer, so get the
4066 previous value. */
4067 offset -= (HOST_WIDE_INT) rounded_size;
4068 #endif
4069 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4070 gen_int_mode (offset, Pmode));
4072 else
4074 #ifdef STACK_GROWS_DOWNWARD
4075 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4076 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4077 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4078 Pmode));
4079 #else
4080 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4081 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4082 gen_int_mode (rounded_size, Pmode));
4083 #endif
4084 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4087 dest = gen_rtx_MEM (mode, dest_addr);
4089 if (type != 0)
4091 set_mem_attributes (dest, type, 1);
4093 if (cfun->tail_call_marked)
4094 /* Function incoming arguments may overlap with sibling call
4095 outgoing arguments and we cannot allow reordering of reads
4096 from function arguments with stores to outgoing arguments
4097 of sibling calls. */
4098 set_mem_alias_set (dest, 0);
4100 emit_move_insn (dest, x);
4103 /* Emit and annotate a single push insn. */
4105 static void
4106 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4108 int delta, old_delta = stack_pointer_delta;
4109 rtx prev = get_last_insn ();
4110 rtx last;
4112 emit_single_push_insn_1 (mode, x, type);
4114 last = get_last_insn ();
4116 /* Notice the common case where we emitted exactly one insn. */
4117 if (PREV_INSN (last) == prev)
4119 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4120 return;
4123 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4124 gcc_assert (delta == INT_MIN || delta == old_delta);
4126 #endif
4128 /* Generate code to push X onto the stack, assuming it has mode MODE and
4129 type TYPE.
4130 MODE is redundant except when X is a CONST_INT (since they don't
4131 carry mode info).
4132 SIZE is an rtx for the size of data to be copied (in bytes),
4133 needed only if X is BLKmode.
4135 ALIGN (in bits) is maximum alignment we can assume.
4137 If PARTIAL and REG are both nonzero, then copy that many of the first
4138 bytes of X into registers starting with REG, and push the rest of X.
4139 The amount of space pushed is decreased by PARTIAL bytes.
4140 REG must be a hard register in this case.
4141 If REG is zero but PARTIAL is not, take all other actions for an
4142 argument partially in registers, but do not actually load any
4143 registers.
4145 EXTRA is the amount in bytes of extra space to leave next to this arg.
4146 This is ignored if an argument block has already been allocated.
4148 On a machine that lacks real push insns, ARGS_ADDR is the address of
4149 the bottom of the argument block for this call. We use indexing off there
4150 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4151 argument block has not been preallocated.
4153 ARGS_SO_FAR is the size of args previously pushed for this call.
4155 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4156 for arguments passed in registers. If nonzero, it will be the number
4157 of bytes required. */
4159 void
4160 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4161 unsigned int align, int partial, rtx reg, int extra,
4162 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4163 rtx alignment_pad)
4165 rtx xinner;
4166 enum direction stack_direction
4167 #ifdef STACK_GROWS_DOWNWARD
4168 = downward;
4169 #else
4170 = upward;
4171 #endif
4173 /* Decide where to pad the argument: `downward' for below,
4174 `upward' for above, or `none' for don't pad it.
4175 Default is below for small data on big-endian machines; else above. */
4176 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4178 /* Invert direction if stack is post-decrement.
4179 FIXME: why? */
4180 if (STACK_PUSH_CODE == POST_DEC)
4181 if (where_pad != none)
4182 where_pad = (where_pad == downward ? upward : downward);
4184 xinner = x;
4186 if (mode == BLKmode
4187 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4189 /* Copy a block into the stack, entirely or partially. */
4191 rtx temp;
4192 int used;
4193 int offset;
4194 int skip;
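/* Split PARTIAL into OFFSET, the bytes that do not fill a whole
   PARM_BOUNDARY unit, and USED, the rest rounded down to a multiple of
   the boundary; USED is what the registers will cover.  */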
4196 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4197 used = partial - offset;
4199 if (mode != BLKmode)
4201 /* A value is to be stored in an insufficiently aligned
4202 stack slot; copy via a suitably aligned slot if
4203 necessary. */
4204 size = GEN_INT (GET_MODE_SIZE (mode));
4205 if (!MEM_P (xinner))
4207 temp = assign_temp (type, 1, 1);
4208 emit_move_insn (temp, xinner);
4209 xinner = temp;
4213 gcc_assert (size);
4215 /* USED is now the # of bytes we need not copy to the stack
4216 because registers will take care of them. */
4218 if (partial != 0)
4219 xinner = adjust_address (xinner, BLKmode, used);
4221 /* If the partial register-part of the arg counts in its stack size,
4222 skip the part of stack space corresponding to the registers.
4223 Otherwise, start copying to the beginning of the stack space,
4224 by setting SKIP to 0. */
4225 skip = (reg_parm_stack_space == 0) ? 0 : used;
4227 #ifdef PUSH_ROUNDING
4228 /* Do it with several push insns if that doesn't take lots of insns
4229 and if there is no difficulty with push insns that skip bytes
4230 on the stack for alignment purposes. */
4231 if (args_addr == 0
4232 && PUSH_ARGS
4233 && CONST_INT_P (size)
4234 && skip == 0
4235 && MEM_ALIGN (xinner) >= align
4236 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4237 /* Here we avoid the case of a structure whose weak alignment
4238 forces many pushes of a small amount of data,
4239 and such small pushes do rounding that causes trouble. */
4240 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4241 || align >= BIGGEST_ALIGNMENT
4242 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4243 == (align / BITS_PER_UNIT)))
4244 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4246 /* Push padding now if padding above and stack grows down,
4247 or if padding below and stack grows up.
4248 But if space already allocated, this has already been done. */
4249 if (extra && args_addr == 0
4250 && where_pad != none && where_pad != stack_direction)
4251 anti_adjust_stack (GEN_INT (extra));
4253 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4255 else
4256 #endif /* PUSH_ROUNDING */
4258 rtx target;
4260 /* Otherwise make space on the stack and copy the data
4261 to the address of that space. */
4263 /* Deduct words put into registers from the size we must copy. */
4264 if (partial != 0)
4266 if (CONST_INT_P (size))
4267 size = GEN_INT (INTVAL (size) - used);
4268 else
4269 size = expand_binop (GET_MODE (size), sub_optab, size,
4270 gen_int_mode (used, GET_MODE (size)),
4271 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4274 /* Get the address of the stack space.
4275 In this case, we do not deal with EXTRA separately.
4276 A single stack adjust will do. */
4277 if (! args_addr)
4279 temp = push_block (size, extra, where_pad == downward);
4280 extra = 0;
4282 else if (CONST_INT_P (args_so_far))
4283 temp = memory_address (BLKmode,
4284 plus_constant (Pmode, args_addr,
4285 skip + INTVAL (args_so_far)));
4286 else
4287 temp = memory_address (BLKmode,
4288 plus_constant (Pmode,
4289 gen_rtx_PLUS (Pmode,
4290 args_addr,
4291 args_so_far),
4292 skip));
4294 if (!ACCUMULATE_OUTGOING_ARGS)
4296 /* If the source is referenced relative to the stack pointer,
4297 copy it to another register to stabilize it. We do not need
4298 to do this if we know that we won't be changing sp. */
4300 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4301 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4302 temp = copy_to_reg (temp);
4305 target = gen_rtx_MEM (BLKmode, temp);
4307 /* We do *not* set_mem_attributes here, because incoming arguments
4308 may overlap with sibling call outgoing arguments and we cannot
4309 allow reordering of reads from function arguments with stores
4310 to outgoing arguments of sibling calls. We do, however, want
4311 to record the alignment of the stack slot. */
4312 /* ALIGN may well be better aligned than TYPE, e.g. due to
4313 PARM_BOUNDARY. Assume the caller isn't lying. */
4314 set_mem_align (target, align);
4316 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4319 else if (partial > 0)
4321 /* Scalar partly in registers. */
4323 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4324 int i;
4325 int not_stack;
4326 /* # bytes of start of argument
4327 that we must make space for but need not store. */
4328 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4329 int args_offset = INTVAL (args_so_far);
4330 int skip;
4332 /* Push padding now if padding above and stack grows down,
4333 or if padding below and stack grows up.
4334 But if space already allocated, this has already been done. */
4335 if (extra && args_addr == 0
4336 && where_pad != none && where_pad != stack_direction)
4337 anti_adjust_stack (GEN_INT (extra));
4339 /* If we make space by pushing it, we might as well push
4340 the real data. Otherwise, we can leave OFFSET nonzero
4341 and leave the space uninitialized. */
4342 if (args_addr == 0)
4343 offset = 0;
4345 /* Now NOT_STACK gets the number of words that we don't need to
4346 allocate on the stack. Convert OFFSET to words too. */
4347 not_stack = (partial - offset) / UNITS_PER_WORD;
4348 offset /= UNITS_PER_WORD;
4350 /* If the partial register-part of the arg counts in its stack size,
4351 skip the part of stack space corresponding to the registers.
4352 Otherwise, start copying to the beginning of the stack space,
4353 by setting SKIP to 0. */
4354 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4356 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4357 x = validize_mem (force_const_mem (mode, x));
4359 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4360 SUBREGs of such registers are not allowed. */
4361 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4362 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4363 x = copy_to_reg (x);
4365 /* Loop over all the words allocated on the stack for this arg. */
4366 /* We can do it by words, because any scalar bigger than a word
4367 has a size a multiple of a word. */
4368 #ifndef PUSH_ARGS_REVERSED
4369 for (i = not_stack; i < size; i++)
4370 #else
4371 for (i = size - 1; i >= not_stack; i--)
4372 #endif
4373 if (i >= not_stack + offset)
4374 emit_push_insn (operand_subword_force (x, i, mode),
4375 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4376 0, args_addr,
4377 GEN_INT (args_offset + ((i - not_stack + skip)
4378 * UNITS_PER_WORD)),
4379 reg_parm_stack_space, alignment_pad);
4381 else
4383 rtx addr;
4384 rtx dest;
4386 /* Push padding now if padding above and stack grows down,
4387 or if padding below and stack grows up.
4388 But if space already allocated, this has already been done. */
4389 if (extra && args_addr == 0
4390 && where_pad != none && where_pad != stack_direction)
4391 anti_adjust_stack (GEN_INT (extra));
4393 #ifdef PUSH_ROUNDING
4394 if (args_addr == 0 && PUSH_ARGS)
4395 emit_single_push_insn (mode, x, type);
4396 else
4397 #endif
4399 if (CONST_INT_P (args_so_far))
4400 addr
4401 = memory_address (mode,
4402 plus_constant (Pmode, args_addr,
4403 INTVAL (args_so_far)));
4404 else
4405 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4406 args_so_far));
4407 dest = gen_rtx_MEM (mode, addr);
4409 /* We do *not* set_mem_attributes here, because incoming arguments
4410 may overlap with sibling call outgoing arguments and we cannot
4411 allow reordering of reads from function arguments with stores
4412 to outgoing arguments of sibling calls. We do, however, want
4413 to record the alignment of the stack slot. */
4414 /* ALIGN may well be better aligned than TYPE, e.g. due to
4415 PARM_BOUNDARY. Assume the caller isn't lying. */
4416 set_mem_align (dest, align);
4418 emit_move_insn (dest, x);
4422 /* If part should go in registers, copy that part
4423 into the appropriate registers. Do this now, at the end,
4424 since mem-to-mem copies above may do function calls. */
4425 if (partial > 0 && reg != 0)
4427 /* Handle calls that pass values in multiple non-contiguous locations.
4428 The Irix 6 ABI has examples of this. */
4429 if (GET_CODE (reg) == PARALLEL)
4430 emit_group_load (reg, x, type, -1);
4431 else
4433 gcc_assert (partial % UNITS_PER_WORD == 0);
4434 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4438 if (extra && args_addr == 0 && where_pad == stack_direction)
4439 anti_adjust_stack (GEN_INT (extra));
4441 if (alignment_pad && args_addr == 0)
4442 anti_adjust_stack (alignment_pad);
4445 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4446 operations. */
4448 static rtx
4449 get_subtarget (rtx x)
4451 return (optimize
4452 || x == 0
4453 /* Only registers can be subtargets. */
4454 || !REG_P (x)
4455 /* Don't use hard regs to avoid extending their life. */
4456 || REGNO (x) < FIRST_PSEUDO_REGISTER
4457 ? 0 : x);
4460 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4461 FIELD is a bitfield. Returns true if the optimization was successful,
4462 and there's nothing else to do. */
4464 static bool
4465 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4466 unsigned HOST_WIDE_INT bitpos,
4467 unsigned HOST_WIDE_INT bitregion_start,
4468 unsigned HOST_WIDE_INT bitregion_end,
4469 enum machine_mode mode1, rtx str_rtx,
4470 tree to, tree src)
4472 enum machine_mode str_mode = GET_MODE (str_rtx);
4473 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4474 tree op0, op1;
4475 rtx value, result;
4476 optab binop;
4477 gimple srcstmt;
4478 enum tree_code code;
4480 if (mode1 != VOIDmode
4481 || bitsize >= BITS_PER_WORD
4482 || str_bitsize > BITS_PER_WORD
4483 || TREE_SIDE_EFFECTS (to)
4484 || TREE_THIS_VOLATILE (to))
4485 return false;
4487 STRIP_NOPS (src);
4488 if (TREE_CODE (src) != SSA_NAME)
4489 return false;
4490 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4491 return false;
4493 srcstmt = get_gimple_for_ssa_name (src);
4494 if (!srcstmt
4495 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4496 return false;
4498 code = gimple_assign_rhs_code (srcstmt);
4500 op0 = gimple_assign_rhs1 (srcstmt);
4502 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4503 to find its initialization. Hopefully the initialization will
4504 be from a bitfield load. */
4505 if (TREE_CODE (op0) == SSA_NAME)
4507 gimple op0stmt = get_gimple_for_ssa_name (op0);
4509 /* We want to eventually have OP0 be the same as TO, which
4510 should be a bitfield. */
4511 if (!op0stmt
4512 || !is_gimple_assign (op0stmt)
4513 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4514 return false;
4515 op0 = gimple_assign_rhs1 (op0stmt);
4518 op1 = gimple_assign_rhs2 (srcstmt);
4520 if (!operand_equal_p (to, op0, 0))
4521 return false;
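/* If the destination is in memory, narrow it to the best machine mode that
   covers the bit field without leaving the permitted bit region, and make
   BITPOS relative to that narrowed reference.  */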
4523 if (MEM_P (str_rtx))
4525 unsigned HOST_WIDE_INT offset1;
4527 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4528 str_mode = word_mode;
4529 str_mode = get_best_mode (bitsize, bitpos,
4530 bitregion_start, bitregion_end,
4531 MEM_ALIGN (str_rtx), str_mode, 0);
4532 if (str_mode == VOIDmode)
4533 return false;
4534 str_bitsize = GET_MODE_BITSIZE (str_mode);
4536 offset1 = bitpos;
4537 bitpos %= str_bitsize;
4538 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4539 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4541 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4542 return false;
4544 /* If the bit field covers the whole REG/MEM, store_field
4545 will likely generate better code. */
4546 if (bitsize >= str_bitsize)
4547 return false;
4549 /* We can't handle fields split across multiple entities. */
4550 if (bitpos + bitsize > str_bitsize)
4551 return false;
4553 if (BYTES_BIG_ENDIAN)
4554 bitpos = str_bitsize - bitpos - bitsize;
4556 switch (code)
4558 case PLUS_EXPR:
4559 case MINUS_EXPR:
4560 /* For now, just optimize the case of the topmost bitfield
4561 where we don't need to do any masking and also
4562 1 bit bitfields where xor can be used.
4563 We might win by one instruction for the other bitfields
4564 too if insv/extv instructions aren't used, so that
4565 can be added later. */
4566 if (bitpos + bitsize != str_bitsize
4567 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4568 break;
4570 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4571 value = convert_modes (str_mode,
4572 TYPE_MODE (TREE_TYPE (op1)), value,
4573 TYPE_UNSIGNED (TREE_TYPE (op1)));
4575 /* We may be accessing data outside the field, which means
4576 we can alias adjacent data. */
4577 if (MEM_P (str_rtx))
4579 str_rtx = shallow_copy_rtx (str_rtx);
4580 set_mem_alias_set (str_rtx, 0);
4581 set_mem_expr (str_rtx, 0);
4584 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4585 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4587 value = expand_and (str_mode, value, const1_rtx, NULL);
4588 binop = xor_optab;
4590 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4591 result = expand_binop (str_mode, binop, str_rtx,
4592 value, str_rtx, 1, OPTAB_WIDEN);
4593 if (result != str_rtx)
4594 emit_move_insn (str_rtx, result);
4595 return true;
4597 case BIT_IOR_EXPR:
4598 case BIT_XOR_EXPR:
4599 if (TREE_CODE (op1) != INTEGER_CST)
4600 break;
4601 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4602 value = convert_modes (str_mode,
4603 TYPE_MODE (TREE_TYPE (op1)), value,
4604 TYPE_UNSIGNED (TREE_TYPE (op1)));
4606 /* We may be accessing data outside the field, which means
4607 we can alias adjacent data. */
4608 if (MEM_P (str_rtx))
4610 str_rtx = shallow_copy_rtx (str_rtx);
4611 set_mem_alias_set (str_rtx, 0);
4612 set_mem_expr (str_rtx, 0);
4615 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4616 if (bitpos + bitsize != str_bitsize)
4618 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4619 str_mode);
4620 value = expand_and (str_mode, value, mask, NULL_RTX);
4622 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4623 result = expand_binop (str_mode, binop, str_rtx,
4624 value, str_rtx, 1, OPTAB_WIDEN);
4625 if (result != str_rtx)
4626 emit_move_insn (str_rtx, result);
4627 return true;
4629 default:
4630 break;
4633 return false;
4636 /* In the C++ memory model, consecutive bit fields in a structure are
4637 considered one memory location.
4639 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4640 returns the bit range of consecutive bits in which this COMPONENT_REF
4641 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4642 and *OFFSET may be adjusted in the process.
4644 If the access does not need to be restricted, 0 is returned in both
4645 *BITSTART and *BITEND. */
4647 static void
4648 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4649 unsigned HOST_WIDE_INT *bitend,
4650 tree exp,
4651 HOST_WIDE_INT *bitpos,
4652 tree *offset)
4654 HOST_WIDE_INT bitoffset;
4655 tree field, repr;
4657 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4659 field = TREE_OPERAND (exp, 1);
4660 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4661 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4662 need to limit the range we can access. */
4663 if (!repr)
4665 *bitstart = *bitend = 0;
4666 return;
4669 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4670 part of a larger bit field, then the representative does not serve any
4671 useful purpose. This can occur in Ada. */
4672 if (handled_component_p (TREE_OPERAND (exp, 0)))
4674 enum machine_mode rmode;
4675 HOST_WIDE_INT rbitsize, rbitpos;
4676 tree roffset;
4677 int unsignedp;
4678 int volatilep = 0;
4679 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4680 &roffset, &rmode, &unsignedp, &volatilep, false);
4681 if ((rbitpos % BITS_PER_UNIT) != 0)
4683 *bitstart = *bitend = 0;
4684 return;
4688 /* Compute the adjustment to bitpos from the offset of the field
4689 relative to the representative. DECL_FIELD_OFFSET of field and
4690 repr are the same by construction if they are not constants,
4691 see finish_bitfield_layout. */
4692 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4693 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4694 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4695 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4696 else
4697 bitoffset = 0;
4698 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4699 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4701 /* If the adjustment is larger than bitpos, we would have a negative bit
4702 position for the lower bound and this may wreak havoc later. Adjust
4703 offset and bitpos to make the lower bound non-negative in that case. */
4704 if (bitoffset > *bitpos)
4706 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4707 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4709 *bitpos += adjust;
4710 if (*offset == NULL_TREE)
4711 *offset = size_int (-adjust / BITS_PER_UNIT);
4712 else
4713 *offset
4714 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4715 *bitstart = 0;
4717 else
4718 *bitstart = *bitpos - bitoffset;
4720 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4723 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4724 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4725 DECL_RTL was not set yet, return NORTL. */
4727 static inline bool
4728 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4730 if (TREE_CODE (addr) != ADDR_EXPR)
4731 return false;
4733 tree base = TREE_OPERAND (addr, 0);
4735 if (!DECL_P (base)
4736 || TREE_ADDRESSABLE (base)
4737 || DECL_MODE (base) == BLKmode)
4738 return false;
4740 if (!DECL_RTL_SET_P (base))
4741 return nortl;
4743 return (!MEM_P (DECL_RTL (base)));
4746 /* Returns true if the MEM_REF REF refers to an object that does not
4747 reside in memory and has non-BLKmode. */
4749 static inline bool
4750 mem_ref_refers_to_non_mem_p (tree ref)
4752 tree base = TREE_OPERAND (ref, 0);
4753 return addr_expr_of_non_mem_decl_p_1 (base, false);
4756 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4757 is true, try generating a nontemporal store. */
4759 void
4760 expand_assignment (tree to, tree from, bool nontemporal)
4762 rtx to_rtx = 0;
4763 rtx result;
4764 enum machine_mode mode;
4765 unsigned int align;
4766 enum insn_code icode;
4768 /* Don't crash if the lhs of the assignment was erroneous. */
4769 if (TREE_CODE (to) == ERROR_MARK)
4771 expand_normal (from);
4772 return;
4775 /* Optimize away no-op moves without side-effects. */
4776 if (operand_equal_p (to, from, 0))
4777 return;
4779 /* Handle misaligned stores. */
4780 mode = TYPE_MODE (TREE_TYPE (to));
4781 if ((TREE_CODE (to) == MEM_REF
4782 || TREE_CODE (to) == TARGET_MEM_REF)
4783 && mode != BLKmode
4784 && !mem_ref_refers_to_non_mem_p (to)
4785 && ((align = get_object_alignment (to))
4786 < GET_MODE_ALIGNMENT (mode))
4787 && (((icode = optab_handler (movmisalign_optab, mode))
4788 != CODE_FOR_nothing)
4789 || SLOW_UNALIGNED_ACCESS (mode, align)))
4791 rtx reg, mem;
4793 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4794 reg = force_not_mem (reg);
4795 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4797 if (icode != CODE_FOR_nothing)
4799 struct expand_operand ops[2];
4801 create_fixed_operand (&ops[0], mem);
4802 create_input_operand (&ops[1], reg, mode);
4803 /* The movmisalign<mode> pattern cannot fail, else the assignment
4804 would silently be omitted. */
4805 expand_insn (icode, 2, ops);
4807 else
4808 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4809 return;
4812 /* Assignment of a structure component needs special treatment
4813 if the structure component's rtx is not simply a MEM.
4814 Assignment of an array element at a constant index, and assignment of
4815 an array element in an unaligned packed structure field, has the same
4816 problem. Same for (partially) storing into a non-memory object. */
4817 if (handled_component_p (to)
4818 || (TREE_CODE (to) == MEM_REF
4819 && mem_ref_refers_to_non_mem_p (to))
4820 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4822 enum machine_mode mode1;
4823 HOST_WIDE_INT bitsize, bitpos;
4824 unsigned HOST_WIDE_INT bitregion_start = 0;
4825 unsigned HOST_WIDE_INT bitregion_end = 0;
4826 tree offset;
4827 int unsignedp;
4828 int volatilep = 0;
4829 tree tem;
4831 push_temp_slots ();
4832 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4833 &unsignedp, &volatilep, true);
4835 /* Make sure bitpos is not negative, it can wreak havoc later. */
4836 if (bitpos < 0)
4838 gcc_assert (offset == NULL_TREE);
4839 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4840 ? 3 : exact_log2 (BITS_PER_UNIT)));
4841 bitpos &= BITS_PER_UNIT - 1;
4844 if (TREE_CODE (to) == COMPONENT_REF
4845 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4846 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4847 /* The C++ memory model naturally applies to byte-aligned fields.
4848 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4849 BITSIZE are not byte-aligned, there is no need to limit the range
4850 we can access. This can occur with packed structures in Ada. */
4851 else if (bitsize > 0
4852 && bitsize % BITS_PER_UNIT == 0
4853 && bitpos % BITS_PER_UNIT == 0)
4855 bitregion_start = bitpos;
4856 bitregion_end = bitpos + bitsize - 1;
4859 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4861 /* If the field has a mode, we want to access it in the
4862 field's mode, not the computed mode.
4863 If a MEM has VOIDmode (external with incomplete type),
4864 use BLKmode for it instead. */
4865 if (MEM_P (to_rtx))
4867 if (mode1 != VOIDmode)
4868 to_rtx = adjust_address (to_rtx, mode1, 0);
4869 else if (GET_MODE (to_rtx) == VOIDmode)
4870 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4873 if (offset != 0)
4875 enum machine_mode address_mode;
4876 rtx offset_rtx;
4878 if (!MEM_P (to_rtx))
4880 /* We can get constant negative offsets into arrays with broken
4881 user code. Translate this to a trap instead of ICEing. */
4882 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4883 expand_builtin_trap ();
4884 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4887 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4888 address_mode = get_address_mode (to_rtx);
4889 if (GET_MODE (offset_rtx) != address_mode)
4890 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4892 /* The check for a constant address in TO_RTX not having VOIDmode
4893 is probably no longer necessary. */
4894 if (MEM_P (to_rtx)
4895 && GET_MODE (to_rtx) == BLKmode
4896 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4897 && bitsize > 0
4898 && (bitpos % bitsize) == 0
4899 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4900 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4902 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4903 bitregion_start = 0;
4904 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4905 bitregion_end -= bitpos;
4906 bitpos = 0;
4909 to_rtx = offset_address (to_rtx, offset_rtx,
4910 highest_pow2_factor_for_target (to,
4911 offset));
4914 /* No action is needed if the target is not a memory and the field
4915 lies completely outside that target. This can occur if the source
4916 code contains an out-of-bounds access to a small array. */
4917 if (!MEM_P (to_rtx)
4918 && GET_MODE (to_rtx) != BLKmode
4919 && (unsigned HOST_WIDE_INT) bitpos
4920 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4922 expand_normal (from);
4923 result = NULL;
4925 /* Handle expand_expr of a complex value returning a CONCAT. */
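/* The cases below store the whole value, exactly one half, or a field that
   fits within a single half directly; a field that straddles both halves is
   handled by spilling to a stack temporary, updating it, and copying both
   halves back.  */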
4926 else if (GET_CODE (to_rtx) == CONCAT)
4928 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4929 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4930 && bitpos == 0
4931 && bitsize == mode_bitsize)
4932 result = store_expr (from, to_rtx, false, nontemporal);
4933 else if (bitsize == mode_bitsize / 2
4934 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4935 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4936 nontemporal);
4937 else if (bitpos + bitsize <= mode_bitsize / 2)
4938 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4939 bitregion_start, bitregion_end,
4940 mode1, from,
4941 get_alias_set (to), nontemporal);
4942 else if (bitpos >= mode_bitsize / 2)
4943 result = store_field (XEXP (to_rtx, 1), bitsize,
4944 bitpos - mode_bitsize / 2,
4945 bitregion_start, bitregion_end,
4946 mode1, from,
4947 get_alias_set (to), nontemporal);
4948 else if (bitpos == 0 && bitsize == mode_bitsize)
4950 rtx from_rtx;
4951 result = expand_normal (from);
4952 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4953 TYPE_MODE (TREE_TYPE (from)), 0);
4954 emit_move_insn (XEXP (to_rtx, 0),
4955 read_complex_part (from_rtx, false));
4956 emit_move_insn (XEXP (to_rtx, 1),
4957 read_complex_part (from_rtx, true));
4959 else
4961 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4962 GET_MODE_SIZE (GET_MODE (to_rtx)));
4963 write_complex_part (temp, XEXP (to_rtx, 0), false);
4964 write_complex_part (temp, XEXP (to_rtx, 1), true);
4965 result = store_field (temp, bitsize, bitpos,
4966 bitregion_start, bitregion_end,
4967 mode1, from,
4968 get_alias_set (to), nontemporal);
4969 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4970 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4973 else
4975 if (MEM_P (to_rtx))
4977 /* If the field is at offset zero, we could have been given the
4978 DECL_RTX of the parent struct. Don't munge it. */
4979 to_rtx = shallow_copy_rtx (to_rtx);
4980 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4981 if (volatilep)
4982 MEM_VOLATILE_P (to_rtx) = 1;
4985 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4986 bitregion_start, bitregion_end,
4987 mode1,
4988 to_rtx, to, from))
4989 result = NULL;
4990 else
4991 result = store_field (to_rtx, bitsize, bitpos,
4992 bitregion_start, bitregion_end,
4993 mode1, from,
4994 get_alias_set (to), nontemporal);
4997 if (result)
4998 preserve_temp_slots (result);
4999 pop_temp_slots ();
5000 return;
5003 /* If the rhs is a function call and its value is not an aggregate,
5004 call the function before we start to compute the lhs.
5005 This is needed for correct code for cases such as
5006 val = setjmp (buf) on machines where reference to val
5007 requires loading up part of an address in a separate insn.
5009 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5010 since it might be a promoted variable where the zero- or sign- extension
5011 needs to be done. Handling this in the normal way is safe because no
5012 computation is done before the call. The same is true for SSA names. */
5013 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5014 && COMPLETE_TYPE_P (TREE_TYPE (from))
5015 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5016 && ! (((TREE_CODE (to) == VAR_DECL
5017 || TREE_CODE (to) == PARM_DECL
5018 || TREE_CODE (to) == RESULT_DECL)
5019 && REG_P (DECL_RTL (to)))
5020 || TREE_CODE (to) == SSA_NAME))
5022 rtx value;
5024 push_temp_slots ();
5025 value = expand_normal (from);
5026 if (to_rtx == 0)
5027 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5029 /* Handle calls that return values in multiple non-contiguous locations.
5030 The Irix 6 ABI has examples of this. */
5031 if (GET_CODE (to_rtx) == PARALLEL)
5033 if (GET_CODE (value) == PARALLEL)
5034 emit_group_move (to_rtx, value);
5035 else
5036 emit_group_load (to_rtx, value, TREE_TYPE (from),
5037 int_size_in_bytes (TREE_TYPE (from)));
5039 else if (GET_CODE (value) == PARALLEL)
5040 emit_group_store (to_rtx, value, TREE_TYPE (from),
5041 int_size_in_bytes (TREE_TYPE (from)));
5042 else if (GET_MODE (to_rtx) == BLKmode)
5044 /* Handle calls that return BLKmode values in registers. */
5045 if (REG_P (value))
5046 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5047 else
5048 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5050 else
5052 if (POINTER_TYPE_P (TREE_TYPE (to)))
5053 value = convert_memory_address_addr_space
5054 (GET_MODE (to_rtx), value,
5055 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5057 emit_move_insn (to_rtx, value);
5059 preserve_temp_slots (to_rtx);
5060 pop_temp_slots ();
5061 return;
5064 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5065 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5067 /* Don't move directly into a return register. */
5068 if (TREE_CODE (to) == RESULT_DECL
5069 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5071 rtx temp;
5073 push_temp_slots ();
5075 /* If the source is itself a return value, it still is in a pseudo at
5076 this point so we can move it back to the return register directly. */
5077 if (REG_P (to_rtx)
5078 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5079 && TREE_CODE (from) != CALL_EXPR)
5080 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5081 else
5082 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5084 /* Handle calls that return values in multiple non-contiguous locations.
5085 The Irix 6 ABI has examples of this. */
5086 if (GET_CODE (to_rtx) == PARALLEL)
5088 if (GET_CODE (temp) == PARALLEL)
5089 emit_group_move (to_rtx, temp);
5090 else
5091 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5092 int_size_in_bytes (TREE_TYPE (from)));
5094 else if (temp)
5095 emit_move_insn (to_rtx, temp);
5097 preserve_temp_slots (to_rtx);
5098 pop_temp_slots ();
5099 return;
5102 /* In case we are returning the contents of an object which overlaps
5103 the place the value is being stored, use a safe function when copying
5104 a value through a pointer into a structure value return block. */
5105 if (TREE_CODE (to) == RESULT_DECL
5106 && TREE_CODE (from) == INDIRECT_REF
5107 && ADDR_SPACE_GENERIC_P
5108 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5109 && refs_may_alias_p (to, from)
5110 && cfun->returns_struct
5111 && !cfun->returns_pcc_struct)
5113 rtx from_rtx, size;
5115 push_temp_slots ();
5116 size = expr_size (from);
5117 from_rtx = expand_normal (from);
5119 emit_library_call (memmove_libfunc, LCT_NORMAL,
5120 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5121 XEXP (from_rtx, 0), Pmode,
5122 convert_to_mode (TYPE_MODE (sizetype),
5123 size, TYPE_UNSIGNED (sizetype)),
5124 TYPE_MODE (sizetype));
5126 preserve_temp_slots (to_rtx);
5127 pop_temp_slots ();
5128 return;
5131 /* Compute FROM and store the value in the rtx we got. */
5133 push_temp_slots ();
5134 result = store_expr (from, to_rtx, 0, nontemporal);
5135 preserve_temp_slots (result);
5136 pop_temp_slots ();
5137 return;
5140 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5141 succeeded, false otherwise. */
5143 bool
5144 emit_storent_insn (rtx to, rtx from)
5146 struct expand_operand ops[2];
5147 enum machine_mode mode = GET_MODE (to);
5148 enum insn_code code = optab_handler (storent_optab, mode);
5150 if (code == CODE_FOR_nothing)
5151 return false;
5153 create_fixed_operand (&ops[0], to);
5154 create_input_operand (&ops[1], from, mode);
5155 return maybe_expand_insn (code, 2, ops);
5158 /* Generate code for computing expression EXP,
5159 and storing the value into TARGET.
5161 If the mode is BLKmode then we may return TARGET itself.
5162 It turns out that in BLKmode it doesn't cause a problem,
5163 because C has no operators that could combine two different
5164 assignments into the same BLKmode object with different values
5165 with no sequence point. Will other languages need this to
5166 be more thorough?
5168 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5169 stack, and block moves may need to be treated specially.
5171 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5173 rtx
5174 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5176 rtx temp;
5177 rtx alt_rtl = NULL_RTX;
5178 location_t loc = curr_insn_location ();
5180 if (VOID_TYPE_P (TREE_TYPE (exp)))
5182 /* C++ can generate ?: expressions with a throw expression in one
5183 branch and an rvalue in the other. Here, we resolve attempts to
5184 store the throw expression's nonexistent result. */
5185 gcc_assert (!call_param_p);
5186 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5187 return NULL_RTX;
5189 if (TREE_CODE (exp) == COMPOUND_EXPR)
5191 /* Perform first part of compound expression, then assign from second
5192 part. */
5193 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5194 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5195 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5196 nontemporal);
5198 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5200 /* For conditional expression, get safe form of the target. Then
5201 test the condition, doing the appropriate assignment on either
5202 side. This avoids the creation of unnecessary temporaries.
5203 For non-BLKmode, it is more efficient not to do this. */
5205 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5207 do_pending_stack_adjust ();
5208 NO_DEFER_POP;
5209 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5210 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5211 nontemporal);
5212 emit_jump_insn (gen_jump (lab2));
5213 emit_barrier ();
5214 emit_label (lab1);
5215 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5216 nontemporal);
5217 emit_label (lab2);
5218 OK_DEFER_POP;
5220 return NULL_RTX;
5222 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5223 /* If this is a scalar in a register that is stored in a wider mode
5224 than the declared mode, compute the result into its declared mode
5225 and then convert to the wider mode. Our value is the computed
5226 expression. */
5228 rtx inner_target = 0;
5230 /* We can do the conversion inside EXP, which will often result
5231 in some optimizations. Do the conversion in two steps: first
5232 change the signedness, if needed, then the extend. But don't
5233 do this if the type of EXP is a subtype of something else
5234 since then the conversion might involve more than just
5235 converting modes. */
5236 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5237 && TREE_TYPE (TREE_TYPE (exp)) == 0
5238 && GET_MODE_PRECISION (GET_MODE (target))
5239 == TYPE_PRECISION (TREE_TYPE (exp)))
5241 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5242 != SUBREG_PROMOTED_UNSIGNED_P (target))
5244 /* Some types, e.g. Fortran's logical*4, won't have a signed
5245 version, so use the mode instead. */
5246 tree ntype
5247 = (signed_or_unsigned_type_for
5248 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5249 if (ntype == NULL)
5250 ntype = lang_hooks.types.type_for_mode
5251 (TYPE_MODE (TREE_TYPE (exp)),
5252 SUBREG_PROMOTED_UNSIGNED_P (target));
5254 exp = fold_convert_loc (loc, ntype, exp);
5257 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5258 (GET_MODE (SUBREG_REG (target)),
5259 SUBREG_PROMOTED_UNSIGNED_P (target)),
5260 exp);
5262 inner_target = SUBREG_REG (target);
5265 temp = expand_expr (exp, inner_target, VOIDmode,
5266 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5268 /* If TEMP is a VOIDmode constant, use convert_modes to make
5269 sure that we properly convert it. */
5270 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5272 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5273 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5274 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5275 GET_MODE (target), temp,
5276 SUBREG_PROMOTED_UNSIGNED_P (target));
5279 convert_move (SUBREG_REG (target), temp,
5280 SUBREG_PROMOTED_UNSIGNED_P (target));
5282 return NULL_RTX;
5284 else if ((TREE_CODE (exp) == STRING_CST
5285 || (TREE_CODE (exp) == MEM_REF
5286 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5287 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5288 == STRING_CST
5289 && integer_zerop (TREE_OPERAND (exp, 1))))
5290 && !nontemporal && !call_param_p
5291 && MEM_P (target))
5293 /* Optimize initialization of an array with a STRING_CST. */
5294 HOST_WIDE_INT exp_len, str_copy_len;
5295 rtx dest_mem;
5296 tree str = TREE_CODE (exp) == STRING_CST
5297 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5299 exp_len = int_expr_size (exp);
5300 if (exp_len <= 0)
5301 goto normal_expr;
5303 if (TREE_STRING_LENGTH (str) <= 0)
5304 goto normal_expr;
5306 str_copy_len = strlen (TREE_STRING_POINTER (str));
5307 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5308 goto normal_expr;
5310 str_copy_len = TREE_STRING_LENGTH (str);
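/* If STORE_MAX_PIECES is a power of two and the string is NUL-terminated,
   round the copy length up to a multiple of STORE_MAX_PIECES; it is clamped
   to the object size just below.  */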
5311 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5312 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5314 str_copy_len += STORE_MAX_PIECES - 1;
5315 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5317 str_copy_len = MIN (str_copy_len, exp_len);
5318 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5319 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5320 MEM_ALIGN (target), false))
5321 goto normal_expr;
5323 dest_mem = target;
5325 dest_mem = store_by_pieces (dest_mem,
5326 str_copy_len, builtin_strncpy_read_str,
5327 CONST_CAST (char *,
5328 TREE_STRING_POINTER (str)),
5329 MEM_ALIGN (target), false,
5330 exp_len > str_copy_len ? 1 : 0);
5331 if (exp_len > str_copy_len)
5332 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5333 GEN_INT (exp_len - str_copy_len),
5334 BLOCK_OP_NORMAL);
5335 return NULL_RTX;
5337 else
5339 rtx tmp_target;
5341 normal_expr:
5342 /* If we want to use a nontemporal store, force the value to
5343 register first. */
5344 tmp_target = nontemporal ? NULL_RTX : target;
5345 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5346 (call_param_p
5347 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5348 &alt_rtl, false);
5351 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5352 the same as that of TARGET, adjust the constant. This is needed, for
5353 example, in case it is a CONST_DOUBLE and we want only a word-sized
5354 value. */
5355 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5356 && TREE_CODE (exp) != ERROR_MARK
5357 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5358 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5359 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5361 /* If value was not generated in the target, store it there.
5362 Convert the value to TARGET's type first if necessary and emit the
5363 pending incrementations that have been queued when expanding EXP.
5364 Note that we cannot emit the whole queue blindly because this will
5365 effectively disable the POST_INC optimization later.
5367 If TEMP and TARGET compare equal according to rtx_equal_p, but
5368 one or both of them are volatile memory refs, we have to distinguish
5369 two cases:
5370 - expand_expr has used TARGET. In this case, we must not generate
5371 another copy. This can be detected by TARGET being equal according
5372 to == .
5373 - expand_expr has not used TARGET - that means that the source just
5374 happens to have the same RTX form. Since temp will have been created
5375 by expand_expr, it will compare unequal according to == .
5376 We must generate a copy in this case, to reach the correct number
5377 of volatile memory references. */
5379 if ((! rtx_equal_p (temp, target)
5380 || (temp != target && (side_effects_p (temp)
5381 || side_effects_p (target))))
5382 && TREE_CODE (exp) != ERROR_MARK
5383 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5384 but TARGET is not valid memory reference, TEMP will differ
5385 from TARGET although it is really the same location. */
5386 && !(alt_rtl
5387 && rtx_equal_p (alt_rtl, target)
5388 && !side_effects_p (alt_rtl)
5389 && !side_effects_p (target))
5390 /* If there's nothing to copy, don't bother. Don't call
5391 expr_size unless necessary, because some front-ends (C++)
5392 expr_size-hook must not be given objects that are not
5393 supposed to be bit-copied or bit-initialized. */
5394 && expr_size (exp) != const0_rtx)
5396 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5398 if (GET_MODE (target) == BLKmode)
5400 /* Handle calls that return BLKmode values in registers. */
5401 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5402 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5403 else
5404 store_bit_field (target,
5405 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5406 0, 0, 0, GET_MODE (temp), temp);
5408 else
5409 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5414 /* Handle copying a string constant into an array. The string
5415 constant may be shorter than the array. So copy just the string's
5416 actual length, and clear the rest. First get the size of the data
5417 type of the string, which is actually the size of the target. */
5418 rtx size = expr_size (exp);
5420 if (CONST_INT_P (size)
5421 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5422 emit_block_move (target, temp, size,
5423 (call_param_p
5424 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5425 else
5427 enum machine_mode pointer_mode
5428 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5429 enum machine_mode address_mode = get_address_mode (target);
5431 /* Compute the size of the data to copy from the string. */
5432 tree copy_size
5433 = size_binop_loc (loc, MIN_EXPR,
5434 make_tree (sizetype, size),
5435 size_int (TREE_STRING_LENGTH (exp)));
5436 rtx copy_size_rtx
5437 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5438 (call_param_p
5439 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5440 rtx label = 0;
5442 /* Copy that much. */
5443 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5444 TYPE_UNSIGNED (sizetype));
5445 emit_block_move (target, temp, copy_size_rtx,
5446 (call_param_p
5447 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5449 /* Figure out how much is left in TARGET that we have to clear.
5450 Do all calculations in pointer_mode. */
5451 if (CONST_INT_P (copy_size_rtx))
5453 size = plus_constant (address_mode, size,
5454 -INTVAL (copy_size_rtx));
5455 target = adjust_address (target, BLKmode,
5456 INTVAL (copy_size_rtx));
5458 else
5460 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5461 copy_size_rtx, NULL_RTX, 0,
5462 OPTAB_LIB_WIDEN);
5464 if (GET_MODE (copy_size_rtx) != address_mode)
5465 copy_size_rtx = convert_to_mode (address_mode,
5466 copy_size_rtx,
5467 TYPE_UNSIGNED (sizetype));
5469 target = offset_address (target, copy_size_rtx,
5470 highest_pow2_factor (copy_size));
5471 label = gen_label_rtx ();
5472 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5473 GET_MODE (size), 0, label);
5476 if (size != const0_rtx)
5477 clear_storage (target, size, BLOCK_OP_NORMAL);
5479 if (label)
5480 emit_label (label);
5483 /* Handle calls that return values in multiple non-contiguous locations.
5484 The Irix 6 ABI has examples of this. */
5485 else if (GET_CODE (target) == PARALLEL)
5487 if (GET_CODE (temp) == PARALLEL)
5488 emit_group_move (target, temp);
5489 else
5490 emit_group_load (target, temp, TREE_TYPE (exp),
5491 int_size_in_bytes (TREE_TYPE (exp)));
5493 else if (GET_CODE (temp) == PARALLEL)
5494 emit_group_store (target, temp, TREE_TYPE (exp),
5495 int_size_in_bytes (TREE_TYPE (exp)));
5496 else if (GET_MODE (temp) == BLKmode)
5497 emit_block_move (target, temp, expr_size (exp),
5498 (call_param_p
5499 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5500 /* If we emit a nontemporal store, there is nothing else to do. */
5501 else if (nontemporal && emit_storent_insn (target, temp))
5503 else
5505 temp = force_operand (temp, target);
5506 if (temp != target)
5507 emit_move_insn (target, temp);
5511 return NULL_RTX;
5514 /* Return true if field F of structure TYPE is a flexible array member. */
5516 static bool
5517 flexible_array_member_p (const_tree f, const_tree type)
5519 const_tree tf;
5521 tf = TREE_TYPE (f);
5522 return (DECL_CHAIN (f) == NULL
5523 && TREE_CODE (tf) == ARRAY_TYPE
5524 && TYPE_DOMAIN (tf)
5525 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5526 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5527 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5528 && int_size_in_bytes (type) >= 0);
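/* For example (hypothetical): the member DATA in
     struct s { int len; char data[]; };
   passes every test above -- it is the last field, its array domain has a
   zero lower bound and no upper bound, and the enclosing struct has a
   known constant size.  */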
5531 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5532 must have in order for it to completely initialize a value of type TYPE.
5533 Return -1 if the number isn't known.
5535 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5537 static HOST_WIDE_INT
5538 count_type_elements (const_tree type, bool for_ctor_p)
5540 switch (TREE_CODE (type))
5542 case ARRAY_TYPE:
5544 tree nelts;
5546 nelts = array_type_nelts (type);
5547 if (nelts && tree_fits_uhwi_p (nelts))
5549 unsigned HOST_WIDE_INT n;
5551 n = tree_to_uhwi (nelts) + 1;
5552 if (n == 0 || for_ctor_p)
5553 return n;
5554 else
5555 return n * count_type_elements (TREE_TYPE (type), false);
5557 return for_ctor_p ? -1 : 1;
5560 case RECORD_TYPE:
5562 unsigned HOST_WIDE_INT n;
5563 tree f;
5565 n = 0;
5566 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5567 if (TREE_CODE (f) == FIELD_DECL)
5569 if (!for_ctor_p)
5570 n += count_type_elements (TREE_TYPE (f), false);
5571 else if (!flexible_array_member_p (f, type))
5572 /* Don't count flexible arrays, which are not supposed
5573 to be initialized. */
5574 n += 1;
5577 return n;
5580 case UNION_TYPE:
5581 case QUAL_UNION_TYPE:
5583 tree f;
5584 HOST_WIDE_INT n, m;
5586 gcc_assert (!for_ctor_p);
5587 /* Estimate the number of scalars in each field and pick the
5588 maximum. Other estimates would do instead; the idea is simply
5589 to make sure that the estimate is not sensitive to the ordering
5590 of the fields. */
5591 n = 1;
5592 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5593 if (TREE_CODE (f) == FIELD_DECL)
5595 m = count_type_elements (TREE_TYPE (f), false);
5596 /* If the field doesn't span the whole union, add an extra
5597 scalar for the rest. */
5598 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5599 TYPE_SIZE (type)) != 1)
5600 m++;
5601 if (n < m)
5602 n = m;
5604 return n;
5607 case COMPLEX_TYPE:
5608 return 2;
5610 case VECTOR_TYPE:
5611 return TYPE_VECTOR_SUBPARTS (type);
5613 case INTEGER_TYPE:
5614 case REAL_TYPE:
5615 case FIXED_POINT_TYPE:
5616 case ENUMERAL_TYPE:
5617 case BOOLEAN_TYPE:
5618 case POINTER_TYPE:
5619 case OFFSET_TYPE:
5620 case REFERENCE_TYPE:
5621 case NULLPTR_TYPE:
5622 return 1;
5624 case ERROR_MARK:
5625 return 0;
5627 case VOID_TYPE:
5628 case METHOD_TYPE:
5629 case FUNCTION_TYPE:
5630 case LANG_TYPE:
5631 default:
5632 gcc_unreachable ();
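/* Worked example (hypothetical type): for  struct s { int a; float b[3]; }
   count_type_elements returns 2 when FOR_CTOR_P (the two top-level fields
   a constructor must provide) and 4 otherwise (one scalar for A plus the
   three elements of B).  */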
5636 /* Helper for categorize_ctor_elements. Identical interface. */
5638 static bool
5639 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5640 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5642 unsigned HOST_WIDE_INT idx;
5643 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5644 tree value, purpose, elt_type;
5646 /* Whether CTOR is a valid constant initializer, in accordance with what
5647 initializer_constant_valid_p does. If inferred from the constructor
5648 elements, true until proven otherwise. */
5649 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5650 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5652 nz_elts = 0;
5653 init_elts = 0;
5654 num_fields = 0;
5655 elt_type = NULL_TREE;
5657 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5659 HOST_WIDE_INT mult = 1;
5661 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5663 tree lo_index = TREE_OPERAND (purpose, 0);
5664 tree hi_index = TREE_OPERAND (purpose, 1);
5666 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5667 mult = (tree_to_uhwi (hi_index)
5668 - tree_to_uhwi (lo_index) + 1);
5670 num_fields += mult;
5671 elt_type = TREE_TYPE (value);
5673 switch (TREE_CODE (value))
5675 case CONSTRUCTOR:
5677 HOST_WIDE_INT nz = 0, ic = 0;
5679 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5680 p_complete);
5682 nz_elts += mult * nz;
5683 init_elts += mult * ic;
5685 if (const_from_elts_p && const_p)
5686 const_p = const_elt_p;
5688 break;
5690 case INTEGER_CST:
5691 case REAL_CST:
5692 case FIXED_CST:
5693 if (!initializer_zerop (value))
5694 nz_elts += mult;
5695 init_elts += mult;
5696 break;
5698 case STRING_CST:
5699 nz_elts += mult * TREE_STRING_LENGTH (value);
5700 init_elts += mult * TREE_STRING_LENGTH (value);
5701 break;
5703 case COMPLEX_CST:
5704 if (!initializer_zerop (TREE_REALPART (value)))
5705 nz_elts += mult;
5706 if (!initializer_zerop (TREE_IMAGPART (value)))
5707 nz_elts += mult;
5708 init_elts += mult;
5709 break;
5711 case VECTOR_CST:
5713 unsigned i;
5714 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5716 tree v = VECTOR_CST_ELT (value, i);
5717 if (!initializer_zerop (v))
5718 nz_elts += mult;
5719 init_elts += mult;
5722 break;
5724 default:
5726 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5727 nz_elts += mult * tc;
5728 init_elts += mult * tc;
5730 if (const_from_elts_p && const_p)
5731 const_p = initializer_constant_valid_p (value, elt_type)
5732 != NULL_TREE;
5734 break;
5738 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5739 num_fields, elt_type))
5740 *p_complete = false;
5742 *p_nz_elts += nz_elts;
5743 *p_init_elts += init_elts;
5745 return const_p;
5748 /* Examine CTOR to discover:
5749 * how many scalar fields are set to nonzero values,
5750 and place it in *P_NZ_ELTS;
5751 * how many scalar fields in total are in CTOR,
5752 and place it in *P_INIT_ELTS;
5753 * whether the constructor is complete -- in the sense that every
5754 meaningful byte is explicitly given a value --
5755 and place it in *P_COMPLETE.
5757 Return whether or not CTOR is a valid static constant initializer, the same
5758 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5760 bool
5761 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5762 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5764 *p_nz_elts = 0;
5765 *p_init_elts = 0;
5766 *p_complete = true;
5768 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
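/* Worked example (hypothetical): for a constructor of int[4] that lists
   { 1, 0, 2, 0 } explicitly, *P_INIT_ELTS becomes 4, *P_NZ_ELTS becomes 2,
   and *P_COMPLETE stays true because all four elements are given.  */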
5771 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5772 of which had type LAST_TYPE. Each element was itself a complete
5773 initializer, in the sense that every meaningful byte was explicitly
5774 given a value. Return true if the same is true for the constructor
5775 as a whole. */
5777 bool
5778 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5779 const_tree last_type)
5781 if (TREE_CODE (type) == UNION_TYPE
5782 || TREE_CODE (type) == QUAL_UNION_TYPE)
5784 if (num_elts == 0)
5785 return false;
5787 gcc_assert (num_elts == 1 && last_type);
5789 /* ??? We could look at each element of the union, and find the
5790 largest element, which would avoid comparing the size of the
5791 initialized element against any tail padding in the union.
5792 Doesn't seem worth the effort... */
5793 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5796 return count_type_elements (type, true) == num_elts;
5799 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5801 static int
5802 mostly_zeros_p (const_tree exp)
5804 if (TREE_CODE (exp) == CONSTRUCTOR)
5806 HOST_WIDE_INT nz_elts, init_elts;
5807 bool complete_p;
5809 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5810 return !complete_p || nz_elts < init_elts / 4;
5813 return initializer_zerop (exp);
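/* Note that an incomplete constructor is always treated as mostly zero;
   otherwise strictly fewer than a quarter of the initialized scalars may be
   nonzero, so 1 nonzero element out of 8 qualifies but 2 out of 8 does
   not.  */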
5816 /* Return 1 if EXP contains all zeros. */
5818 static int
5819 all_zeros_p (const_tree exp)
5821 if (TREE_CODE (exp) == CONSTRUCTOR)
5823 HOST_WIDE_INT nz_elts, init_elts;
5824 bool complete_p;
5826 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5827 return nz_elts == 0;
5830 return initializer_zerop (exp);
5833 /* Helper function for store_constructor.
5834 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5835 CLEARED is as for store_constructor.
5836 ALIAS_SET is the alias set to use for any stores.
5838 This provides a recursive shortcut back to store_constructor when it isn't
5839 necessary to go through store_field. This is so that we can pass through
5840 the cleared field to let store_constructor know that we may not have to
5841 clear a substructure if the outer structure has already been cleared. */
5843 static void
5844 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5845 HOST_WIDE_INT bitpos, enum machine_mode mode,
5846 tree exp, int cleared, alias_set_type alias_set)
5848 if (TREE_CODE (exp) == CONSTRUCTOR
5849 /* We can only call store_constructor recursively if the size and
5850 bit position are on a byte boundary. */
5851 && bitpos % BITS_PER_UNIT == 0
5852 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5853 /* If we have a nonzero bitpos for a register target, then we just
5854 let store_field do the bitfield handling. This is unlikely to
5855 generate unnecessary clear instructions anyway. */
5856 && (bitpos == 0 || MEM_P (target)))
5858 if (MEM_P (target))
5859 target
5860 = adjust_address (target,
5861 GET_MODE (target) == BLKmode
5862 || 0 != (bitpos
5863 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5864 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5867 /* Update the alias set, if required. */
5868 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5869 && MEM_ALIAS_SET (target) != 0)
5871 target = copy_rtx (target);
5872 set_mem_alias_set (target, alias_set);
5875 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5877 else
5878 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5882 /* Returns the number of FIELD_DECLs in TYPE. */
5884 static int
5885 fields_length (const_tree type)
5887 tree t = TYPE_FIELDS (type);
5888 int count = 0;
5890 for (; t; t = DECL_CHAIN (t))
5891 if (TREE_CODE (t) == FIELD_DECL)
5892 ++count;
5894 return count;
5898 /* Store the value of constructor EXP into the rtx TARGET.
5899 TARGET is either a REG or a MEM; we know it cannot conflict, since
5900 safe_from_p has been called.
5901 CLEARED is true if TARGET is known to have been zeroed.
5902 SIZE is the number of bytes of TARGET we are allowed to modify: this
5903 may not be the same as the size of EXP if we are assigning to a field
5904 which has been packed to exclude padding bits. */
5906 static void
5907 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5909 tree type = TREE_TYPE (exp);
5910 #ifdef WORD_REGISTER_OPERATIONS
5911 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5912 #endif
5914 switch (TREE_CODE (type))
5916 case RECORD_TYPE:
5917 case UNION_TYPE:
5918 case QUAL_UNION_TYPE:
5920 unsigned HOST_WIDE_INT idx;
5921 tree field, value;
5923 /* If size is zero or the target is already cleared, do nothing. */
5924 if (size == 0 || cleared)
5925 cleared = 1;
5926 /* We either clear the aggregate or indicate the value is dead. */
5927 else if ((TREE_CODE (type) == UNION_TYPE
5928 || TREE_CODE (type) == QUAL_UNION_TYPE)
5929 && ! CONSTRUCTOR_ELTS (exp))
5930 /* If the constructor is empty, clear the union. */
5932 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5933 cleared = 1;
5936 /* If we are building a static constructor into a register,
5937 set the initial value as zero so we can fold the value into
5938 a constant. But if more than one register is involved,
5939 this probably loses. */
5940 else if (REG_P (target) && TREE_STATIC (exp)
5941 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5943 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5944 cleared = 1;
5947 /* If the constructor has fewer fields than the structure or
5948 if we are initializing the structure to mostly zeros, clear
5949 the whole structure first. Don't do this if TARGET is a
5950 register whose mode size isn't equal to SIZE since
5951 clear_storage can't handle this case. */
5952 else if (size > 0
5953 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5954 != fields_length (type))
5955 || mostly_zeros_p (exp))
5956 && (!REG_P (target)
5957 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5958 == size)))
5960 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5961 cleared = 1;
5964 if (REG_P (target) && !cleared)
5965 emit_clobber (target);
5967 /* Store each element of the constructor into the
5968 corresponding field of TARGET. */
5969 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5971 enum machine_mode mode;
5972 HOST_WIDE_INT bitsize;
5973 HOST_WIDE_INT bitpos = 0;
5974 tree offset;
5975 rtx to_rtx = target;
5977 /* Just ignore missing fields. We cleared the whole
5978 structure, above, if any fields are missing. */
5979 if (field == 0)
5980 continue;
5982 if (cleared && initializer_zerop (value))
5983 continue;
5985 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5986 bitsize = tree_to_uhwi (DECL_SIZE (field));
5987 else
5988 bitsize = -1;
5990 mode = DECL_MODE (field);
5991 if (DECL_BIT_FIELD (field))
5992 mode = VOIDmode;
5994 offset = DECL_FIELD_OFFSET (field);
5995 if (tree_fits_shwi_p (offset)
5996 && tree_fits_shwi_p (bit_position (field)))
5998 bitpos = int_bit_position (field);
5999 offset = 0;
6001 else
6002 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6004 if (offset)
6006 enum machine_mode address_mode;
6007 rtx offset_rtx;
6009 offset
6010 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6011 make_tree (TREE_TYPE (exp),
6012 target));
6014 offset_rtx = expand_normal (offset);
6015 gcc_assert (MEM_P (to_rtx));
6017 address_mode = get_address_mode (to_rtx);
6018 if (GET_MODE (offset_rtx) != address_mode)
6019 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6021 to_rtx = offset_address (to_rtx, offset_rtx,
6022 highest_pow2_factor (offset));
6025 #ifdef WORD_REGISTER_OPERATIONS
6026 /* If this initializes a field that is smaller than a
6027 word, at the start of a word, try to widen it to a full
6028 word. This special case allows us to output C++ member
6029 function initializations in a form that the optimizers
6030 can understand. */
6031 if (REG_P (target)
6032 && bitsize < BITS_PER_WORD
6033 && bitpos % BITS_PER_WORD == 0
6034 && GET_MODE_CLASS (mode) == MODE_INT
6035 && TREE_CODE (value) == INTEGER_CST
6036 && exp_size >= 0
6037 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6039 tree type = TREE_TYPE (value);
6041 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6043 type = lang_hooks.types.type_for_mode
6044 (word_mode, TYPE_UNSIGNED (type));
6045 value = fold_convert (type, value);
6048 if (BYTES_BIG_ENDIAN)
6049 value
6050 = fold_build2 (LSHIFT_EXPR, type, value,
6051 build_int_cst (type,
6052 BITS_PER_WORD - bitsize));
6053 bitsize = BITS_PER_WORD;
6054 mode = word_mode;
6056 #endif
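/* Illustration (hypothetical target): with 32-bit words and
   WORD_REGISTER_OPERATIONS, a 16-bit integral field at bit 0 of a register
   target initialized from an INTEGER_CST is widened above into a
   full-word store, with the value first shifted left by 16 when
   BYTES_BIG_ENDIAN.  */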
6058 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6059 && DECL_NONADDRESSABLE_P (field))
6061 to_rtx = copy_rtx (to_rtx);
6062 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6065 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6066 value, cleared,
6067 get_alias_set (TREE_TYPE (field)));
6069 break;
6071 case ARRAY_TYPE:
6073 tree value, index;
6074 unsigned HOST_WIDE_INT i;
6075 int need_to_clear;
6076 tree domain;
6077 tree elttype = TREE_TYPE (type);
6078 int const_bounds_p;
6079 HOST_WIDE_INT minelt = 0;
6080 HOST_WIDE_INT maxelt = 0;
6082 domain = TYPE_DOMAIN (type);
6083 const_bounds_p = (TYPE_MIN_VALUE (domain)
6084 && TYPE_MAX_VALUE (domain)
6085 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6086 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6088 /* If we have constant bounds for the range of the type, get them. */
6089 if (const_bounds_p)
6091 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6092 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6095 /* If the constructor has fewer elements than the array, clear
6096 the whole array first. Similarly if this is a static
6097 constructor of a non-BLKmode object. */
6098 if (cleared)
6099 need_to_clear = 0;
6100 else if (REG_P (target) && TREE_STATIC (exp))
6101 need_to_clear = 1;
6102 else
6104 unsigned HOST_WIDE_INT idx;
6105 tree index, value;
6106 HOST_WIDE_INT count = 0, zero_count = 0;
6107 need_to_clear = ! const_bounds_p;
6109 /* This loop is a more accurate version of the loop in
6110 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6111 is also needed to check for missing elements. */
6112 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6114 HOST_WIDE_INT this_node_count;
6116 if (need_to_clear)
6117 break;
6119 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6121 tree lo_index = TREE_OPERAND (index, 0);
6122 tree hi_index = TREE_OPERAND (index, 1);
6124 if (! tree_fits_uhwi_p (lo_index)
6125 || ! tree_fits_uhwi_p (hi_index))
6127 need_to_clear = 1;
6128 break;
6131 this_node_count = (tree_to_uhwi (hi_index)
6132 - tree_to_uhwi (lo_index) + 1);
6134 else
6135 this_node_count = 1;
6137 count += this_node_count;
6138 if (mostly_zeros_p (value))
6139 zero_count += this_node_count;
6142 /* Clear the entire array first if there are any missing
6143 elements, or if the incidence of zero elements is >=
6144 75%. */
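/* For example (hypothetical): int a[100] = { 1, 2, 3 } gives count == 3,
   which is less than 100, so the whole array is cleared and only the three
   nonzero elements are stored afterwards; 6 zero elements out of 8 also
   triggers clearing, since 4*6 >= 3*8.  */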
6145 if (! need_to_clear
6146 && (count < maxelt - minelt + 1
6147 || 4 * zero_count >= 3 * count))
6148 need_to_clear = 1;
6151 if (need_to_clear && size > 0)
6153 if (REG_P (target))
6154 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6155 else
6156 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6157 cleared = 1;
6160 if (!cleared && REG_P (target))
6161 /* Inform later passes that the old value is dead. */
6162 emit_clobber (target);
6164 /* Store each element of the constructor into the
6165 corresponding element of TARGET, determined by counting the
6166 elements. */
6167 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6169 enum machine_mode mode;
6170 HOST_WIDE_INT bitsize;
6171 HOST_WIDE_INT bitpos;
6172 rtx xtarget = target;
6174 if (cleared && initializer_zerop (value))
6175 continue;
6177 mode = TYPE_MODE (elttype);
6178 if (mode == BLKmode)
6179 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6180 ? tree_to_uhwi (TYPE_SIZE (elttype))
6181 : -1);
6182 else
6183 bitsize = GET_MODE_BITSIZE (mode);
6185 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6187 tree lo_index = TREE_OPERAND (index, 0);
6188 tree hi_index = TREE_OPERAND (index, 1);
6189 rtx index_r, pos_rtx;
6190 HOST_WIDE_INT lo, hi, count;
6191 tree position;
6193 /* If the range is constant and "small", unroll the loop. */
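/* "Small" here means the target is not in memory, the range covers at most
   two elements, or the unrolled data occupies at most 40 bytes; e.g. a
   RANGE_EXPR spanning ten 4-byte elements (320 bits) is still unrolled.  */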
6194 if (const_bounds_p
6195 && tree_fits_shwi_p (lo_index)
6196 && tree_fits_shwi_p (hi_index)
6197 && (lo = tree_to_shwi (lo_index),
6198 hi = tree_to_shwi (hi_index),
6199 count = hi - lo + 1,
6200 (!MEM_P (target)
6201 || count <= 2
6202 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6203 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6204 <= 40 * 8)))))
6206 lo -= minelt; hi -= minelt;
6207 for (; lo <= hi; lo++)
6209 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6211 if (MEM_P (target)
6212 && !MEM_KEEP_ALIAS_SET_P (target)
6213 && TREE_CODE (type) == ARRAY_TYPE
6214 && TYPE_NONALIASED_COMPONENT (type))
6216 target = copy_rtx (target);
6217 MEM_KEEP_ALIAS_SET_P (target) = 1;
6220 store_constructor_field
6221 (target, bitsize, bitpos, mode, value, cleared,
6222 get_alias_set (elttype));
6225 else
6227 rtx loop_start = gen_label_rtx ();
6228 rtx loop_end = gen_label_rtx ();
6229 tree exit_cond;
6231 expand_normal (hi_index);
6233 index = build_decl (EXPR_LOCATION (exp),
6234 VAR_DECL, NULL_TREE, domain);
6235 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6236 SET_DECL_RTL (index, index_r);
6237 store_expr (lo_index, index_r, 0, false);
6239 /* Build the head of the loop. */
6240 do_pending_stack_adjust ();
6241 emit_label (loop_start);
6243 /* Assign value to element index. */
6244 position =
6245 fold_convert (ssizetype,
6246 fold_build2 (MINUS_EXPR,
6247 TREE_TYPE (index),
6248 index,
6249 TYPE_MIN_VALUE (domain)));
6251 position =
6252 size_binop (MULT_EXPR, position,
6253 fold_convert (ssizetype,
6254 TYPE_SIZE_UNIT (elttype)));
6256 pos_rtx = expand_normal (position);
6257 xtarget = offset_address (target, pos_rtx,
6258 highest_pow2_factor (position));
6259 xtarget = adjust_address (xtarget, mode, 0);
6260 if (TREE_CODE (value) == CONSTRUCTOR)
6261 store_constructor (value, xtarget, cleared,
6262 bitsize / BITS_PER_UNIT);
6263 else
6264 store_expr (value, xtarget, 0, false);
6266 /* Generate a conditional jump to exit the loop. */
6267 exit_cond = build2 (LT_EXPR, integer_type_node,
6268 index, hi_index);
6269 jumpif (exit_cond, loop_end, -1);
6271 /* Update the loop counter, and jump to the head of
6272 the loop. */
6273 expand_assignment (index,
6274 build2 (PLUS_EXPR, TREE_TYPE (index),
6275 index, integer_one_node),
6276 false);
6278 emit_jump (loop_start);
6280 /* Build the end of the loop. */
6281 emit_label (loop_end);
6284 else if ((index != 0 && ! tree_fits_shwi_p (index))
6285 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6287 tree position;
6289 if (index == 0)
6290 index = ssize_int (1);
6292 if (minelt)
6293 index = fold_convert (ssizetype,
6294 fold_build2 (MINUS_EXPR,
6295 TREE_TYPE (index),
6296 index,
6297 TYPE_MIN_VALUE (domain)));
6299 position =
6300 size_binop (MULT_EXPR, index,
6301 fold_convert (ssizetype,
6302 TYPE_SIZE_UNIT (elttype)));
6303 xtarget = offset_address (target,
6304 expand_normal (position),
6305 highest_pow2_factor (position));
6306 xtarget = adjust_address (xtarget, mode, 0);
6307 store_expr (value, xtarget, 0, false);
6309 else
6311 if (index != 0)
6312 bitpos = ((tree_to_shwi (index) - minelt)
6313 * tree_to_uhwi (TYPE_SIZE (elttype)));
6314 else
6315 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6317 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6318 && TREE_CODE (type) == ARRAY_TYPE
6319 && TYPE_NONALIASED_COMPONENT (type))
6321 target = copy_rtx (target);
6322 MEM_KEEP_ALIAS_SET_P (target) = 1;
6324 store_constructor_field (target, bitsize, bitpos, mode, value,
6325 cleared, get_alias_set (elttype));
6328 break;
6331 case VECTOR_TYPE:
6333 unsigned HOST_WIDE_INT idx;
6334 constructor_elt *ce;
6335 int i;
6336 int need_to_clear;
6337 int icode = CODE_FOR_nothing;
6338 tree elttype = TREE_TYPE (type);
6339 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6340 enum machine_mode eltmode = TYPE_MODE (elttype);
6341 HOST_WIDE_INT bitsize;
6342 HOST_WIDE_INT bitpos;
6343 rtvec vector = NULL;
6344 unsigned n_elts;
6345 alias_set_type alias;
6347 gcc_assert (eltmode != BLKmode);
6349 n_elts = TYPE_VECTOR_SUBPARTS (type);
6350 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6352 enum machine_mode mode = GET_MODE (target);
6354 icode = (int) optab_handler (vec_init_optab, mode);
6355 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6356 if (icode != CODE_FOR_nothing)
6358 tree value;
6360 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6361 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6363 icode = CODE_FOR_nothing;
6364 break;
6367 if (icode != CODE_FOR_nothing)
6369 unsigned int i;
6371 vector = rtvec_alloc (n_elts);
6372 for (i = 0; i < n_elts; i++)
6373 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6377 /* If the constructor has fewer elements than the vector,
6378 clear the whole vector first. Similarly if this is a static
6379 constructor of a non-BLKmode object. */
6380 if (cleared)
6381 need_to_clear = 0;
6382 else if (REG_P (target) && TREE_STATIC (exp))
6383 need_to_clear = 1;
6384 else
6386 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6387 tree value;
6389 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6391 int n_elts_here = tree_to_uhwi
6392 (int_const_binop (TRUNC_DIV_EXPR,
6393 TYPE_SIZE (TREE_TYPE (value)),
6394 TYPE_SIZE (elttype)));
6396 count += n_elts_here;
6397 if (mostly_zeros_p (value))
6398 zero_count += n_elts_here;
6401 /* Clear the entire vector first if there are any missing elements,
6402 or if the incidence of zero elements is >= 75%. */
6403 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6406 if (need_to_clear && size > 0 && !vector)
6408 if (REG_P (target))
6409 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6410 else
6411 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6412 cleared = 1;
6415 /* Inform later passes that the old value is dead. */
6416 if (!cleared && !vector && REG_P (target))
6417 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6419 if (MEM_P (target))
6420 alias = MEM_ALIAS_SET (target);
6421 else
6422 alias = get_alias_set (elttype);
6424 /* Store each element of the constructor into the corresponding
6425 element of TARGET, determined by counting the elements. */
6426 for (idx = 0, i = 0;
6427 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6428 idx++, i += bitsize / elt_size)
6430 HOST_WIDE_INT eltpos;
6431 tree value = ce->value;
6433 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6434 if (cleared && initializer_zerop (value))
6435 continue;
6437 if (ce->index)
6438 eltpos = tree_to_uhwi (ce->index);
6439 else
6440 eltpos = i;
6442 if (vector)
6444 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6445 elements. */
6446 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6447 RTVEC_ELT (vector, eltpos)
6448 = expand_normal (value);
6450 else
6452 enum machine_mode value_mode =
6453 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6454 ? TYPE_MODE (TREE_TYPE (value))
6455 : eltmode;
6456 bitpos = eltpos * elt_size;
6457 store_constructor_field (target, bitsize, bitpos, value_mode,
6458 value, cleared, alias);
6462 if (vector)
6463 emit_insn (GEN_FCN (icode)
6464 (target,
6465 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6466 break;
6469 default:
6470 gcc_unreachable ();
6474 /* Store the value of EXP (an expression tree)
6475 into a subfield of TARGET which has mode MODE and occupies
6476 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6477 If MODE is VOIDmode, it means that we are storing into a bit-field.
6479 BITREGION_START is bitpos of the first bitfield in this region.
6480 BITREGION_END is the bitpos of the ending bitfield in this region.
6481 These two fields are 0 if the C++ memory model does not apply,
6482 or if we are not interested in keeping track of bitfield regions.
6484 Always return const0_rtx unless we have something particular to
6485 return.
6487 ALIAS_SET is the alias set for the destination. This value will
6488 (in general) be different from that for TARGET, since TARGET is a
6489 reference to the containing structure.
6491 If NONTEMPORAL is true, try generating a nontemporal store. */
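/* For example (hypothetical): an assignment to a 3-bit bit-field at bit
   offset 5 of a structure in memory reaches this function with
   BITSIZE == 3, BITPOS == 5 and MODE == VOIDmode, and is carried out by
   the store_bit_field path below.  */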
6493 static rtx
6494 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6495 unsigned HOST_WIDE_INT bitregion_start,
6496 unsigned HOST_WIDE_INT bitregion_end,
6497 enum machine_mode mode, tree exp,
6498 alias_set_type alias_set, bool nontemporal)
6500 if (TREE_CODE (exp) == ERROR_MARK)
6501 return const0_rtx;
6503 /* If we have nothing to store, do nothing unless the expression has
6504 side-effects. */
6505 if (bitsize == 0)
6506 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6508 if (GET_CODE (target) == CONCAT)
6510 /* We're storing into a struct containing a single __complex. */
6512 gcc_assert (!bitpos);
6513 return store_expr (exp, target, 0, nontemporal);
6516 /* If the structure is in a register or if the component
6517 is a bit field, we cannot use addressing to access it.
6518 Use bit-field techniques or SUBREG to store in it. */
6520 if (mode == VOIDmode
6521 || (mode != BLKmode && ! direct_store[(int) mode]
6522 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6523 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6524 || REG_P (target)
6525 || GET_CODE (target) == SUBREG
6526 /* If the field isn't aligned enough to store as an ordinary memref,
6527 store it as a bit field. */
6528 || (mode != BLKmode
6529 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6530 || bitpos % GET_MODE_ALIGNMENT (mode))
6531 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6532 || (bitpos % BITS_PER_UNIT != 0)))
6533 || (bitsize >= 0 && mode != BLKmode
6534 && GET_MODE_BITSIZE (mode) > bitsize)
6535 /* If the RHS and field are a constant size and the size of the
6536 RHS isn't the same size as the bitfield, we must use bitfield
6537 operations. */
6538 || (bitsize >= 0
6539 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6540 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6541 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6542 decl we must use bitfield operations. */
6543 || (bitsize >= 0
6544 && TREE_CODE (exp) == MEM_REF
6545 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6546 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6547 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6548 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6550 rtx temp;
6551 gimple nop_def;
6553 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6554 implies a mask operation. If the precision is the same size as
6555 the field we're storing into, that mask is redundant. This is
6556 particularly common with bit field assignments generated by the
6557 C front end. */
6558 nop_def = get_def_for_expr (exp, NOP_EXPR);
6559 if (nop_def)
6561 tree type = TREE_TYPE (exp);
6562 if (INTEGRAL_TYPE_P (type)
6563 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6564 && bitsize == TYPE_PRECISION (type))
6566 tree op = gimple_assign_rhs1 (nop_def);
6567 type = TREE_TYPE (op);
6568 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6569 exp = op;
6573 temp = expand_normal (exp);
6575 /* If BITSIZE is narrower than the size of the type of EXP
6576 we will be narrowing TEMP. Normally, what's wanted are the
6577 low-order bits. However, if EXP's type is a record and this is
6578 a big-endian machine, we want the upper BITSIZE bits. */
6579 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6580 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6581 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6582 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6583 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6584 NULL_RTX, 1);
6586 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6587 if (mode != VOIDmode && mode != BLKmode
6588 && mode != TYPE_MODE (TREE_TYPE (exp)))
6589 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6591 /* If the modes of TEMP and TARGET are both BLKmode, both
6592 must be in memory and BITPOS must be aligned on a byte
6593 boundary. If so, we simply do a block copy. Likewise
6594 for a BLKmode-like TARGET. */
6595 if (GET_MODE (temp) == BLKmode
6596 && (GET_MODE (target) == BLKmode
6597 || (MEM_P (target)
6598 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6599 && (bitpos % BITS_PER_UNIT) == 0
6600 && (bitsize % BITS_PER_UNIT) == 0)))
6602 gcc_assert (MEM_P (target) && MEM_P (temp)
6603 && (bitpos % BITS_PER_UNIT) == 0);
6605 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6606 emit_block_move (target, temp,
6607 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6608 / BITS_PER_UNIT),
6609 BLOCK_OP_NORMAL);
6611 return const0_rtx;
6614 /* Handle calls that return values in multiple non-contiguous locations.
6615 The Irix 6 ABI has examples of this. */
6616 if (GET_CODE (temp) == PARALLEL)
6618 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6619 rtx temp_target;
6620 if (mode == BLKmode)
6621 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6622 temp_target = gen_reg_rtx (mode);
6623 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6624 temp = temp_target;
6626 else if (mode == BLKmode)
6628 /* Handle calls that return BLKmode values in registers. */
6629 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6631 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6632 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6633 temp = temp_target;
6635 else
6637 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6638 rtx temp_target;
6639 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6640 temp_target = gen_reg_rtx (mode);
6641 temp_target
6642 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6643 temp_target, mode, mode);
6644 temp = temp_target;
6648 /* Store the value in the bitfield. */
6649 store_bit_field (target, bitsize, bitpos,
6650 bitregion_start, bitregion_end,
6651 mode, temp);
6653 return const0_rtx;
6655 else
6657 /* Now build a reference to just the desired component. */
6658 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6660 if (to_rtx == target)
6661 to_rtx = copy_rtx (to_rtx);
6663 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6664 set_mem_alias_set (to_rtx, alias_set);
6666 return store_expr (exp, to_rtx, 0, nontemporal);
6670 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6671 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6672 codes and find the ultimate containing object, which we return.
6674 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6675 bit position, and *PUNSIGNEDP to the signedness of the field.
6676 If the position of the field is variable, we store a tree
6677 giving the variable offset (in units) in *POFFSET.
6678 This offset is in addition to the bit position.
6679 If the position is not variable, we store 0 in *POFFSET.
6681 If any of the extraction expressions is volatile,
6682 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6684 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6685 Otherwise, it is a mode that can be used to access the field.
6687 If the field describes a variable-sized object, *PMODE is set to
6688 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6689 this case, but the address of the object can be found.
6691 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6692 look through nodes that serve as markers of a greater alignment than
6693 the one that can be deduced from the expression. These nodes make it
6694 possible for front-ends to prevent temporaries from being created by
6695 the middle-end on alignment considerations. For that purpose, the
6696 normal operating mode at high-level is to always pass FALSE so that
6697 the ultimate containing object is really returned; moreover, the
6698 associated predicate handled_component_p will always return TRUE
6699 on these nodes, thus indicating that they are essentially handled
6700 by get_inner_reference. TRUE should only be passed when the caller
6701 is scanning the expression in order to build another representation
6702 and specifically knows how to handle these nodes; as such, this is
6703 the normal operating mode in the RTL expanders. */
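/* For example (hypothetical, with a variable index I): for a reference
   such as s.f[i], an ARRAY_REF of a COMPONENT_REF, the innermost object S
   is returned, *PBITSIZE and *PBITPOS describe the element size and the
   constant part of the position, and the remaining I-dependent byte
   offset goes into *POFFSET.  */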
6705 tree
6706 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6707 HOST_WIDE_INT *pbitpos, tree *poffset,
6708 enum machine_mode *pmode, int *punsignedp,
6709 int *pvolatilep, bool keep_aligning)
6711 tree size_tree = 0;
6712 enum machine_mode mode = VOIDmode;
6713 bool blkmode_bitfield = false;
6714 tree offset = size_zero_node;
6715 double_int bit_offset = double_int_zero;
6717 /* First get the mode, signedness, and size. We do this from just the
6718 outermost expression. */
6719 *pbitsize = -1;
6720 if (TREE_CODE (exp) == COMPONENT_REF)
6722 tree field = TREE_OPERAND (exp, 1);
6723 size_tree = DECL_SIZE (field);
6724 if (flag_strict_volatile_bitfields > 0
6725 && TREE_THIS_VOLATILE (exp)
6726 && DECL_BIT_FIELD_TYPE (field)
6727 && DECL_MODE (field) != BLKmode)
6728 /* Volatile bitfields should be accessed in the mode of the
6729 field's type, not the mode computed based on the bit
6730 size. */
6731 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6732 else if (!DECL_BIT_FIELD (field))
6733 mode = DECL_MODE (field);
6734 else if (DECL_MODE (field) == BLKmode)
6735 blkmode_bitfield = true;
6737 *punsignedp = DECL_UNSIGNED (field);
6739 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6741 size_tree = TREE_OPERAND (exp, 1);
6742 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6743 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6745 /* For vector types, with the correct size of access, use the mode of
6746 the inner type. */
6747 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6748 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6749 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6750 mode = TYPE_MODE (TREE_TYPE (exp));
6752 else
6754 mode = TYPE_MODE (TREE_TYPE (exp));
6755 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6757 if (mode == BLKmode)
6758 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6759 else
6760 *pbitsize = GET_MODE_BITSIZE (mode);
6763 if (size_tree != 0)
6765 if (! tree_fits_uhwi_p (size_tree))
6766 mode = BLKmode, *pbitsize = -1;
6767 else
6768 *pbitsize = tree_to_uhwi (size_tree);
6771 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6772 and find the ultimate containing object. */
6773 while (1)
6775 switch (TREE_CODE (exp))
6777 case BIT_FIELD_REF:
6778 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6779 break;
6781 case COMPONENT_REF:
6783 tree field = TREE_OPERAND (exp, 1);
6784 tree this_offset = component_ref_field_offset (exp);
6786 /* If this field hasn't been filled in yet, don't go past it.
6787 This should only happen when folding expressions made during
6788 type construction. */
6789 if (this_offset == 0)
6790 break;
6792 offset = size_binop (PLUS_EXPR, offset, this_offset);
6793 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6795 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6797 break;
6799 case ARRAY_REF:
6800 case ARRAY_RANGE_REF:
6802 tree index = TREE_OPERAND (exp, 1);
6803 tree low_bound = array_ref_low_bound (exp);
6804 tree unit_size = array_ref_element_size (exp);
6806 /* We assume all arrays have sizes that are a multiple of a byte.
6807 First subtract the lower bound, if any, in the type of the
6808 index, then convert to sizetype and multiply by the size of
6809 the array element. */
6810 if (! integer_zerop (low_bound))
6811 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6812 index, low_bound);
6814 offset = size_binop (PLUS_EXPR, offset,
6815 size_binop (MULT_EXPR,
6816 fold_convert (sizetype, index),
6817 unit_size));
6819 break;
6821 case REALPART_EXPR:
6822 break;
6824 case IMAGPART_EXPR:
6825 bit_offset += double_int::from_uhwi (*pbitsize);
6826 break;
6828 case VIEW_CONVERT_EXPR:
6829 if (keep_aligning && STRICT_ALIGNMENT
6830 && (TYPE_ALIGN (TREE_TYPE (exp))
6831 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6832 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6833 < BIGGEST_ALIGNMENT)
6834 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6835 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6836 goto done;
6837 break;
6839 case MEM_REF:
6840 /* Hand back the decl for MEM[&decl, off]. */
6841 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6843 tree off = TREE_OPERAND (exp, 1);
6844 if (!integer_zerop (off))
6846 double_int boff, coff = mem_ref_offset (exp);
6847 boff = coff.lshift (BITS_PER_UNIT == 8
6848 ? 3 : exact_log2 (BITS_PER_UNIT));
6849 bit_offset += boff;
6851 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6853 goto done;
6855 default:
6856 goto done;
6859 /* If any reference in the chain is volatile, the effect is volatile. */
6860 if (TREE_THIS_VOLATILE (exp))
6861 *pvolatilep = 1;
6863 exp = TREE_OPERAND (exp, 0);
6865 done:
6867 /* If OFFSET is constant, see if we can return the whole thing as a
6868 constant bit position. Make sure to handle overflow during
6869 this conversion. */
6870 if (TREE_CODE (offset) == INTEGER_CST)
6872 double_int tem = tree_to_double_int (offset);
6873 tem = tem.sext (TYPE_PRECISION (sizetype));
6874 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6875 tem += bit_offset;
6876 if (tem.fits_shwi ())
6878 *pbitpos = tem.to_shwi ();
6879 *poffset = offset = NULL_TREE;
6883 /* Otherwise, split it up. */
6884 if (offset)
6886 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6887 if (bit_offset.is_negative ())
6889 double_int mask
6890 = double_int::mask (BITS_PER_UNIT == 8
6891 ? 3 : exact_log2 (BITS_PER_UNIT));
6892 double_int tem = bit_offset.and_not (mask);
6893 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6894 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6895 bit_offset -= tem;
6896 tem = tem.arshift (BITS_PER_UNIT == 8
6897 ? 3 : exact_log2 (BITS_PER_UNIT),
6898 HOST_BITS_PER_DOUBLE_INT);
6899 offset = size_binop (PLUS_EXPR, offset,
6900 double_int_to_tree (sizetype, tem));
6903 *pbitpos = bit_offset.to_shwi ();
6904 *poffset = offset;
6907 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6908 if (mode == VOIDmode
6909 && blkmode_bitfield
6910 && (*pbitpos % BITS_PER_UNIT) == 0
6911 && (*pbitsize % BITS_PER_UNIT) == 0)
6912 *pmode = BLKmode;
6913 else
6914 *pmode = mode;
6916 return exp;
6919 /* Return a tree of sizetype representing the size, in bytes, of the element
6920 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6922 tree
6923 array_ref_element_size (tree exp)
6925 tree aligned_size = TREE_OPERAND (exp, 3);
6926 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6927 location_t loc = EXPR_LOCATION (exp);
6929 /* If a size was specified in the ARRAY_REF, it's the size measured
6930 in alignment units of the element type. So multiply by that value. */
6931 if (aligned_size)
6933 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6934 sizetype from another type of the same width and signedness. */
6935 if (TREE_TYPE (aligned_size) != sizetype)
6936 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6937 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6938 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6941 /* Otherwise, take the size from that of the element type. Substitute
6942 any PLACEHOLDER_EXPR that we have. */
6943 else
6944 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6947 /* Return a tree representing the lower bound of the array mentioned in
6948 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6950 tree
6951 array_ref_low_bound (tree exp)
6953 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6955 /* If a lower bound is specified in EXP, use it. */
6956 if (TREE_OPERAND (exp, 2))
6957 return TREE_OPERAND (exp, 2);
6959 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6960 substituting for a PLACEHOLDER_EXPR as needed. */
6961 if (domain_type && TYPE_MIN_VALUE (domain_type))
6962 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6964 /* Otherwise, return a zero of the appropriate type. */
6965 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6968 /* Returns true if REF is an array reference to an array at the end of
6969 a structure. If this is the case, the array may be allocated larger
6970 than its upper bound implies. */
6972 bool
6973 array_at_struct_end_p (tree ref)
6975 if (TREE_CODE (ref) != ARRAY_REF
6976 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6977 return false;
6979 while (handled_component_p (ref))
6981 /* If the reference chain contains a component reference to a
6982 non-union type and another field follows, the reference
6983 is not at the end of a structure. */
6984 if (TREE_CODE (ref) == COMPONENT_REF
6985 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6987 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6988 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6989 nextf = DECL_CHAIN (nextf);
6990 if (nextf)
6991 return false;
6994 ref = TREE_OPERAND (ref, 0);
6997 /* If the reference is based on a declared entity, the size of the array
6998 is constrained by its given domain. */
6999 if (DECL_P (ref))
7000 return false;
7002 return true;
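/* For example (hypothetical): given  struct s { int n; int a[1]; } *p;
   the reference p->a[i] is an array at the end of a structure, so its
   allocation may extend past the declared bound, whereas v.a[i] for a
   declared object  struct s v;  is not, because the size of V is fixed by
   its declaration.  */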
7005 /* Return a tree representing the upper bound of the array mentioned in
7006 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7008 tree
7009 array_ref_up_bound (tree exp)
7011 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7013 /* If there is a domain type and it has an upper bound, use it, substituting
7014 for a PLACEHOLDER_EXPR as needed. */
7015 if (domain_type && TYPE_MAX_VALUE (domain_type))
7016 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7018 /* Otherwise fail. */
7019 return NULL_TREE;
7022 /* Return a tree representing the offset, in bytes, of the field referenced
7023 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7025 tree
7026 component_ref_field_offset (tree exp)
7028 tree aligned_offset = TREE_OPERAND (exp, 2);
7029 tree field = TREE_OPERAND (exp, 1);
7030 location_t loc = EXPR_LOCATION (exp);
7032 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7033 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7034 value. */
7035 if (aligned_offset)
7037 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7038 sizetype from another type of the same width and signedness. */
7039 if (TREE_TYPE (aligned_offset) != sizetype)
7040 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7041 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7042 size_int (DECL_OFFSET_ALIGN (field)
7043 / BITS_PER_UNIT));
7046 /* Otherwise, take the offset from that of the field. Substitute
7047 any PLACEHOLDER_EXPR that we have. */
7048 else
7049 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7052 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7054 static unsigned HOST_WIDE_INT
7055 target_align (const_tree target)
7057 /* We might have a chain of nested references with intermediate misaligning
7058 bitfield components, so we need to recurse to find out. */
7060 unsigned HOST_WIDE_INT this_align, outer_align;
7062 switch (TREE_CODE (target))
7064 case BIT_FIELD_REF:
7065 return 1;
7067 case COMPONENT_REF:
7068 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7069 outer_align = target_align (TREE_OPERAND (target, 0));
7070 return MIN (this_align, outer_align);
7072 case ARRAY_REF:
7073 case ARRAY_RANGE_REF:
7074 this_align = TYPE_ALIGN (TREE_TYPE (target));
7075 outer_align = target_align (TREE_OPERAND (target, 0));
7076 return MIN (this_align, outer_align);
7078 CASE_CONVERT:
7079 case NON_LVALUE_EXPR:
7080 case VIEW_CONVERT_EXPR:
7081 this_align = TYPE_ALIGN (TREE_TYPE (target));
7082 outer_align = target_align (TREE_OPERAND (target, 0));
7083 return MAX (this_align, outer_align);
7085 default:
7086 return TYPE_ALIGN (TREE_TYPE (target));
7091 /* Given an rtx VALUE that may contain additions and multiplications, return
7092 an equivalent value that just refers to a register, memory, or constant.
7093 This is done by generating instructions to perform the arithmetic and
7094 returning a pseudo-register containing the value.
7096 The returned value may be a REG, SUBREG, MEM or constant. */
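/* For example (hypothetical): given (plus:SI (reg:SI 100) (reg:SI 101))
   and a null TARGET, the code below emits an add and returns a pseudo
   holding the sum; a MINUS with a CONST_INT operand is first rewritten as
   a PLUS of the negated constant.  */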
7098 rtx
7099 force_operand (rtx value, rtx target)
7101 rtx op1, op2;
7102 /* Use subtarget as the target for operand 0 of a binary operation. */
7103 rtx subtarget = get_subtarget (target);
7104 enum rtx_code code = GET_CODE (value);
7106 /* Check for subreg applied to an expression produced by loop optimizer. */
7107 if (code == SUBREG
7108 && !REG_P (SUBREG_REG (value))
7109 && !MEM_P (SUBREG_REG (value)))
7111 value
7112 = simplify_gen_subreg (GET_MODE (value),
7113 force_reg (GET_MODE (SUBREG_REG (value)),
7114 force_operand (SUBREG_REG (value),
7115 NULL_RTX)),
7116 GET_MODE (SUBREG_REG (value)),
7117 SUBREG_BYTE (value));
7118 code = GET_CODE (value);
7121 /* Check for a PIC address load. */
7122 if ((code == PLUS || code == MINUS)
7123 && XEXP (value, 0) == pic_offset_table_rtx
7124 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7125 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7126 || GET_CODE (XEXP (value, 1)) == CONST))
7128 if (!subtarget)
7129 subtarget = gen_reg_rtx (GET_MODE (value));
7130 emit_move_insn (subtarget, value);
7131 return subtarget;
7134 if (ARITHMETIC_P (value))
7136 op2 = XEXP (value, 1);
7137 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7138 subtarget = 0;
7139 if (code == MINUS && CONST_INT_P (op2))
7141 code = PLUS;
7142 op2 = negate_rtx (GET_MODE (value), op2);
7145 /* Check for an addition with OP2 a constant integer and our first
7146 operand a PLUS of a virtual register and something else. In that
7147 case, we want to emit the sum of the virtual register and the
7148 constant first and then add the other value. This allows virtual
7149 register instantiation to simply modify the constant rather than
7150 creating another one around this addition. */
7151 if (code == PLUS && CONST_INT_P (op2)
7152 && GET_CODE (XEXP (value, 0)) == PLUS
7153 && REG_P (XEXP (XEXP (value, 0), 0))
7154 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7155 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7157 rtx temp = expand_simple_binop (GET_MODE (value), code,
7158 XEXP (XEXP (value, 0), 0), op2,
7159 subtarget, 0, OPTAB_LIB_WIDEN);
7160 return expand_simple_binop (GET_MODE (value), code, temp,
7161 force_operand (XEXP (XEXP (value,
7162 0), 1), 0),
7163 target, 0, OPTAB_LIB_WIDEN);
7166 op1 = force_operand (XEXP (value, 0), subtarget);
7167 op2 = force_operand (op2, NULL_RTX);
7168 switch (code)
7170 case MULT:
7171 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7172 case DIV:
7173 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7174 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7175 target, 1, OPTAB_LIB_WIDEN);
7176 else
7177 return expand_divmod (0,
7178 FLOAT_MODE_P (GET_MODE (value))
7179 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7180 GET_MODE (value), op1, op2, target, 0);
7181 case MOD:
7182 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7183 target, 0);
7184 case UDIV:
7185 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7186 target, 1);
7187 case UMOD:
7188 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7189 target, 1);
7190 case ASHIFTRT:
7191 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7192 target, 0, OPTAB_LIB_WIDEN);
7193 default:
7194 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7195 target, 1, OPTAB_LIB_WIDEN);
7198 if (UNARY_P (value))
7200 if (!target)
7201 target = gen_reg_rtx (GET_MODE (value));
7202 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7203 switch (code)
7205 case ZERO_EXTEND:
7206 case SIGN_EXTEND:
7207 case TRUNCATE:
7208 case FLOAT_EXTEND:
7209 case FLOAT_TRUNCATE:
7210 convert_move (target, op1, code == ZERO_EXTEND);
7211 return target;
7213 case FIX:
7214 case UNSIGNED_FIX:
7215 expand_fix (target, op1, code == UNSIGNED_FIX);
7216 return target;
7218 case FLOAT:
7219 case UNSIGNED_FLOAT:
7220 expand_float (target, op1, code == UNSIGNED_FLOAT);
7221 return target;
7223 default:
7224 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7228 #ifdef INSN_SCHEDULING
7229 /* On machines that have insn scheduling, we want all memory references to be
7230 explicit, so we need to deal with such paradoxical SUBREGs. */
7231 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7232 value
7233 = simplify_gen_subreg (GET_MODE (value),
7234 force_reg (GET_MODE (SUBREG_REG (value)),
7235 force_operand (SUBREG_REG (value),
7236 NULL_RTX)),
7237 GET_MODE (SUBREG_REG (value)),
7238 SUBREG_BYTE (value));
7239 #endif
7241 return value;
7244 /* Subroutine of expand_expr: return nonzero iff there is no way that
7245 EXP can reference X, which is being modified. TOP_P is nonzero if this
7246 call is going to be used to determine whether we need a temporary
7247 for EXP, as opposed to a recursive call to this function.
7249 It is always safe for this routine to return zero since it merely
7250 searches for optimization opportunities. */
7252 static int
7253 safe_from_p (const_rtx x, tree exp, int top_p)
7255 rtx exp_rtl = 0;
7256 int i, nops;
7258 if (x == 0
7259 /* If EXP has varying size, we MUST use a target since we currently
7260 have no way of allocating temporaries of variable size
7261 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7262 So we assume here that something at a higher level has prevented a
7263 clash. This is somewhat bogus, but the best we can do. Only
7264 do this when X is BLKmode and when we are at the top level. */
7265 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7266 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7267 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7268 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7269 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7270 != INTEGER_CST)
7271 && GET_MODE (x) == BLKmode)
7272 /* If X is in the outgoing argument area, it is always safe. */
7273 || (MEM_P (x)
7274 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7275 || (GET_CODE (XEXP (x, 0)) == PLUS
7276 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7277 return 1;
7279 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7280 find the underlying pseudo. */
7281 if (GET_CODE (x) == SUBREG)
7283 x = SUBREG_REG (x);
7284 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7285 return 0;
7288 /* Now look at our tree code and possibly recurse. */
7289 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7291 case tcc_declaration:
7292 exp_rtl = DECL_RTL_IF_SET (exp);
7293 break;
7295 case tcc_constant:
7296 return 1;
7298 case tcc_exceptional:
7299 if (TREE_CODE (exp) == TREE_LIST)
7301 while (1)
7303 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7304 return 0;
7305 exp = TREE_CHAIN (exp);
7306 if (!exp)
7307 return 1;
7308 if (TREE_CODE (exp) != TREE_LIST)
7309 return safe_from_p (x, exp, 0);
7312 else if (TREE_CODE (exp) == CONSTRUCTOR)
7314 constructor_elt *ce;
7315 unsigned HOST_WIDE_INT idx;
7317 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7318 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7319 || !safe_from_p (x, ce->value, 0))
7320 return 0;
7321 return 1;
7323 else if (TREE_CODE (exp) == ERROR_MARK)
7324 return 1; /* An already-visited SAVE_EXPR? */
7325 else
7326 return 0;
7328 case tcc_statement:
7329 /* The only case we look at here is the DECL_INITIAL inside a
7330 DECL_EXPR. */
7331 return (TREE_CODE (exp) != DECL_EXPR
7332 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7333 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7334 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7336 case tcc_binary:
7337 case tcc_comparison:
7338 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7339 return 0;
7340 /* Fall through. */
7342 case tcc_unary:
7343 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7345 case tcc_expression:
7346 case tcc_reference:
7347 case tcc_vl_exp:
7348 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7349 the expression. If it is set, we conflict iff we are that rtx or
7350 both are in memory. Otherwise, we check all operands of the
7351 expression recursively. */
7353 switch (TREE_CODE (exp))
7355 case ADDR_EXPR:
7356 /* If the operand is static or we are static, we can't conflict.
7357 Likewise if we don't conflict with the operand at all. */
7358 if (staticp (TREE_OPERAND (exp, 0))
7359 || TREE_STATIC (exp)
7360 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7361 return 1;
7363 /* Otherwise, the only way this can conflict is if we are taking
7364 the address of a DECL whose address is part of X, which is
7365 very rare. */
7366 exp = TREE_OPERAND (exp, 0);
7367 if (DECL_P (exp))
7369 if (!DECL_RTL_SET_P (exp)
7370 || !MEM_P (DECL_RTL (exp)))
7371 return 0;
7372 else
7373 exp_rtl = XEXP (DECL_RTL (exp), 0);
7375 break;
7377 case MEM_REF:
7378 if (MEM_P (x)
7379 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7380 get_alias_set (exp)))
7381 return 0;
7382 break;
7384 case CALL_EXPR:
7385 /* Assume that the call will clobber all hard registers and
7386 all of memory. */
7387 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7388 || MEM_P (x))
7389 return 0;
7390 break;
7392 case WITH_CLEANUP_EXPR:
7393 case CLEANUP_POINT_EXPR:
7394 /* Lowered by gimplify.c. */
7395 gcc_unreachable ();
7397 case SAVE_EXPR:
7398 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7400 default:
7401 break;
7404 /* If we have an rtx, we do not need to scan our operands. */
7405 if (exp_rtl)
7406 break;
7408 nops = TREE_OPERAND_LENGTH (exp);
7409 for (i = 0; i < nops; i++)
7410 if (TREE_OPERAND (exp, i) != 0
7411 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7412 return 0;
7414 break;
7416 case tcc_type:
7417 /* Should never get a type here. */
7418 gcc_unreachable ();
7421 /* If we have an rtl, find any enclosed object. Then see if we conflict
7422 with it. */
7423 if (exp_rtl)
7425 if (GET_CODE (exp_rtl) == SUBREG)
7427 exp_rtl = SUBREG_REG (exp_rtl);
7428 if (REG_P (exp_rtl)
7429 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7430 return 0;
7433 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7434 are memory and they conflict. */
7435 return ! (rtx_equal_p (x, exp_rtl)
7436 || (MEM_P (x) && MEM_P (exp_rtl)
7437 && true_dependence (exp_rtl, VOIDmode, x)));
7440 /* If we reach here, it is safe. */
7441 return 1;
7445 /* Return the highest power of two that EXP is known to be a multiple of.
7446 This is used in updating alignment of MEMs in array references. */
7448 unsigned HOST_WIDE_INT
7449 highest_pow2_factor (const_tree exp)
7451 unsigned HOST_WIDE_INT ret;
7452 int trailing_zeros = tree_ctz (exp);
7453 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7454 return BIGGEST_ALIGNMENT;
7455 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7456 if (ret > BIGGEST_ALIGNMENT)
7457 return BIGGEST_ALIGNMENT;
7458 return ret;
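/* As an illustrative sketch of the computation above: for an EXP such
   as the constant 24 (binary 11000), tree_ctz reports 3 known trailing
   zero bits, so the result is 1 << 3 == 8; any larger factor, or a
   trailing-zero count of HOST_BITS_PER_WIDE_INT or more, is clamped to
   BIGGEST_ALIGNMENT.  */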
7461 /* Similar, except that the alignment requirements of TARGET are
7462 taken into account. Assume it is at least as aligned as its
7463 type, unless it is a COMPONENT_REF in which case the layout of
7464 the structure gives the alignment. */
7466 static unsigned HOST_WIDE_INT
7467 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7469 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7470 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7472 return MAX (factor, talign);
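/* A minimal sketch, assuming a hypothetical reference EXP known to be
   a multiple of 4 and a TARGET whose type guarantees 8-byte alignment:
   highest_pow2_factor yields 4, target_align / BITS_PER_UNIT gives 8,
   and the MAX above therefore reports 8.  */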
7475 #ifdef HAVE_conditional_move
7476 /* Convert the tree comparison code TCODE to the rtl one where the
7477 signedness is UNSIGNEDP. */
7479 static enum rtx_code
7480 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7482 enum rtx_code code;
7483 switch (tcode)
7485 case EQ_EXPR:
7486 code = EQ;
7487 break;
7488 case NE_EXPR:
7489 code = NE;
7490 break;
7491 case LT_EXPR:
7492 code = unsignedp ? LTU : LT;
7493 break;
7494 case LE_EXPR:
7495 code = unsignedp ? LEU : LE;
7496 break;
7497 case GT_EXPR:
7498 code = unsignedp ? GTU : GT;
7499 break;
7500 case GE_EXPR:
7501 code = unsignedp ? GEU : GE;
7502 break;
7503 case UNORDERED_EXPR:
7504 code = UNORDERED;
7505 break;
7506 case ORDERED_EXPR:
7507 code = ORDERED;
7508 break;
7509 case UNLT_EXPR:
7510 code = UNLT;
7511 break;
7512 case UNLE_EXPR:
7513 code = UNLE;
7514 break;
7515 case UNGT_EXPR:
7516 code = UNGT;
7517 break;
7518 case UNGE_EXPR:
7519 code = UNGE;
7520 break;
7521 case UNEQ_EXPR:
7522 code = UNEQ;
7523 break;
7524 case LTGT_EXPR:
7525 code = LTGT;
7526 break;
7528 default:
7529 gcc_unreachable ();
7531 return code;
7533 #endif
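/* Illustrative sketch of the mapping above: convert_tree_comp_to_rtx
   (LT_EXPR, 0) yields LT while convert_tree_comp_to_rtx (LT_EXPR, 1)
   yields LTU; only the ordering comparisons depend on UNSIGNEDP, the
   EQ/NE and unordered (UN*) codes map one-to-one regardless of
   signedness.  */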
7535 /* Subroutine of expand_expr. Expand the two operands of a binary
7536 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7537 The value may be stored in TARGET if TARGET is nonzero. The
7538 MODIFIER argument is as documented by expand_expr. */
7540 static void
7541 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7542 enum expand_modifier modifier)
7544 if (! safe_from_p (target, exp1, 1))
7545 target = 0;
7546 if (operand_equal_p (exp0, exp1, 0))
7548 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7549 *op1 = copy_rtx (*op0);
7551 else
7553 /* If we need to preserve evaluation order, copy exp0 into its own
7554 temporary variable so that it can't be clobbered by exp1. */
7555 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7556 exp0 = save_expr (exp0);
7557 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7558 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
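/* For instance, expanding the operands of a hypothetical "a + a" takes
   the operand_equal_p path above: the subexpression is expanded once
   and *OP1 is simply a copy_rtx of *OP0.  For distinct operands, when
   flag_evaluation_order is set and EXP1 has side effects, EXP0 is
   wrapped in a SAVE_EXPR first so that EXP1 cannot clobber its
   value.  */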
7563 /* Return a MEM that contains constant EXP. DEFER is as for
7564 output_constant_def and MODIFIER is as for expand_expr. */
7566 static rtx
7567 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7569 rtx mem;
7571 mem = output_constant_def (exp, defer);
7572 if (modifier != EXPAND_INITIALIZER)
7573 mem = use_anchored_address (mem);
7574 return mem;
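/* In other words, the constant is forced out to static storage by
   output_constant_def and a MEM referring to it is returned; outside
   of initializers the address is additionally routed through
   use_anchored_address so it can be expressed relative to a section
   anchor where the target supports that.  */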
7577 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7578 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7580 static rtx
7581 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7582 enum expand_modifier modifier, addr_space_t as)
7584 rtx result, subtarget;
7585 tree inner, offset;
7586 HOST_WIDE_INT bitsize, bitpos;
7587 int volatilep, unsignedp;
7588 enum machine_mode mode1;
7590 /* If we are taking the address of a constant and are at the top level,
7591 we have to use output_constant_def since we can't call force_const_mem
7592 at top level. */
7593 /* ??? This should be considered a front-end bug. We should not be
7594 generating ADDR_EXPR of something that isn't an LVALUE. The only
7595 exception here is STRING_CST. */
7596 if (CONSTANT_CLASS_P (exp))
7598 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7599 if (modifier < EXPAND_SUM)
7600 result = force_operand (result, target);
7601 return result;
7604 /* Everything must be something allowed by is_gimple_addressable. */
7605 switch (TREE_CODE (exp))
7607 case INDIRECT_REF:
7608 /* This case will happen via recursion for &a->b. */
7609 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7611 case MEM_REF:
7613 tree tem = TREE_OPERAND (exp, 0);
7614 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7615 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7616 return expand_expr (tem, target, tmode, modifier);
7619 case CONST_DECL:
7620 /* Expand the initializer like constants above. */
7621 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7622 0, modifier), 0);
7623 if (modifier < EXPAND_SUM)
7624 result = force_operand (result, target);
7625 return result;
7627 case REALPART_EXPR:
7628 /* The real part of the complex number is always first, therefore
7629 the address is the same as the address of the parent object. */
7630 offset = 0;
7631 bitpos = 0;
7632 inner = TREE_OPERAND (exp, 0);
7633 break;
7635 case IMAGPART_EXPR:
7636 /* The imaginary part of the complex number is always second.
7637 The expression is therefore always offset by the size of the
7638 scalar type. */
7639 offset = 0;
7640 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7641 inner = TREE_OPERAND (exp, 0);
7642 break;
7644 case COMPOUND_LITERAL_EXPR:
7645 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7646 rtl_for_decl_init is called on DECL_INITIAL with
7647 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7648 if (modifier == EXPAND_INITIALIZER
7649 && COMPOUND_LITERAL_EXPR_DECL (exp))
7650 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7651 target, tmode, modifier, as);
7652 /* FALLTHRU */
7653 default:
7654 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7655 expand_expr, as that can have various side effects; LABEL_DECLs for
7656 example, may not have their DECL_RTL set yet. Expand the rtl of
7657 CONSTRUCTORs too, which should yield a memory reference for the
7658 constructor's contents. Assume language specific tree nodes can
7659 be expanded in some interesting way. */
7660 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7661 if (DECL_P (exp)
7662 || TREE_CODE (exp) == CONSTRUCTOR
7663 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7665 result = expand_expr (exp, target, tmode,
7666 modifier == EXPAND_INITIALIZER
7667 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7669 /* If the DECL isn't in memory, then the DECL wasn't properly
7670 marked TREE_ADDRESSABLE, which will be either a front-end
7671 or a tree optimizer bug. */
7673 if (TREE_ADDRESSABLE (exp)
7674 && ! MEM_P (result)
7675 && ! targetm.calls.allocate_stack_slots_for_args ())
7677 error ("local frame unavailable (naked function?)");
7678 return result;
7680 else
7681 gcc_assert (MEM_P (result));
7682 result = XEXP (result, 0);
7684 /* ??? Is this needed anymore? */
7685 if (DECL_P (exp))
7686 TREE_USED (exp) = 1;
7688 if (modifier != EXPAND_INITIALIZER
7689 && modifier != EXPAND_CONST_ADDRESS
7690 && modifier != EXPAND_SUM)
7691 result = force_operand (result, target);
7692 return result;
7695 /* Pass FALSE as the last argument to get_inner_reference although
7696 we are expanding to RTL. The rationale is that we know how to
7697 handle "aligning nodes" here: we can just bypass them because
7698 they won't change the final object whose address will be returned
7699 (they actually exist only for that purpose). */
7700 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7701 &mode1, &unsignedp, &volatilep, false);
7702 break;
7705 /* We must have made progress. */
7706 gcc_assert (inner != exp);
7708 subtarget = offset || bitpos ? NULL_RTX : target;
7709 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7710 inner alignment, force the inner to be sufficiently aligned. */
7711 if (CONSTANT_CLASS_P (inner)
7712 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7714 inner = copy_node (inner);
7715 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7716 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7717 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7719 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7721 if (offset)
7723 rtx tmp;
7725 if (modifier != EXPAND_NORMAL)
7726 result = force_operand (result, NULL);
7727 tmp = expand_expr (offset, NULL_RTX, tmode,
7728 modifier == EXPAND_INITIALIZER
7729 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7731 /* expand_expr is allowed to return an object in a mode other
7732 than TMODE. If it did, we need to convert. */
7733 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7734 tmp = convert_modes (tmode, GET_MODE (tmp),
7735 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7736 result = convert_memory_address_addr_space (tmode, result, as);
7737 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7739 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7740 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7741 else
7743 subtarget = bitpos ? NULL_RTX : target;
7744 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7745 1, OPTAB_LIB_WIDEN);
7749 if (bitpos)
7751 /* Someone beforehand should have rejected taking the address
7752 of such an object. */
7753 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7755 result = convert_memory_address_addr_space (tmode, result, as);
7756 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7757 if (modifier < EXPAND_SUM)
7758 result = force_operand (result, target);
7761 return result;
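/* To sketch the offset/bitpos handling above with a hypothetical
   "&s.f" where the field F sits 12 bytes into S with no variable
   offset: get_inner_reference returns offset == 0 and bitpos == 96
   (assuming BITS_PER_UNIT == 8), the address of S is expanded by the
   recursive call, and plus_constant then adds 96 / BITS_PER_UNIT == 12
   to it.  */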
7764 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7765 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7767 static rtx
7768 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7769 enum expand_modifier modifier)
7771 addr_space_t as = ADDR_SPACE_GENERIC;
7772 enum machine_mode address_mode = Pmode;
7773 enum machine_mode pointer_mode = ptr_mode;
7774 enum machine_mode rmode;
7775 rtx result;
7777 /* Target mode of VOIDmode says "whatever's natural". */
7778 if (tmode == VOIDmode)
7779 tmode = TYPE_MODE (TREE_TYPE (exp));
7781 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7783 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7784 address_mode = targetm.addr_space.address_mode (as);
7785 pointer_mode = targetm.addr_space.pointer_mode (as);
7788 /* We can get called with some Weird Things if the user does silliness
7789 like "(short) &a". In that case, convert_memory_address won't do
7790 the right thing, so ignore the given target mode. */
7791 if (tmode != address_mode && tmode != pointer_mode)
7792 tmode = address_mode;
7794 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7795 tmode, modifier, as);
7797 /* Despite expand_expr's claims about ignoring TMODE when it is not
7798 strictly convenient, things break if we don't honor it. Note
7799 that combined with the above, we only do this for pointer modes. */
7800 rmode = GET_MODE (result);
7801 if (rmode == VOIDmode)
7802 rmode = tmode;
7803 if (rmode != tmode)
7804 result = convert_memory_address_addr_space (tmode, result, as);
7806 return result;
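/* As a concrete illustration of the mode fixup above: for a
   hypothetical "(short) &a" the requested TMODE would be HImode on a
   typical target, which is neither the address mode nor the pointer
   mode of the address space, so TMODE is overridden to the address
   mode and any remaining mismatch in the result is fixed up with
   convert_memory_address_addr_space.  */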
7809 /* Generate code for computing CONSTRUCTOR EXP.
7810 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7811 is TRUE, instead of creating a temporary variable in memory
7812 NULL is returned and the caller needs to handle it differently. */
7814 static rtx
7815 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7816 bool avoid_temp_mem)
7818 tree type = TREE_TYPE (exp);
7819 enum machine_mode mode = TYPE_MODE (type);
7821 /* Try to avoid creating a temporary at all. This is possible
7822 if all of the initializer is zero.
7823 FIXME: try to handle all [0..255] initializers we can handle
7824 with memset. */
7825 if (TREE_STATIC (exp)
7826 && !TREE_ADDRESSABLE (exp)
7827 && target != 0 && mode == BLKmode
7828 && all_zeros_p (exp))
7830 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7831 return target;
7834 /* All elts simple constants => refer to a constant in memory. But
7835 if this is a non-BLKmode mode, let it store a field at a time
7836 since that should make a CONST_INT or CONST_DOUBLE when we
7837 fold. Likewise, if we have a target we can use, it is best to
7838 store directly into the target unless the type is large enough
7839 that memcpy will be used. If we are making an initializer and
7840 all operands are constant, put it in memory as well.
7842 FIXME: Avoid trying to fill vector constructors piece-meal.
7843 Output them with output_constant_def below unless we're sure
7844 they're zeros. This should go away when vector initializers
7845 are treated like VECTOR_CST instead of arrays. */
7846 if ((TREE_STATIC (exp)
7847 && ((mode == BLKmode
7848 && ! (target != 0 && safe_from_p (target, exp, 1)))
7849 || TREE_ADDRESSABLE (exp)
7850 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7851 && (! MOVE_BY_PIECES_P
7852 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7853 TYPE_ALIGN (type)))
7854 && ! mostly_zeros_p (exp))))
7855 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7856 && TREE_CONSTANT (exp)))
7858 rtx constructor;
7860 if (avoid_temp_mem)
7861 return NULL_RTX;
7863 constructor = expand_expr_constant (exp, 1, modifier);
7865 if (modifier != EXPAND_CONST_ADDRESS
7866 && modifier != EXPAND_INITIALIZER
7867 && modifier != EXPAND_SUM)
7868 constructor = validize_mem (constructor);
7870 return constructor;
7873 /* Handle calls that pass values in multiple non-contiguous
7874 locations. The Irix 6 ABI has examples of this. */
7875 if (target == 0 || ! safe_from_p (target, exp, 1)
7876 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7878 if (avoid_temp_mem)
7879 return NULL_RTX;
7881 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7884 store_constructor (exp, target, 0, int_expr_size (exp));
7885 return target;
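/* Minimal sketch of the fast path at the top of this function: a
   static, non-addressable, all-zero BLKmode CONSTRUCTOR with a usable
   TARGET never builds a temporary at all; it degenerates into a single
   clear_storage of expr_size (exp) bytes on TARGET.  */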
7889 /* expand_expr: generate code for computing expression EXP.
7890 An rtx for the computed value is returned. The value is never null.
7891 In the case of a void EXP, const0_rtx is returned.
7893 The value may be stored in TARGET if TARGET is nonzero.
7894 TARGET is just a suggestion; callers must assume that
7895 the rtx returned may not be the same as TARGET.
7897 If TARGET is CONST0_RTX, it means that the value will be ignored.
7899 If TMODE is not VOIDmode, it suggests generating the
7900 result in mode TMODE. But this is done only when convenient.
7901 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7902 TMODE is just a suggestion; callers must assume that
7903 the rtx returned may not have mode TMODE.
7905 Note that TARGET may have neither TMODE nor MODE. In that case, it
7906 probably will not be used.
7908 If MODIFIER is EXPAND_SUM then when EXP is an addition
7909 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7910 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7911 products as above, or REG or MEM, or constant.
7912 Ordinarily in such cases we would output mul or add instructions
7913 and then return a pseudo reg containing the sum.
7915 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7916 it also marks a label as absolutely required (it can't be dead).
7917 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7918 This is used for outputting expressions used in initializers.
7920 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7921 with a constant address even if that address is not normally legitimate.
7922 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7924 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7925 a call parameter. Such targets require special care as we haven't yet
7926 marked TARGET so that it's safe from being trashed by libcalls. We
7927 don't want to use TARGET for anything but the final result;
7928 intermediate values must go elsewhere. Additionally, calls to
7929 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7931 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7932 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7933 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7934 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7935 recursively.
7937 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7938 In this case, we don't adjust a returned MEM rtx that wouldn't be
7939 sufficiently aligned for its mode; instead, it's up to the caller
7940 to deal with it afterwards. This is used to make sure that unaligned
7941 base objects for which out-of-bounds accesses are supported, for
7942 example record types with trailing arrays, aren't realigned behind
7943 the back of the caller.
7944 The normal operating mode is to pass FALSE for this parameter. */
7947 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7948 enum expand_modifier modifier, rtx *alt_rtl,
7949 bool inner_reference_p)
7951 rtx ret;
7953 /* Handle ERROR_MARK before anybody tries to access its type. */
7954 if (TREE_CODE (exp) == ERROR_MARK
7955 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7957 ret = CONST0_RTX (tmode);
7958 return ret ? ret : const0_rtx;
7961 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7962 inner_reference_p);
7963 return ret;
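/* To illustrate the EXPAND_SUM contract documented above with a
   hypothetical "&arr[i]" whose elements are 4 bytes wide: the caller
   may get back a nest such as
     (plus (mult (reg i) (const_int 4)) (symbol_ref arr))
   instead of a pseudo holding the finished sum, and must run it
   through force_operand itself if a plain register is needed.  */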
7966 /* Try to expand the conditional expression which is represented by
7967 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7968 return the rtl reg which represents the result. Otherwise return
7969 NULL_RTX. */
7971 static rtx
7972 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7973 tree treeop1 ATTRIBUTE_UNUSED,
7974 tree treeop2 ATTRIBUTE_UNUSED)
7976 #ifdef HAVE_conditional_move
7977 rtx insn;
7978 rtx op00, op01, op1, op2;
7979 enum rtx_code comparison_code;
7980 enum machine_mode comparison_mode;
7981 gimple srcstmt;
7982 rtx temp;
7983 tree type = TREE_TYPE (treeop1);
7984 int unsignedp = TYPE_UNSIGNED (type);
7985 enum machine_mode mode = TYPE_MODE (type);
7986 enum machine_mode orig_mode = mode;
7988 /* If we cannot do a conditional move on the mode, try doing it
7989 with the promoted mode. */
7990 if (!can_conditionally_move_p (mode))
7992 mode = promote_mode (type, mode, &unsignedp);
7993 if (!can_conditionally_move_p (mode))
7994 return NULL_RTX;
7995 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7997 else
7998 temp = assign_temp (type, 0, 1);
8000 start_sequence ();
8001 expand_operands (treeop1, treeop2,
8002 temp, &op1, &op2, EXPAND_NORMAL);
8004 if (TREE_CODE (treeop0) == SSA_NAME
8005 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8007 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8008 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8009 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8010 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8011 comparison_mode = TYPE_MODE (type);
8012 unsignedp = TYPE_UNSIGNED (type);
8013 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8015 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8017 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8018 enum tree_code cmpcode = TREE_CODE (treeop0);
8019 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8020 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8021 unsignedp = TYPE_UNSIGNED (type);
8022 comparison_mode = TYPE_MODE (type);
8023 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8025 else
8027 op00 = expand_normal (treeop0);
8028 op01 = const0_rtx;
8029 comparison_code = NE;
8030 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8033 if (GET_MODE (op1) != mode)
8034 op1 = gen_lowpart (mode, op1);
8036 if (GET_MODE (op2) != mode)
8037 op2 = gen_lowpart (mode, op2);
8039 /* Try to emit the conditional move. */
8040 insn = emit_conditional_move (temp, comparison_code,
8041 op00, op01, comparison_mode,
8042 op1, op2, mode,
8043 unsignedp);
8045 /* If we could do the conditional move, emit the sequence,
8046 and return. */
8047 if (insn)
8049 rtx seq = get_insns ();
8050 end_sequence ();
8051 emit_insn (seq);
8052 return convert_modes (orig_mode, mode, temp, 0);
8055 /* Otherwise discard the sequence and fall back to code with
8056 branches. */
8057 end_sequence ();
8058 #endif
8059 return NULL_RTX;
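/* Sketch of what the routine above does for a hypothetical
   "x < y ? a : b" on a target with conditional moves: both arms are
   expanded into a temporary, the comparison is translated with
   convert_tree_comp_to_rtx, and emit_conditional_move tries to produce
   a single conditional move; if it cannot, the whole sequence is
   discarded and NULL_RTX tells the caller to fall back to branches.  */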
8063 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8064 enum expand_modifier modifier)
8066 rtx op0, op1, op2, temp;
8067 tree type;
8068 int unsignedp;
8069 enum machine_mode mode;
8070 enum tree_code code = ops->code;
8071 optab this_optab;
8072 rtx subtarget, original_target;
8073 int ignore;
8074 bool reduce_bit_field;
8075 location_t loc = ops->location;
8076 tree treeop0, treeop1, treeop2;
8077 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8078 ? reduce_to_bit_field_precision ((expr), \
8079 target, \
8080 type) \
8081 : (expr))
8083 type = ops->type;
8084 mode = TYPE_MODE (type);
8085 unsignedp = TYPE_UNSIGNED (type);
8087 treeop0 = ops->op0;
8088 treeop1 = ops->op1;
8089 treeop2 = ops->op2;
8091 /* We should be called only on simple (binary or unary) expressions,
8092 exactly those that are valid in gimple expressions that aren't
8093 GIMPLE_SINGLE_RHS (or invalid). */
8094 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8095 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8096 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8098 ignore = (target == const0_rtx
8099 || ((CONVERT_EXPR_CODE_P (code)
8100 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8101 && TREE_CODE (type) == VOID_TYPE));
8103 /* We should be called only if we need the result. */
8104 gcc_assert (!ignore);
8106 /* An operation in what may be a bit-field type needs the
8107 result to be reduced to the precision of the bit-field type,
8108 which is narrower than that of the type's mode. */
8109 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8110 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
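/* For example, a hypothetical 3-bit unsigned bit-field type computed
   in SImode satisfies the test above: the raw SImode result of an
   arithmetic operation must be reduced back to 3 bits of precision
   (see REDUCE_BIT_FIELD) before it is a valid value of the type.  */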
8112 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8113 target = 0;
8115 /* Use subtarget as the target for operand 0 of a binary operation. */
8116 subtarget = get_subtarget (target);
8117 original_target = target;
8119 switch (code)
8121 case NON_LVALUE_EXPR:
8122 case PAREN_EXPR:
8123 CASE_CONVERT:
8124 if (treeop0 == error_mark_node)
8125 return const0_rtx;
8127 if (TREE_CODE (type) == UNION_TYPE)
8129 tree valtype = TREE_TYPE (treeop0);
8131 /* If both input and output are BLKmode, this conversion isn't doing
8132 anything except possibly changing memory attribute. */
8133 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8135 rtx result = expand_expr (treeop0, target, tmode,
8136 modifier);
8138 result = copy_rtx (result);
8139 set_mem_attributes (result, type, 0);
8140 return result;
8143 if (target == 0)
8145 if (TYPE_MODE (type) != BLKmode)
8146 target = gen_reg_rtx (TYPE_MODE (type));
8147 else
8148 target = assign_temp (type, 1, 1);
8151 if (MEM_P (target))
8152 /* Store data into beginning of memory target. */
8153 store_expr (treeop0,
8154 adjust_address (target, TYPE_MODE (valtype), 0),
8155 modifier == EXPAND_STACK_PARM,
8156 false);
8158 else
8160 gcc_assert (REG_P (target));
8162 /* Store this field into a union of the proper type. */
8163 store_field (target,
8164 MIN ((int_size_in_bytes (TREE_TYPE
8165 (treeop0))
8166 * BITS_PER_UNIT),
8167 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8168 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8171 /* Return the entire union. */
8172 return target;
8175 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8177 op0 = expand_expr (treeop0, target, VOIDmode,
8178 modifier);
8180 /* If the signedness of the conversion differs and OP0 is
8181 a promoted SUBREG, clear that indication since we now
8182 have to do the proper extension. */
8183 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8184 && GET_CODE (op0) == SUBREG)
8185 SUBREG_PROMOTED_VAR_P (op0) = 0;
8187 return REDUCE_BIT_FIELD (op0);
8190 op0 = expand_expr (treeop0, NULL_RTX, mode,
8191 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8192 if (GET_MODE (op0) == mode)
8195 /* If OP0 is a constant, just convert it into the proper mode. */
8196 else if (CONSTANT_P (op0))
8198 tree inner_type = TREE_TYPE (treeop0);
8199 enum machine_mode inner_mode = GET_MODE (op0);
8201 if (inner_mode == VOIDmode)
8202 inner_mode = TYPE_MODE (inner_type);
8204 if (modifier == EXPAND_INITIALIZER)
8205 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8206 subreg_lowpart_offset (mode,
8207 inner_mode));
8208 else
8209 op0= convert_modes (mode, inner_mode, op0,
8210 TYPE_UNSIGNED (inner_type));
8213 else if (modifier == EXPAND_INITIALIZER)
8214 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8216 else if (target == 0)
8217 op0 = convert_to_mode (mode, op0,
8218 TYPE_UNSIGNED (TREE_TYPE
8219 (treeop0)));
8220 else
8222 convert_move (target, op0,
8223 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8224 op0 = target;
8227 return REDUCE_BIT_FIELD (op0);
8229 case ADDR_SPACE_CONVERT_EXPR:
8231 tree treeop0_type = TREE_TYPE (treeop0);
8232 addr_space_t as_to;
8233 addr_space_t as_from;
8235 gcc_assert (POINTER_TYPE_P (type));
8236 gcc_assert (POINTER_TYPE_P (treeop0_type));
8238 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8239 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8241 /* Conversions between pointers to the same address space should
8242 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8243 gcc_assert (as_to != as_from);
8245 /* Ask target code to handle conversion between pointers
8246 to overlapping address spaces. */
8247 if (targetm.addr_space.subset_p (as_to, as_from)
8248 || targetm.addr_space.subset_p (as_from, as_to))
8250 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8251 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8252 gcc_assert (op0);
8253 return op0;
8256 /* For disjoint address spaces, converting anything but
8257 a null pointer invokes undefined behaviour. We simply
8258 always return a null pointer here. */
8259 return CONST0_RTX (mode);
8262 case POINTER_PLUS_EXPR:
8263 /* Even though the sizetype mode and the pointer's mode can be different,
8264 expand is able to handle this correctly and get the correct result out
8265 of the PLUS_EXPR code. */
8266 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8267 if sizetype precision is smaller than pointer precision. */
8268 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8269 treeop1 = fold_convert_loc (loc, type,
8270 fold_convert_loc (loc, ssizetype,
8271 treeop1));
8272 /* If sizetype precision is larger than pointer precision, truncate the
8273 offset to have matching modes. */
8274 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8275 treeop1 = fold_convert_loc (loc, type, treeop1);
8277 case PLUS_EXPR:
8278 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8279 something else, make sure we add the register to the constant and
8280 then to the other thing. This case can occur during strength
8281 reduction and doing it this way will produce better code if the
8282 frame pointer or argument pointer is eliminated.
8284 fold-const.c will ensure that the constant is always in the inner
8285 PLUS_EXPR, so the only case we need to do anything about is if
8286 sp, ap, or fp is our second argument, in which case we must swap
8287 the innermost first argument and our second argument. */
8289 if (TREE_CODE (treeop0) == PLUS_EXPR
8290 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8291 && TREE_CODE (treeop1) == VAR_DECL
8292 && (DECL_RTL (treeop1) == frame_pointer_rtx
8293 || DECL_RTL (treeop1) == stack_pointer_rtx
8294 || DECL_RTL (treeop1) == arg_pointer_rtx))
8296 gcc_unreachable ();
8299 /* If the result is to be ptr_mode and we are adding an integer to
8300 something, we might be forming a constant. So try to use
8301 plus_constant. If it produces a sum and we can't accept it,
8302 use force_operand. This allows P = &ARR[const] to generate
8303 efficient code on machines where a SYMBOL_REF is not a valid
8304 address.
8306 If this is an EXPAND_SUM call, always return the sum. */
8307 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8308 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8310 if (modifier == EXPAND_STACK_PARM)
8311 target = 0;
8312 if (TREE_CODE (treeop0) == INTEGER_CST
8313 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8314 && TREE_CONSTANT (treeop1))
8316 rtx constant_part;
8318 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8319 EXPAND_SUM);
8320 /* Use immed_double_const to ensure that the constant is
8321 truncated according to the mode of OP1, then sign extended
8322 to a HOST_WIDE_INT. Using the constant directly can result
8323 in non-canonical RTL in a 64x32 cross compile. */
8324 constant_part
8325 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8326 (HOST_WIDE_INT) 0,
8327 TYPE_MODE (TREE_TYPE (treeop1)));
8328 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8329 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8330 op1 = force_operand (op1, target);
8331 return REDUCE_BIT_FIELD (op1);
8334 else if (TREE_CODE (treeop1) == INTEGER_CST
8335 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8336 && TREE_CONSTANT (treeop0))
8338 rtx constant_part;
8340 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8341 (modifier == EXPAND_INITIALIZER
8342 ? EXPAND_INITIALIZER : EXPAND_SUM));
8343 if (! CONSTANT_P (op0))
8345 op1 = expand_expr (treeop1, NULL_RTX,
8346 VOIDmode, modifier);
8347 /* Return a PLUS if modifier says it's OK. */
8348 if (modifier == EXPAND_SUM
8349 || modifier == EXPAND_INITIALIZER)
8350 return simplify_gen_binary (PLUS, mode, op0, op1);
8351 goto binop2;
8353 /* Use immed_double_const to ensure that the constant is
8354 truncated according to the mode of OP1, then sign extended
8355 to a HOST_WIDE_INT. Using the constant directly can result
8356 in non-canonical RTL in a 64x32 cross compile. */
8357 constant_part
8358 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8359 (HOST_WIDE_INT) 0,
8360 TYPE_MODE (TREE_TYPE (treeop0)));
8361 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8362 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8363 op0 = force_operand (op0, target);
8364 return REDUCE_BIT_FIELD (op0);
8368 /* Use TER to expand pointer addition of a negated value
8369 as pointer subtraction. */
8370 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8371 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8372 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8373 && TREE_CODE (treeop1) == SSA_NAME
8374 && TYPE_MODE (TREE_TYPE (treeop0))
8375 == TYPE_MODE (TREE_TYPE (treeop1)))
8377 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8378 if (def)
8380 treeop1 = gimple_assign_rhs1 (def);
8381 code = MINUS_EXPR;
8382 goto do_minus;
8386 /* No sense saving up arithmetic to be done
8387 if it's all in the wrong mode to form part of an address.
8388 And force_operand won't know whether to sign-extend or
8389 zero-extend. */
8390 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8391 || mode != ptr_mode)
8393 expand_operands (treeop0, treeop1,
8394 subtarget, &op0, &op1, EXPAND_NORMAL);
8395 if (op0 == const0_rtx)
8396 return op1;
8397 if (op1 == const0_rtx)
8398 return op0;
8399 goto binop2;
8402 expand_operands (treeop0, treeop1,
8403 subtarget, &op0, &op1, modifier);
8404 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8406 case MINUS_EXPR:
8407 do_minus:
8408 /* For initializers, we are allowed to return a MINUS of two
8409 symbolic constants. Here we handle all cases when both operands
8410 are constant. */
8411 /* Handle difference of two symbolic constants,
8412 for the sake of an initializer. */
8413 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8414 && really_constant_p (treeop0)
8415 && really_constant_p (treeop1))
8417 expand_operands (treeop0, treeop1,
8418 NULL_RTX, &op0, &op1, modifier);
8420 /* If the last operand is a CONST_INT, use plus_constant of
8421 the negated constant. Else make the MINUS. */
8422 if (CONST_INT_P (op1))
8423 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8424 -INTVAL (op1)));
8425 else
8426 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8429 /* No sense saving up arithmetic to be done
8430 if it's all in the wrong mode to form part of an address.
8431 And force_operand won't know whether to sign-extend or
8432 zero-extend. */
8433 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8434 || mode != ptr_mode)
8435 goto binop;
8437 expand_operands (treeop0, treeop1,
8438 subtarget, &op0, &op1, modifier);
8440 /* Convert A - const to A + (-const). */
8441 if (CONST_INT_P (op1))
8443 op1 = negate_rtx (mode, op1);
8444 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8447 goto binop2;
8449 case WIDEN_MULT_PLUS_EXPR:
8450 case WIDEN_MULT_MINUS_EXPR:
8451 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8452 op2 = expand_normal (treeop2);
8453 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8454 target, unsignedp);
8455 return target;
8457 case WIDEN_MULT_EXPR:
8458 /* If first operand is constant, swap them.
8459 Thus the following special case checks need only
8460 check the second operand. */
8461 if (TREE_CODE (treeop0) == INTEGER_CST)
8463 tree t1 = treeop0;
8464 treeop0 = treeop1;
8465 treeop1 = t1;
8468 /* First, check if we have a multiplication of one signed and one
8469 unsigned operand. */
8470 if (TREE_CODE (treeop1) != INTEGER_CST
8471 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8472 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8474 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8475 this_optab = usmul_widen_optab;
8476 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8477 != CODE_FOR_nothing)
8479 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8480 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8481 EXPAND_NORMAL);
8482 else
8483 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8484 EXPAND_NORMAL);
8485 /* op0 and op1 might still be constant, despite the above
8486 != INTEGER_CST check. Handle it. */
8487 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8489 op0 = convert_modes (innermode, mode, op0, true);
8490 op1 = convert_modes (innermode, mode, op1, false);
8491 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8492 target, unsignedp));
8494 goto binop3;
8497 /* Check for a multiplication with matching signedness. */
8498 else if ((TREE_CODE (treeop1) == INTEGER_CST
8499 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8500 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8501 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8503 tree op0type = TREE_TYPE (treeop0);
8504 enum machine_mode innermode = TYPE_MODE (op0type);
8505 bool zextend_p = TYPE_UNSIGNED (op0type);
8506 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8507 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8509 if (TREE_CODE (treeop0) != INTEGER_CST)
8511 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8512 != CODE_FOR_nothing)
8514 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8515 EXPAND_NORMAL);
8516 /* op0 and op1 might still be constant, despite the above
8517 != INTEGER_CST check. Handle it. */
8518 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8520 widen_mult_const:
8521 op0 = convert_modes (innermode, mode, op0, zextend_p);
8523 = convert_modes (innermode, mode, op1,
8524 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8525 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8526 target,
8527 unsignedp));
8529 temp = expand_widening_mult (mode, op0, op1, target,
8530 unsignedp, this_optab);
8531 return REDUCE_BIT_FIELD (temp);
8533 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8534 != CODE_FOR_nothing
8535 && innermode == word_mode)
8537 rtx htem, hipart;
8538 op0 = expand_normal (treeop0);
8539 if (TREE_CODE (treeop1) == INTEGER_CST)
8540 op1 = convert_modes (innermode, mode,
8541 expand_normal (treeop1),
8542 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8543 else
8544 op1 = expand_normal (treeop1);
8545 /* op0 and op1 might still be constant, despite the above
8546 != INTEGER_CST check. Handle it. */
8547 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8548 goto widen_mult_const;
8549 temp = expand_binop (mode, other_optab, op0, op1, target,
8550 unsignedp, OPTAB_LIB_WIDEN);
8551 hipart = gen_highpart (innermode, temp);
8552 htem = expand_mult_highpart_adjust (innermode, hipart,
8553 op0, op1, hipart,
8554 zextend_p);
8555 if (htem != hipart)
8556 emit_move_insn (hipart, htem);
8557 return REDUCE_BIT_FIELD (temp);
8561 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8562 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8563 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8564 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8566 case FMA_EXPR:
8568 optab opt = fma_optab;
8569 gimple def0, def2;
8571 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8572 call. */
8573 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8575 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8576 tree call_expr;
8578 gcc_assert (fn != NULL_TREE);
8579 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8580 return expand_builtin (call_expr, target, subtarget, mode, false);
8583 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8584 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8586 op0 = op2 = NULL;
8588 if (def0 && def2
8589 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8591 opt = fnms_optab;
8592 op0 = expand_normal (gimple_assign_rhs1 (def0));
8593 op2 = expand_normal (gimple_assign_rhs1 (def2));
8595 else if (def0
8596 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8598 opt = fnma_optab;
8599 op0 = expand_normal (gimple_assign_rhs1 (def0));
8601 else if (def2
8602 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8604 opt = fms_optab;
8605 op2 = expand_normal (gimple_assign_rhs1 (def2));
8608 if (op0 == NULL)
8609 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8610 if (op2 == NULL)
8611 op2 = expand_normal (treeop2);
8612 op1 = expand_normal (treeop1);
8614 return expand_ternary_op (TYPE_MODE (type), opt,
8615 op0, op1, op2, target, 0);
8618 case MULT_EXPR:
8619 /* If this is a fixed-point operation, then we cannot use the code
8620 below because "expand_mult" doesn't support sat/no-sat fixed-point
8621 multiplications. */
8622 if (ALL_FIXED_POINT_MODE_P (mode))
8623 goto binop;
8625 /* If first operand is constant, swap them.
8626 Thus the following special case checks need only
8627 check the second operand. */
8628 if (TREE_CODE (treeop0) == INTEGER_CST)
8630 tree t1 = treeop0;
8631 treeop0 = treeop1;
8632 treeop1 = t1;
8635 /* Attempt to return something suitable for generating an
8636 indexed address, for machines that support that. */
8638 if (modifier == EXPAND_SUM && mode == ptr_mode
8639 && tree_fits_shwi_p (treeop1))
8641 tree exp1 = treeop1;
8643 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8644 EXPAND_SUM);
8646 if (!REG_P (op0))
8647 op0 = force_operand (op0, NULL_RTX);
8648 if (!REG_P (op0))
8649 op0 = copy_to_mode_reg (mode, op0);
8651 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8652 gen_int_mode (tree_to_shwi (exp1),
8653 TYPE_MODE (TREE_TYPE (exp1)))));
8656 if (modifier == EXPAND_STACK_PARM)
8657 target = 0;
8659 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8660 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8662 case TRUNC_DIV_EXPR:
8663 case FLOOR_DIV_EXPR:
8664 case CEIL_DIV_EXPR:
8665 case ROUND_DIV_EXPR:
8666 case EXACT_DIV_EXPR:
8667 /* If this is a fixed-point operation, then we cannot use the code
8668 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8669 divisions. */
8670 if (ALL_FIXED_POINT_MODE_P (mode))
8671 goto binop;
8673 if (modifier == EXPAND_STACK_PARM)
8674 target = 0;
8675 /* Possible optimization: compute the dividend with EXPAND_SUM
8676 then if the divisor is constant can optimize the case
8677 where some terms of the dividend have coeffs divisible by it. */
8678 expand_operands (treeop0, treeop1,
8679 subtarget, &op0, &op1, EXPAND_NORMAL);
8680 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8682 case RDIV_EXPR:
8683 goto binop;
8685 case MULT_HIGHPART_EXPR:
8686 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8687 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8688 gcc_assert (temp);
8689 return temp;
8691 case TRUNC_MOD_EXPR:
8692 case FLOOR_MOD_EXPR:
8693 case CEIL_MOD_EXPR:
8694 case ROUND_MOD_EXPR:
8695 if (modifier == EXPAND_STACK_PARM)
8696 target = 0;
8697 expand_operands (treeop0, treeop1,
8698 subtarget, &op0, &op1, EXPAND_NORMAL);
8699 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8701 case FIXED_CONVERT_EXPR:
8702 op0 = expand_normal (treeop0);
8703 if (target == 0 || modifier == EXPAND_STACK_PARM)
8704 target = gen_reg_rtx (mode);
8706 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8707 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8708 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8709 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8710 else
8711 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8712 return target;
8714 case FIX_TRUNC_EXPR:
8715 op0 = expand_normal (treeop0);
8716 if (target == 0 || modifier == EXPAND_STACK_PARM)
8717 target = gen_reg_rtx (mode);
8718 expand_fix (target, op0, unsignedp);
8719 return target;
8721 case FLOAT_EXPR:
8722 op0 = expand_normal (treeop0);
8723 if (target == 0 || modifier == EXPAND_STACK_PARM)
8724 target = gen_reg_rtx (mode);
8725 /* expand_float can't figure out what to do if FROM has VOIDmode.
8726 So give it the correct mode. With -O, cse will optimize this. */
8727 if (GET_MODE (op0) == VOIDmode)
8728 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8729 op0);
8730 expand_float (target, op0,
8731 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8732 return target;
8734 case NEGATE_EXPR:
8735 op0 = expand_expr (treeop0, subtarget,
8736 VOIDmode, EXPAND_NORMAL);
8737 if (modifier == EXPAND_STACK_PARM)
8738 target = 0;
8739 temp = expand_unop (mode,
8740 optab_for_tree_code (NEGATE_EXPR, type,
8741 optab_default),
8742 op0, target, 0);
8743 gcc_assert (temp);
8744 return REDUCE_BIT_FIELD (temp);
8746 case ABS_EXPR:
8747 op0 = expand_expr (treeop0, subtarget,
8748 VOIDmode, EXPAND_NORMAL);
8749 if (modifier == EXPAND_STACK_PARM)
8750 target = 0;
8752 /* ABS_EXPR is not valid for complex arguments. */
8753 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8754 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8756 /* Unsigned abs is simply the operand. Testing here means we don't
8757 risk generating incorrect code below. */
8758 if (TYPE_UNSIGNED (type))
8759 return op0;
8761 return expand_abs (mode, op0, target, unsignedp,
8762 safe_from_p (target, treeop0, 1));
8764 case MAX_EXPR:
8765 case MIN_EXPR:
8766 target = original_target;
8767 if (target == 0
8768 || modifier == EXPAND_STACK_PARM
8769 || (MEM_P (target) && MEM_VOLATILE_P (target))
8770 || GET_MODE (target) != mode
8771 || (REG_P (target)
8772 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8773 target = gen_reg_rtx (mode);
8774 expand_operands (treeop0, treeop1,
8775 target, &op0, &op1, EXPAND_NORMAL);
8777 /* First try to do it with a special MIN or MAX instruction.
8778 If that does not win, use a conditional jump to select the proper
8779 value. */
8780 this_optab = optab_for_tree_code (code, type, optab_default);
8781 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8782 OPTAB_WIDEN);
8783 if (temp != 0)
8784 return temp;
8786 /* At this point, a MEM target is no longer useful; we will get better
8787 code without it. */
8789 if (! REG_P (target))
8790 target = gen_reg_rtx (mode);
8792 /* If op1 was placed in target, swap op0 and op1. */
8793 if (target != op0 && target == op1)
8795 temp = op0;
8796 op0 = op1;
8797 op1 = temp;
8800 /* We generate better code and avoid problems with op1 mentioning
8801 target by forcing op1 into a pseudo if it isn't a constant. */
8802 if (! CONSTANT_P (op1))
8803 op1 = force_reg (mode, op1);
8806 enum rtx_code comparison_code;
8807 rtx cmpop1 = op1;
8809 if (code == MAX_EXPR)
8810 comparison_code = unsignedp ? GEU : GE;
8811 else
8812 comparison_code = unsignedp ? LEU : LE;
8814 /* Canonicalize to comparisons against 0. */
8815 if (op1 == const1_rtx)
8817 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8818 or (a != 0 ? a : 1) for unsigned.
8819 For MIN we are safe converting (a <= 1 ? a : 1)
8820 into (a <= 0 ? a : 1) */
8821 cmpop1 = const0_rtx;
8822 if (code == MAX_EXPR)
8823 comparison_code = unsignedp ? NE : GT;
8825 if (op1 == constm1_rtx && !unsignedp)
8827 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8828 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8829 cmpop1 = const0_rtx;
8830 if (code == MIN_EXPR)
8831 comparison_code = LT;
8833 #ifdef HAVE_conditional_move
8834 /* Use a conditional move if possible. */
8835 if (can_conditionally_move_p (mode))
8837 rtx insn;
8839 start_sequence ();
8841 /* Try to emit the conditional move. */
8842 insn = emit_conditional_move (target, comparison_code,
8843 op0, cmpop1, mode,
8844 op0, op1, mode,
8845 unsignedp);
8847 /* If we could do the conditional move, emit the sequence,
8848 and return. */
8849 if (insn)
8851 rtx seq = get_insns ();
8852 end_sequence ();
8853 emit_insn (seq);
8854 return target;
8857 /* Otherwise discard the sequence and fall back to code with
8858 branches. */
8859 end_sequence ();
8861 #endif
8862 if (target != op0)
8863 emit_move_insn (target, op0);
8865 temp = gen_label_rtx ();
8866 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8867 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8868 -1);
8870 emit_move_insn (target, op1);
8871 emit_label (temp);
8872 return target;
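/* Summarizing the MIN/MAX strategy above: first try a native min/max
   instruction through the optab, then a conditional move where the
   target has one, and only as a last resort emit the explicit
   compare-and-branch over a move that ends here.  */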
8874 case BIT_NOT_EXPR:
8875 op0 = expand_expr (treeop0, subtarget,
8876 VOIDmode, EXPAND_NORMAL);
8877 if (modifier == EXPAND_STACK_PARM)
8878 target = 0;
8879 /* In case we have to reduce the result to bitfield precision
8880 for unsigned bitfield expand this as XOR with a proper constant
8881 instead. */
8882 if (reduce_bit_field && TYPE_UNSIGNED (type))
8883 temp = expand_binop (mode, xor_optab, op0,
8884 immed_double_int_const
8885 (double_int::mask (TYPE_PRECISION (type)), mode),
8886 target, 1, OPTAB_LIB_WIDEN);
8887 else
8888 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8889 gcc_assert (temp);
8890 return temp;
8892 /* ??? Can optimize bitwise operations with one arg constant.
8893 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8894 and (a bitwise1 b) bitwise2 b (etc)
8895 but that is probably not worth while. */
8897 case BIT_AND_EXPR:
8898 case BIT_IOR_EXPR:
8899 case BIT_XOR_EXPR:
8900 goto binop;
8902 case LROTATE_EXPR:
8903 case RROTATE_EXPR:
8904 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8905 || (GET_MODE_PRECISION (TYPE_MODE (type))
8906 == TYPE_PRECISION (type)));
8907 /* fall through */
8909 case LSHIFT_EXPR:
8910 case RSHIFT_EXPR:
8911 /* If this is a fixed-point operation, then we cannot use the code
8912 below because "expand_shift" doesn't support sat/no-sat fixed-point
8913 shifts. */
8914 if (ALL_FIXED_POINT_MODE_P (mode))
8915 goto binop;
8917 if (! safe_from_p (subtarget, treeop1, 1))
8918 subtarget = 0;
8919 if (modifier == EXPAND_STACK_PARM)
8920 target = 0;
8921 op0 = expand_expr (treeop0, subtarget,
8922 VOIDmode, EXPAND_NORMAL);
8923 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8924 unsignedp);
8925 if (code == LSHIFT_EXPR)
8926 temp = REDUCE_BIT_FIELD (temp);
8927 return temp;
8929 /* Could determine the answer when only additive constants differ. Also,
8930 the addition of one can be handled by changing the condition. */
8931 case LT_EXPR:
8932 case LE_EXPR:
8933 case GT_EXPR:
8934 case GE_EXPR:
8935 case EQ_EXPR:
8936 case NE_EXPR:
8937 case UNORDERED_EXPR:
8938 case ORDERED_EXPR:
8939 case UNLT_EXPR:
8940 case UNLE_EXPR:
8941 case UNGT_EXPR:
8942 case UNGE_EXPR:
8943 case UNEQ_EXPR:
8944 case LTGT_EXPR:
8945 temp = do_store_flag (ops,
8946 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8947 tmode != VOIDmode ? tmode : mode);
8948 if (temp)
8949 return temp;
8951 /* Use a compare and a jump for BLKmode comparisons, or for function
8952 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8954 if ((target == 0
8955 || modifier == EXPAND_STACK_PARM
8956 || ! safe_from_p (target, treeop0, 1)
8957 || ! safe_from_p (target, treeop1, 1)
8958 /* Make sure we don't have a hard reg (such as function's return
8959 value) live across basic blocks, if not optimizing. */
8960 || (!optimize && REG_P (target)
8961 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8962 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8964 emit_move_insn (target, const0_rtx);
8966 op1 = gen_label_rtx ();
8967 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8969 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8970 emit_move_insn (target, constm1_rtx);
8971 else
8972 emit_move_insn (target, const1_rtx);
8974 emit_label (op1);
8975 return target;
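/* In the fallback above, when do_store_flag cannot compute the flag
   value directly, TARGET is first cleared, jumpifnot_1 branches around
   the true-path store when the comparison is false, and the stored
   value is 1 (or -1 for a signed 1-bit type).  */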
8977 case COMPLEX_EXPR:
8978 /* Get the rtx code of the operands. */
8979 op0 = expand_normal (treeop0);
8980 op1 = expand_normal (treeop1);
8982 if (!target)
8983 target = gen_reg_rtx (TYPE_MODE (type));
8984 else
8985 /* If target overlaps with op1, then either we need to force
8986 op1 into a pseudo (if target also overlaps with op0),
8987 or write the complex parts in reverse order. */
8988 switch (GET_CODE (target))
8990 case CONCAT:
8991 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8993 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8995 complex_expr_force_op1:
8996 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8997 emit_move_insn (temp, op1);
8998 op1 = temp;
8999 break;
9001 complex_expr_swap_order:
9002 /* Move the imaginary (op1) and real (op0) parts to their
9003 location. */
9004 write_complex_part (target, op1, true);
9005 write_complex_part (target, op0, false);
9007 return target;
9009 break;
9010 case MEM:
9011 temp = adjust_address_nv (target,
9012 GET_MODE_INNER (GET_MODE (target)), 0);
9013 if (reg_overlap_mentioned_p (temp, op1))
9015 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9016 temp = adjust_address_nv (target, imode,
9017 GET_MODE_SIZE (imode));
9018 if (reg_overlap_mentioned_p (temp, op0))
9019 goto complex_expr_force_op1;
9020 goto complex_expr_swap_order;
9022 break;
9023 default:
9024 if (reg_overlap_mentioned_p (target, op1))
9026 if (reg_overlap_mentioned_p (target, op0))
9027 goto complex_expr_force_op1;
9028 goto complex_expr_swap_order;
9030 break;
9033 /* Move the real (op0) and imaginary (op1) parts to their location. */
9034 write_complex_part (target, op0, false);
9035 write_complex_part (target, op1, true);
9037 return target;
9039 case WIDEN_SUM_EXPR:
9041 tree oprnd0 = treeop0;
9042 tree oprnd1 = treeop1;
9044 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9045 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9046 target, unsignedp);
9047 return target;
9050 case REDUC_MAX_EXPR:
9051 case REDUC_MIN_EXPR:
9052 case REDUC_PLUS_EXPR:
9054 op0 = expand_normal (treeop0);
9055 this_optab = optab_for_tree_code (code, type, optab_default);
9056 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9057 gcc_assert (temp);
9058 return temp;
9061 case VEC_LSHIFT_EXPR:
9062 case VEC_RSHIFT_EXPR:
9064 target = expand_vec_shift_expr (ops, target);
9065 return target;
9068 case VEC_UNPACK_HI_EXPR:
9069 case VEC_UNPACK_LO_EXPR:
9071 op0 = expand_normal (treeop0);
9072 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9073 target, unsignedp);
9074 gcc_assert (temp);
9075 return temp;
9078 case VEC_UNPACK_FLOAT_HI_EXPR:
9079 case VEC_UNPACK_FLOAT_LO_EXPR:
9081 op0 = expand_normal (treeop0);
9082 /* The signedness is determined from input operand. */
9083 temp = expand_widen_pattern_expr
9084 (ops, op0, NULL_RTX, NULL_RTX,
9085 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9087 gcc_assert (temp);
9088 return temp;
9091 case VEC_WIDEN_MULT_HI_EXPR:
9092 case VEC_WIDEN_MULT_LO_EXPR:
9093 case VEC_WIDEN_MULT_EVEN_EXPR:
9094 case VEC_WIDEN_MULT_ODD_EXPR:
9095 case VEC_WIDEN_LSHIFT_HI_EXPR:
9096 case VEC_WIDEN_LSHIFT_LO_EXPR:
9097 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9098 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9099 target, unsignedp);
9100 gcc_assert (target);
9101 return target;
9103 case VEC_PACK_TRUNC_EXPR:
9104 case VEC_PACK_SAT_EXPR:
9105 case VEC_PACK_FIX_TRUNC_EXPR:
9106 mode = TYPE_MODE (TREE_TYPE (treeop0));
9107 goto binop;
9109 case VEC_PERM_EXPR:
9110 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9111 op2 = expand_normal (treeop2);
9113 /* Careful here: if the target doesn't support integral vector modes,
9114 a constant selection vector could wind up smooshed into a normal
9115 integral constant. */
9116 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9118 tree sel_type = TREE_TYPE (treeop2);
9119 enum machine_mode vmode
9120 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9121 TYPE_VECTOR_SUBPARTS (sel_type));
9122 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9123 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9124 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9126 else
9127 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9129 temp = expand_vec_perm (mode, op0, op1, op2, target);
9130 gcc_assert (temp);
9131 return temp;
9133 case DOT_PROD_EXPR:
9135 tree oprnd0 = treeop0;
9136 tree oprnd1 = treeop1;
9137 tree oprnd2 = treeop2;
9138 rtx op2;
9140 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9141 op2 = expand_normal (oprnd2);
9142 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9143 target, unsignedp);
9144 return target;
9147 case REALIGN_LOAD_EXPR:
9149 tree oprnd0 = treeop0;
9150 tree oprnd1 = treeop1;
9151 tree oprnd2 = treeop2;
9152 rtx op2;
9154 this_optab = optab_for_tree_code (code, type, optab_default);
9155 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9156 op2 = expand_normal (oprnd2);
9157 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9158 target, unsignedp);
9159 gcc_assert (temp);
9160 return temp;
9163 case COND_EXPR:
9164 /* A COND_EXPR with its type being VOID_TYPE represents a
9165 conditional jump and is handled in
9166 expand_gimple_cond_expr. */
9167 gcc_assert (!VOID_TYPE_P (type));
9169 /* Note that COND_EXPRs whose type is a structure or union
9170 are required to be constructed to contain assignments of
9171 a temporary variable, so that we can evaluate them here
9172 for side effect only. If type is void, we must do likewise. */
9174 gcc_assert (!TREE_ADDRESSABLE (type)
9175 && !ignore
9176 && TREE_TYPE (treeop1) != void_type_node
9177 && TREE_TYPE (treeop2) != void_type_node);
9179 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9180 if (temp)
9181 return temp;
9183 /* If we are not to produce a result, we have no target. Otherwise,
9184 if a target was specified use it; it will not be used as an
9185 intermediate target unless it is safe. If no target, use a
9186 temporary. */
9188 if (modifier != EXPAND_STACK_PARM
9189 && original_target
9190 && safe_from_p (original_target, treeop0, 1)
9191 && GET_MODE (original_target) == mode
9192 && !MEM_P (original_target))
9193 temp = original_target;
9194 else
9195 temp = assign_temp (type, 0, 1);
9197 do_pending_stack_adjust ();
9198 NO_DEFER_POP;
9199 op0 = gen_label_rtx ();
9200 op1 = gen_label_rtx ();
9201 jumpifnot (treeop0, op0, -1);
9202 store_expr (treeop1, temp,
9203 modifier == EXPAND_STACK_PARM,
9204 false);
9206 emit_jump_insn (gen_jump (op1));
9207 emit_barrier ();
9208 emit_label (op0);
9209 store_expr (treeop2, temp,
9210 modifier == EXPAND_STACK_PARM,
9211 false);
9213 emit_label (op1);
9214 OK_DEFER_POP;
9215 return temp;
9217 case VEC_COND_EXPR:
9218 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9219 return target;
9221 default:
9222 gcc_unreachable ();
9225 /* Here to do an ordinary binary operator. */
9226 binop:
9227 expand_operands (treeop0, treeop1,
9228 subtarget, &op0, &op1, EXPAND_NORMAL);
9229 binop2:
9230 this_optab = optab_for_tree_code (code, type, optab_default);
9231 binop3:
9232 if (modifier == EXPAND_STACK_PARM)
9233 target = 0;
9234 temp = expand_binop (mode, this_optab, op0, op1, target,
9235 unsignedp, OPTAB_LIB_WIDEN);
9236 gcc_assert (temp);
9237 /* Bitwise operations do not need bitfield reduction as we expect their
9238 operands being properly truncated. */
9239 if (code == BIT_XOR_EXPR
9240 || code == BIT_AND_EXPR
9241 || code == BIT_IOR_EXPR)
9242 return temp;
9243 return REDUCE_BIT_FIELD (temp);
9245 #undef REDUCE_BIT_FIELD
9248 /* Return TRUE if expression STMT is suitable for replacement.
9249 Never consider memory loads as replaceable, because those don't ever lead
9250 into constant expressions. */
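/* E.g. a single-use arithmetic definition such as tmp_1 = x_2 + 1 is
   typically replaceable (subject to the checks in ssa_is_replaceable_p),
   whereas a load such as tmp_1 = *p_2 is never considered replaceable
   here.  */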
9252 static bool
9253 stmt_is_replaceable_p (gimple stmt)
9255 if (ssa_is_replaceable_p (stmt))
9257 /* Don't move around loads. */
9258 if (!gimple_assign_single_p (stmt)
9259 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9260 return true;
9262 return false;
9266 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9267 enum expand_modifier modifier, rtx *alt_rtl,
9268 bool inner_reference_p)
9270 rtx op0, op1, temp, decl_rtl;
9271 tree type;
9272 int unsignedp;
9273 enum machine_mode mode;
9274 enum tree_code code = TREE_CODE (exp);
9275 rtx subtarget, original_target;
9276 int ignore;
9277 tree context;
9278 bool reduce_bit_field;
9279 location_t loc = EXPR_LOCATION (exp);
9280 struct separate_ops ops;
9281 tree treeop0, treeop1, treeop2;
9282 tree ssa_name = NULL_TREE;
9283 gimple g;
9285 type = TREE_TYPE (exp);
9286 mode = TYPE_MODE (type);
9287 unsignedp = TYPE_UNSIGNED (type);
9289 treeop0 = treeop1 = treeop2 = NULL_TREE;
9290 if (!VL_EXP_CLASS_P (exp))
9291 switch (TREE_CODE_LENGTH (code))
9293 default:
9294 case 3: treeop2 = TREE_OPERAND (exp, 2);
9295 case 2: treeop1 = TREE_OPERAND (exp, 1);
9296 case 1: treeop0 = TREE_OPERAND (exp, 0);
9297 case 0: break;
9299 ops.code = code;
9300 ops.type = type;
9301 ops.op0 = treeop0;
9302 ops.op1 = treeop1;
9303 ops.op2 = treeop2;
9304 ops.location = loc;
9306 ignore = (target == const0_rtx
9307 || ((CONVERT_EXPR_CODE_P (code)
9308 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9309 && TREE_CODE (type) == VOID_TYPE));
9311 /* An operation in what may be a bit-field type needs the
9312 result to be reduced to the precision of the bit-field type,
9313 which is narrower than that of the type's mode. */
9314 reduce_bit_field = (!ignore
9315 && INTEGRAL_TYPE_P (type)
9316 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9318 /* If we are going to ignore this result, we need only do something
9319 if there is a side-effect somewhere in the expression. If there
9320 is, short-circuit the most common cases here. Note that we must
9321 not call expand_expr with anything but const0_rtx in case this
9322 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9324 if (ignore)
9326 if (! TREE_SIDE_EFFECTS (exp))
9327 return const0_rtx;
9329 /* Ensure we reference a volatile object even if value is ignored, but
9330 don't do this if all we are doing is taking its address. */
9331 if (TREE_THIS_VOLATILE (exp)
9332 && TREE_CODE (exp) != FUNCTION_DECL
9333 && mode != VOIDmode && mode != BLKmode
9334 && modifier != EXPAND_CONST_ADDRESS)
9336 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9337 if (MEM_P (temp))
9338 copy_to_reg (temp);
9339 return const0_rtx;
9342 if (TREE_CODE_CLASS (code) == tcc_unary
9343 || code == BIT_FIELD_REF
9344 || code == COMPONENT_REF
9345 || code == INDIRECT_REF)
9346 return expand_expr (treeop0, const0_rtx, VOIDmode,
9347 modifier);
9349 else if (TREE_CODE_CLASS (code) == tcc_binary
9350 || TREE_CODE_CLASS (code) == tcc_comparison
9351 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9353 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9354 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9355 return const0_rtx;
9358 target = 0;
9361 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9362 target = 0;
9364 /* Use subtarget as the target for operand 0 of a binary operation. */
9365 subtarget = get_subtarget (target);
9366 original_target = target;
9368 switch (code)
9370 case LABEL_DECL:
9372 tree function = decl_function_context (exp);
9374 temp = label_rtx (exp);
9375 temp = gen_rtx_LABEL_REF (Pmode, temp);
9377 if (function != current_function_decl
9378 && function != 0)
9379 LABEL_REF_NONLOCAL_P (temp) = 1;
9381 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9382 return temp;
9385 case SSA_NAME:
9386 /* ??? ivopts calls expander, without any preparation from
9387 out-of-ssa. So fake instructions as if this was an access to the
9388 base variable. This unnecessarily allocates a pseudo, see how we can
9389 reuse it, if partition base vars have it set already. */
9390 if (!currently_expanding_to_rtl)
9392 tree var = SSA_NAME_VAR (exp);
9393 if (var && DECL_RTL_SET_P (var))
9394 return DECL_RTL (var);
9395 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9396 LAST_VIRTUAL_REGISTER + 1);
9399 g = get_gimple_for_ssa_name (exp);
9400 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9401 if (g == NULL
9402 && modifier == EXPAND_INITIALIZER
9403 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9404 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9405 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9406 g = SSA_NAME_DEF_STMT (exp);
9407 if (g)
9409 rtx r;
9410 ops.code = gimple_assign_rhs_code (g);
9411 switch (get_gimple_rhs_class (ops.code))
9413 case GIMPLE_TERNARY_RHS:
9414 ops.op2 = gimple_assign_rhs3 (g);
9415 /* Fallthru */
9416 case GIMPLE_BINARY_RHS:
9417 ops.op1 = gimple_assign_rhs2 (g);
9418 /* Fallthru */
9419 case GIMPLE_UNARY_RHS:
9420 ops.op0 = gimple_assign_rhs1 (g);
9421 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9422 ops.location = gimple_location (g);
9423 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9424 break;
9425 case GIMPLE_SINGLE_RHS:
9427 location_t saved_loc = curr_insn_location ();
9428 set_curr_insn_location (gimple_location (g));
9429 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9430 tmode, modifier, NULL, inner_reference_p);
9431 set_curr_insn_location (saved_loc);
9432 break;
9434 default:
9435 gcc_unreachable ();
9437 if (REG_P (r) && !REG_EXPR (r))
9438 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9439 return r;
9442 ssa_name = exp;
9443 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9444 exp = SSA_NAME_VAR (ssa_name);
9445 goto expand_decl_rtl;
9447 case PARM_DECL:
9448 case VAR_DECL:
9449 /* If a static var's type was incomplete when the decl was written,
9450 but the type is complete now, lay out the decl now. */
9451 if (DECL_SIZE (exp) == 0
9452 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9453 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9454 layout_decl (exp, 0);
9456 /* ... fall through ... */
9458 case FUNCTION_DECL:
9459 case RESULT_DECL:
9460 decl_rtl = DECL_RTL (exp);
9461 expand_decl_rtl:
9462 gcc_assert (decl_rtl);
9463 decl_rtl = copy_rtx (decl_rtl);
9464 /* Record writes to register variables. */
9465 if (modifier == EXPAND_WRITE
9466 && REG_P (decl_rtl)
9467 && HARD_REGISTER_P (decl_rtl))
9468 add_to_hard_reg_set (&crtl->asm_clobbers,
9469 GET_MODE (decl_rtl), REGNO (decl_rtl));
9471 /* Ensure the variable is marked as used even if it doesn't go through
9472 a parser. If it hasn't been used yet, write out an external
9473 definition. */
9474 TREE_USED (exp) = 1;
9476 /* Show we haven't gotten RTL for this yet. */
9477 temp = 0;
9479 /* Variables inherited from containing functions should have
9480 been lowered by this point. */
9481 context = decl_function_context (exp);
9482 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9483 || context == current_function_decl
9484 || TREE_STATIC (exp)
9485 || DECL_EXTERNAL (exp)
9486 /* ??? C++ creates functions that are not TREE_STATIC. */
9487 || TREE_CODE (exp) == FUNCTION_DECL);
9489 /* This is the case of an array whose size is to be determined
9490 from its initializer, while the initializer is still being parsed.
9491 ??? We aren't parsing while expanding anymore. */
9493 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9494 temp = validize_mem (decl_rtl);
9496 /* If DECL_RTL is memory, we are in the normal case and the
9497 address is not valid, get the address into a register. */
9499 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9501 if (alt_rtl)
9502 *alt_rtl = decl_rtl;
9503 decl_rtl = use_anchored_address (decl_rtl);
9504 if (modifier != EXPAND_CONST_ADDRESS
9505 && modifier != EXPAND_SUM
9506 && !memory_address_addr_space_p (DECL_MODE (exp),
9507 XEXP (decl_rtl, 0),
9508 MEM_ADDR_SPACE (decl_rtl)))
9509 temp = replace_equiv_address (decl_rtl,
9510 copy_rtx (XEXP (decl_rtl, 0)));
9513 /* If we got something, return it. But first, set the alignment
9514 if the address is a register. */
9515 if (temp != 0)
9517 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9518 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9520 return temp;
9523 /* If the mode of DECL_RTL does not match that of the decl,
9524 there are two cases: we are dealing with a BLKmode value
9525 that is returned in a register, or we are dealing with
9526 a promoted value. In the latter case, return a SUBREG
9527 of the wanted mode, but mark it so that we know that it
9528 was already extended. */
9529 if (REG_P (decl_rtl)
9530 && DECL_MODE (exp) != BLKmode
9531 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9533 enum machine_mode pmode;
9535 /* Get the signedness to be used for this variable. Ensure we get
9536 the same mode we got when the variable was declared. */
9537 if (code == SSA_NAME
9538 && (g = SSA_NAME_DEF_STMT (ssa_name))
9539 && gimple_code (g) == GIMPLE_CALL
9540 && !gimple_call_internal_p (g))
9541 pmode = promote_function_mode (type, mode, &unsignedp,
9542 gimple_call_fntype (g),
9544 else
9545 pmode = promote_decl_mode (exp, &unsignedp);
9546 gcc_assert (GET_MODE (decl_rtl) == pmode);
9548 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9549 SUBREG_PROMOTED_VAR_P (temp) = 1;
9550 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9551 return temp;
9554 return decl_rtl;
9556 case INTEGER_CST:
9557 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9558 TREE_INT_CST_HIGH (exp), mode);
9560 return temp;
9562 case VECTOR_CST:
9564 tree tmp = NULL_TREE;
9565 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9566 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9567 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9568 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9569 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9570 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9571 return const_vector_from_tree (exp);
9572 if (GET_MODE_CLASS (mode) == MODE_INT)
9574 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9575 if (type_for_mode)
9576 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9578 if (!tmp)
9580 vec<constructor_elt, va_gc> *v;
9581 unsigned i;
9582 vec_alloc (v, VECTOR_CST_NELTS (exp));
9583 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9584 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9585 tmp = build_constructor (type, v);
9587 return expand_expr (tmp, ignore ? const0_rtx : target,
9588 tmode, modifier);
9591 case CONST_DECL:
9592 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9594 case REAL_CST:
9595 /* If optimized, generate immediate CONST_DOUBLE
9596 which will be turned into memory by reload if necessary.
9598 We used to force a register so that loop.c could see it. But
9599 this does not allow gen_* patterns to perform optimizations with
9600 the constants. It also produces two insns in cases like "x = 1.0;".
9601 On most machines, floating-point constants are not permitted in
9602 many insns, so we'd end up copying it to a register in any case.
9604 Now, we do the copying in expand_binop, if appropriate. */
9605 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9606 TYPE_MODE (TREE_TYPE (exp)));
9608 case FIXED_CST:
9609 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9610 TYPE_MODE (TREE_TYPE (exp)));
9612 case COMPLEX_CST:
9613 /* Handle evaluating a complex constant in a CONCAT target. */
9614 if (original_target && GET_CODE (original_target) == CONCAT)
9616 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9617 rtx rtarg, itarg;
9619 rtarg = XEXP (original_target, 0);
9620 itarg = XEXP (original_target, 1);
9622 /* Move the real and imaginary parts separately. */
9623 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9624 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9626 if (op0 != rtarg)
9627 emit_move_insn (rtarg, op0);
9628 if (op1 != itarg)
9629 emit_move_insn (itarg, op1);
9631 return original_target;
9634 /* ... fall through ... */
9636 case STRING_CST:
9637 temp = expand_expr_constant (exp, 1, modifier);
9639 /* temp contains a constant address.
9640 On RISC machines where a constant address isn't valid,
9641 make some insns to get that address into a register. */
9642 if (modifier != EXPAND_CONST_ADDRESS
9643 && modifier != EXPAND_INITIALIZER
9644 && modifier != EXPAND_SUM
9645 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9646 MEM_ADDR_SPACE (temp)))
9647 return replace_equiv_address (temp,
9648 copy_rtx (XEXP (temp, 0)));
9649 return temp;
9651 case SAVE_EXPR:
9653 tree val = treeop0;
9654 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9655 inner_reference_p);
9657 if (!SAVE_EXPR_RESOLVED_P (exp))
9659 /* We can indeed still hit this case, typically via builtin
9660 expanders calling save_expr immediately before expanding
9661 something. Assume this means that we only have to deal
9662 with non-BLKmode values. */
9663 gcc_assert (GET_MODE (ret) != BLKmode);
9665 val = build_decl (curr_insn_location (),
9666 VAR_DECL, NULL, TREE_TYPE (exp));
9667 DECL_ARTIFICIAL (val) = 1;
9668 DECL_IGNORED_P (val) = 1;
9669 treeop0 = val;
9670 TREE_OPERAND (exp, 0) = treeop0;
9671 SAVE_EXPR_RESOLVED_P (exp) = 1;
9673 if (!CONSTANT_P (ret))
9674 ret = copy_to_reg (ret);
9675 SET_DECL_RTL (val, ret);
9678 return ret;
9682 case CONSTRUCTOR:
9683 /* If we don't need the result, just ensure we evaluate any
9684 subexpressions. */
9685 if (ignore)
9687 unsigned HOST_WIDE_INT idx;
9688 tree value;
9690 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9691 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9693 return const0_rtx;
9696 return expand_constructor (exp, target, modifier, false);
9698 case TARGET_MEM_REF:
9700 addr_space_t as
9701 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9702 enum insn_code icode;
9703 unsigned int align;
9705 op0 = addr_for_mem_ref (exp, as, true);
9706 op0 = memory_address_addr_space (mode, op0, as);
9707 temp = gen_rtx_MEM (mode, op0);
9708 set_mem_attributes (temp, exp, 0);
9709 set_mem_addr_space (temp, as);
9710 align = get_object_alignment (exp);
9711 if (modifier != EXPAND_WRITE
9712 && modifier != EXPAND_MEMORY
9713 && mode != BLKmode
9714 && align < GET_MODE_ALIGNMENT (mode)
9715 /* If the target does not have special handling for unaligned
9716 loads of this mode, it can use regular moves for them. */
9717 && ((icode = optab_handler (movmisalign_optab, mode))
9718 != CODE_FOR_nothing))
9720 struct expand_operand ops[2];
9722 /* We've already validated the memory, and we're creating a
9723 new pseudo destination. The predicates really can't fail,
9724 nor can the generator. */
9725 create_output_operand (&ops[0], NULL_RTX, mode);
9726 create_fixed_operand (&ops[1], temp);
9727 expand_insn (icode, 2, ops);
9728 temp = ops[0].value;
9730 return temp;
9733 case MEM_REF:
9735 addr_space_t as
9736 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9737 enum machine_mode address_mode;
9738 tree base = TREE_OPERAND (exp, 0);
9739 gimple def_stmt;
9740 enum insn_code icode;
9741 unsigned align;
9742 /* Handle expansion of non-aliased memory with non-BLKmode. That
9743 might end up in a register. */
9744 if (mem_ref_refers_to_non_mem_p (exp))
9746 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9747 base = TREE_OPERAND (base, 0);
9748 if (offset == 0
9749 && tree_fits_uhwi_p (TYPE_SIZE (type))
9750 && (GET_MODE_BITSIZE (DECL_MODE (base))
9751 == tree_to_uhwi (TYPE_SIZE (type))))
9752 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9753 target, tmode, modifier);
9754 if (TYPE_MODE (type) == BLKmode)
9756 temp = assign_stack_temp (DECL_MODE (base),
9757 GET_MODE_SIZE (DECL_MODE (base)));
9758 store_expr (base, temp, 0, false);
9759 temp = adjust_address (temp, BLKmode, offset);
9760 set_mem_size (temp, int_size_in_bytes (type));
9761 return temp;
9763 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9764 bitsize_int (offset * BITS_PER_UNIT));
9765 return expand_expr (exp, target, tmode, modifier);
9767 address_mode = targetm.addr_space.address_mode (as);
9768 base = TREE_OPERAND (exp, 0);
9769 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9771 tree mask = gimple_assign_rhs2 (def_stmt);
9772 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9773 gimple_assign_rhs1 (def_stmt), mask);
9774 TREE_OPERAND (exp, 0) = base;
9776 align = get_object_alignment (exp);
9777 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9778 op0 = memory_address_addr_space (mode, op0, as);
9779 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9781 rtx off
9782 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9783 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9784 op0 = memory_address_addr_space (mode, op0, as);
9786 temp = gen_rtx_MEM (mode, op0);
9787 set_mem_attributes (temp, exp, 0);
9788 set_mem_addr_space (temp, as);
9789 if (TREE_THIS_VOLATILE (exp))
9790 MEM_VOLATILE_P (temp) = 1;
9791 if (modifier != EXPAND_WRITE
9792 && modifier != EXPAND_MEMORY
9793 && !inner_reference_p
9794 && mode != BLKmode
9795 && align < GET_MODE_ALIGNMENT (mode))
9797 if ((icode = optab_handler (movmisalign_optab, mode))
9798 != CODE_FOR_nothing)
9800 struct expand_operand ops[2];
9802 /* We've already validated the memory, and we're creating a
9803 new pseudo destination. The predicates really can't fail,
9804 nor can the generator. */
9805 create_output_operand (&ops[0], NULL_RTX, mode);
9806 create_fixed_operand (&ops[1], temp);
9807 expand_insn (icode, 2, ops);
9808 temp = ops[0].value;
9810 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9811 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9812 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9813 (modifier == EXPAND_STACK_PARM
9814 ? NULL_RTX : target),
9815 mode, mode);
9817 return temp;
9820 case ARRAY_REF:
9823 tree array = treeop0;
9824 tree index = treeop1;
9825 tree init;
9827 /* Fold an expression like: "foo"[2].
9828 This is not done in fold so it won't happen inside &.
9829 Don't fold if this is for wide characters since it's too
9830 difficult to do correctly and this is a very rare case. */
9832 if (modifier != EXPAND_CONST_ADDRESS
9833 && modifier != EXPAND_INITIALIZER
9834 && modifier != EXPAND_MEMORY)
9836 tree t = fold_read_from_constant_string (exp);
9838 if (t)
9839 return expand_expr (t, target, tmode, modifier);
9842 /* If this is a constant index into a constant array,
9843 just get the value from the array. Handle both the cases when
9844 we have an explicit constructor and when our operand is a variable
9845 that was declared const. */
9847 if (modifier != EXPAND_CONST_ADDRESS
9848 && modifier != EXPAND_INITIALIZER
9849 && modifier != EXPAND_MEMORY
9850 && TREE_CODE (array) == CONSTRUCTOR
9851 && ! TREE_SIDE_EFFECTS (array)
9852 && TREE_CODE (index) == INTEGER_CST)
9854 unsigned HOST_WIDE_INT ix;
9855 tree field, value;
9857 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9858 field, value)
9859 if (tree_int_cst_equal (field, index))
9861 if (!TREE_SIDE_EFFECTS (value))
9862 return expand_expr (fold (value), target, tmode, modifier);
9863 break;
9867 else if (optimize >= 1
9868 && modifier != EXPAND_CONST_ADDRESS
9869 && modifier != EXPAND_INITIALIZER
9870 && modifier != EXPAND_MEMORY
9871 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9872 && TREE_CODE (index) == INTEGER_CST
9873 && (TREE_CODE (array) == VAR_DECL
9874 || TREE_CODE (array) == CONST_DECL)
9875 && (init = ctor_for_folding (array)) != error_mark_node)
9877 if (init == NULL_TREE)
9879 tree value = build_zero_cst (type);
9880 if (TREE_CODE (value) == CONSTRUCTOR)
9882 /* If VALUE is a CONSTRUCTOR, this optimization is only
9883 useful if this doesn't store the CONSTRUCTOR into
9884 memory. If it does, it is more efficient to just
9885 load the data from the array directly. */
9886 rtx ret = expand_constructor (value, target,
9887 modifier, true);
9888 if (ret == NULL_RTX)
9889 value = NULL_TREE;
9892 if (value)
9893 return expand_expr (value, target, tmode, modifier);
9895 else if (TREE_CODE (init) == CONSTRUCTOR)
9897 unsigned HOST_WIDE_INT ix;
9898 tree field, value;
9900 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9901 field, value)
9902 if (tree_int_cst_equal (field, index))
9904 if (TREE_SIDE_EFFECTS (value))
9905 break;
9907 if (TREE_CODE (value) == CONSTRUCTOR)
9909 /* If VALUE is a CONSTRUCTOR, this
9910 optimization is only useful if
9911 this doesn't store the CONSTRUCTOR
9912 into memory. If it does, it is more
9913 efficient to just load the data from
9914 the array directly. */
9915 rtx ret = expand_constructor (value, target,
9916 modifier, true);
9917 if (ret == NULL_RTX)
9918 break;
9921 return
9922 expand_expr (fold (value), target, tmode, modifier);
9925 else if (TREE_CODE (init) == STRING_CST)
9927 tree low_bound = array_ref_low_bound (exp);
9928 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9930 /* Optimize the special case of a zero lower bound.
9932 We convert the lower bound to sizetype to avoid problems
9933 with constant folding. E.g. suppose the lower bound is
9934 1 and its mode is QI. Without the conversion
9935 (ARRAY + (INDEX - (unsigned char)1))
9936 becomes
9937 (ARRAY + (-(unsigned char)1) + INDEX)
9938 which becomes
9939 (ARRAY + 255 + INDEX). Oops! */
9940 if (!integer_zerop (low_bound))
9941 index1 = size_diffop_loc (loc, index1,
9942 fold_convert_loc (loc, sizetype,
9943 low_bound));
9945 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9947 tree type = TREE_TYPE (TREE_TYPE (init));
9948 enum machine_mode mode = TYPE_MODE (type);
9950 if (GET_MODE_CLASS (mode) == MODE_INT
9951 && GET_MODE_SIZE (mode) == 1)
9952 return gen_int_mode (TREE_STRING_POINTER (init)
9953 [TREE_INT_CST_LOW (index1)],
9954 mode);
9959 goto normal_inner_ref;
9961 case COMPONENT_REF:
9962 /* If the operand is a CONSTRUCTOR, we can just extract the
9963 appropriate field if it is present. */
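/* E.g. given (struct { unsigned f : 3; int i; }) { 5, 42 }, a reference
   to the F field can simply return 5; for bit-fields the extracted value
   is masked with (1 << bitsize) - 1 below (or sign-extended with a shift
   pair if the field is signed).  */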
9964 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9966 unsigned HOST_WIDE_INT idx;
9967 tree field, value;
9969 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9970 idx, field, value)
9971 if (field == treeop1
9972 /* We can normally use the value of the field in the
9973 CONSTRUCTOR. However, if this is a bitfield in
9974 an integral mode that we can fit in a HOST_WIDE_INT,
9975 we must mask only the number of bits in the bitfield,
9976 since this is done implicitly by the constructor. If
9977 the bitfield does not meet either of those conditions,
9978 we can't do this optimization. */
9979 && (! DECL_BIT_FIELD (field)
9980 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9981 && (GET_MODE_PRECISION (DECL_MODE (field))
9982 <= HOST_BITS_PER_WIDE_INT))))
9984 if (DECL_BIT_FIELD (field)
9985 && modifier == EXPAND_STACK_PARM)
9986 target = 0;
9987 op0 = expand_expr (value, target, tmode, modifier);
9988 if (DECL_BIT_FIELD (field))
9990 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9991 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9993 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9995 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9996 imode);
9997 op0 = expand_and (imode, op0, op1, target);
9999 else
10001 int count = GET_MODE_PRECISION (imode) - bitsize;
10003 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10004 target, 0);
10005 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10006 target, 0);
10010 return op0;
10013 goto normal_inner_ref;
10015 case BIT_FIELD_REF:
10016 case ARRAY_RANGE_REF:
10017 normal_inner_ref:
10019 enum machine_mode mode1, mode2;
10020 HOST_WIDE_INT bitsize, bitpos;
10021 tree offset;
10022 int volatilep = 0, must_force_mem;
10023 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10024 &mode1, &unsignedp, &volatilep, true);
10025 rtx orig_op0, memloc;
10026 bool mem_attrs_from_type = false;
10028 /* If we got back the original object, something is wrong. Perhaps
10029 we are evaluating an expression too early. In any event, don't
10030 infinitely recurse. */
10031 gcc_assert (tem != exp);
10033 /* If TEM's type is a union of variable size, pass TARGET to the inner
10034 computation, since it will need a temporary and TARGET is known
10035 to be adequate for that. This occurs in unchecked conversion in Ada. */
10036 orig_op0 = op0
10037 = expand_expr_real (tem,
10038 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10039 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10040 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10041 != INTEGER_CST)
10042 && modifier != EXPAND_STACK_PARM
10043 ? target : NULL_RTX),
10044 VOIDmode,
10045 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10046 NULL, true);
10048 /* If the field has a mode, we want to access it in the
10049 field's mode, not the computed mode.
10050 If a MEM has VOIDmode (external with incomplete type),
10051 use BLKmode for it instead. */
10052 if (MEM_P (op0))
10054 if (mode1 != VOIDmode)
10055 op0 = adjust_address (op0, mode1, 0);
10056 else if (GET_MODE (op0) == VOIDmode)
10057 op0 = adjust_address (op0, BLKmode, 0);
10060 mode2
10061 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10063 /* If we have either an offset, a BLKmode result, or a reference
10064 outside the underlying object, we must force it to memory.
10065 Such a case can occur in Ada if we have unchecked conversion
10066 of an expression from a scalar type to an aggregate type or
10067 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10068 passed a partially uninitialized object or a view-conversion
10069 to a larger size. */
10070 must_force_mem = (offset
10071 || mode1 == BLKmode
10072 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10074 /* Handle CONCAT first. */
10075 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10077 if (bitpos == 0
10078 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10079 return op0;
10080 if (bitpos == 0
10081 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10082 && bitsize)
10084 op0 = XEXP (op0, 0);
10085 mode2 = GET_MODE (op0);
10087 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10088 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10089 && bitpos
10090 && bitsize)
10092 op0 = XEXP (op0, 1);
10093 bitpos = 0;
10094 mode2 = GET_MODE (op0);
10096 else
10097 /* Otherwise force into memory. */
10098 must_force_mem = 1;
10101 /* If this is a constant, put it in a register if it is a legitimate
10102 constant and we don't need a memory reference. */
10103 if (CONSTANT_P (op0)
10104 && mode2 != BLKmode
10105 && targetm.legitimate_constant_p (mode2, op0)
10106 && !must_force_mem)
10107 op0 = force_reg (mode2, op0);
10109 /* Otherwise, if this is a constant, try to force it to the constant
10110 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10111 is a legitimate constant. */
10112 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10113 op0 = validize_mem (memloc);
10115 /* Otherwise, if this is a constant or the object is not in memory
10116 and need be, put it there. */
10117 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10119 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10120 emit_move_insn (memloc, op0);
10121 op0 = memloc;
10122 mem_attrs_from_type = true;
10125 if (offset)
10127 enum machine_mode address_mode;
10128 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10129 EXPAND_SUM);
10131 gcc_assert (MEM_P (op0));
10133 address_mode = get_address_mode (op0);
10134 if (GET_MODE (offset_rtx) != address_mode)
10135 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10137 if (GET_MODE (op0) == BLKmode
10138 /* The check for a constant address in OP0 not having VOIDmode
10139 is probably no longer necessary. */
10140 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10141 && bitsize != 0
10142 && (bitpos % bitsize) == 0
10143 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10144 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10146 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10147 bitpos = 0;
10150 op0 = offset_address (op0, offset_rtx,
10151 highest_pow2_factor (offset));
10154 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10155 record its alignment as BIGGEST_ALIGNMENT. */
10156 if (MEM_P (op0) && bitpos == 0 && offset != 0
10157 && is_aligning_offset (offset, tem))
10158 set_mem_align (op0, BIGGEST_ALIGNMENT);
10160 /* Don't forget about volatility even if this is a bitfield. */
10161 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10163 if (op0 == orig_op0)
10164 op0 = copy_rtx (op0);
10166 MEM_VOLATILE_P (op0) = 1;
10169 /* In cases where an aligned union has an unaligned object
10170 as a field, we might be extracting a BLKmode value from
10171 an integer-mode (e.g., SImode) object. Handle this case
10172 by doing the extract into an object as wide as the field
10173 (which we know to be the width of a basic mode), then
10174 storing into memory, and changing the mode to BLKmode. */
10175 if (mode1 == VOIDmode
10176 || REG_P (op0) || GET_CODE (op0) == SUBREG
10177 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10178 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10179 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10180 && modifier != EXPAND_CONST_ADDRESS
10181 && modifier != EXPAND_INITIALIZER
10182 && modifier != EXPAND_MEMORY)
10183 /* If the bitfield is volatile and the bitsize
10184 is narrower than the access size of the bitfield,
10185 we need to extract bitfields from the access. */
10186 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10187 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10188 && mode1 != BLKmode
10189 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10190 /* If the field isn't aligned enough to fetch as a memref,
10191 fetch it as a bit field. */
10192 || (mode1 != BLKmode
10193 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10194 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10195 || (MEM_P (op0)
10196 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10197 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10198 && modifier != EXPAND_MEMORY
10199 && ((modifier == EXPAND_CONST_ADDRESS
10200 || modifier == EXPAND_INITIALIZER)
10201 ? STRICT_ALIGNMENT
10202 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10203 || (bitpos % BITS_PER_UNIT != 0)))
10204 /* If the type and the field are a constant size and the
10205 size of the type isn't the same size as the bitfield,
10206 we must use bitfield operations. */
10207 || (bitsize >= 0
10208 && TYPE_SIZE (TREE_TYPE (exp))
10209 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10210 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10211 bitsize)))
10213 enum machine_mode ext_mode = mode;
10215 if (ext_mode == BLKmode
10216 && ! (target != 0 && MEM_P (op0)
10217 && MEM_P (target)
10218 && bitpos % BITS_PER_UNIT == 0))
10219 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10221 if (ext_mode == BLKmode)
10223 if (target == 0)
10224 target = assign_temp (type, 1, 1);
10226 /* ??? Unlike the similar test a few lines below, this one is
10227 very likely obsolete. */
10228 if (bitsize == 0)
10229 return target;
10231 /* In this case, BITPOS must start at a byte boundary and
10232 TARGET, if specified, must be a MEM. */
10233 gcc_assert (MEM_P (op0)
10234 && (!target || MEM_P (target))
10235 && !(bitpos % BITS_PER_UNIT));
10237 emit_block_move (target,
10238 adjust_address (op0, VOIDmode,
10239 bitpos / BITS_PER_UNIT),
10240 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10241 / BITS_PER_UNIT),
10242 (modifier == EXPAND_STACK_PARM
10243 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10245 return target;
10248 /* If we have nothing to extract, the result will be 0 for targets
10249 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10250 return 0 for the sake of consistency, as reading a zero-sized
10251 bitfield is valid in Ada and the value is fully specified. */
10252 if (bitsize == 0)
10253 return const0_rtx;
10255 op0 = validize_mem (op0);
10257 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10258 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10260 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10261 (modifier == EXPAND_STACK_PARM
10262 ? NULL_RTX : target),
10263 ext_mode, ext_mode);
10265 /* If the result is a record type and BITSIZE is narrower than
10266 the mode of OP0, an integral mode, and this is a big endian
10267 machine, we must put the field into the high-order bits. */
10268 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10269 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10270 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10271 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10272 GET_MODE_BITSIZE (GET_MODE (op0))
10273 - bitsize, op0, 1);
10275 /* If the result type is BLKmode, store the data into a temporary
10276 of the appropriate type, but with the mode corresponding to the
10277 mode for the data we have (op0's mode). */
10278 if (mode == BLKmode)
10280 rtx new_rtx
10281 = assign_stack_temp_for_type (ext_mode,
10282 GET_MODE_BITSIZE (ext_mode),
10283 type);
10284 emit_move_insn (new_rtx, op0);
10285 op0 = copy_rtx (new_rtx);
10286 PUT_MODE (op0, BLKmode);
10289 return op0;
10292 /* If the result is BLKmode, use that to access the object
10293 now as well. */
10294 if (mode == BLKmode)
10295 mode1 = BLKmode;
10297 /* Get a reference to just this component. */
10298 if (modifier == EXPAND_CONST_ADDRESS
10299 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10300 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10301 else
10302 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10304 if (op0 == orig_op0)
10305 op0 = copy_rtx (op0);
10307 /* If op0 is a temporary because of forcing to memory, pass only the
10308 type to set_mem_attributes so that the original expression is never
10309 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10310 if (mem_attrs_from_type)
10311 set_mem_attributes (op0, type, 0);
10312 else
10313 set_mem_attributes (op0, exp, 0);
10315 if (REG_P (XEXP (op0, 0)))
10316 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10318 MEM_VOLATILE_P (op0) |= volatilep;
10319 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10320 || modifier == EXPAND_CONST_ADDRESS
10321 || modifier == EXPAND_INITIALIZER)
10322 return op0;
10324 if (target == 0)
10325 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10327 convert_move (target, op0, unsignedp);
10328 return target;
10331 case OBJ_TYPE_REF:
10332 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10334 case CALL_EXPR:
10335 /* All valid uses of __builtin_va_arg_pack () are removed during
10336 inlining. */
10337 if (CALL_EXPR_VA_ARG_PACK (exp))
10338 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10340 tree fndecl = get_callee_fndecl (exp), attr;
10342 if (fndecl
10343 && (attr = lookup_attribute ("error",
10344 DECL_ATTRIBUTES (fndecl))) != NULL)
10345 error ("%Kcall to %qs declared with attribute error: %s",
10346 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10347 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10348 if (fndecl
10349 && (attr = lookup_attribute ("warning",
10350 DECL_ATTRIBUTES (fndecl))) != NULL)
10351 warning_at (tree_nonartificial_location (exp),
10352 0, "%Kcall to %qs declared with attribute warning: %s",
10353 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10354 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10356 /* Check for a built-in function. */
10357 if (fndecl && DECL_BUILT_IN (fndecl))
10359 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10360 return expand_builtin (exp, target, subtarget, tmode, ignore);
10363 return expand_call (exp, target, ignore);
10365 case VIEW_CONVERT_EXPR:
10366 op0 = NULL_RTX;
10368 /* If we are converting to BLKmode, try to avoid an intermediate
10369 temporary by fetching an inner memory reference. */
10370 if (mode == BLKmode
10371 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10372 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10373 && handled_component_p (treeop0))
10375 enum machine_mode mode1;
10376 HOST_WIDE_INT bitsize, bitpos;
10377 tree offset;
10378 int unsignedp;
10379 int volatilep = 0;
10380 tree tem
10381 = get_inner_reference (treeop0, &bitsize, &bitpos,
10382 &offset, &mode1, &unsignedp, &volatilep,
10383 true);
10384 rtx orig_op0;
10386 /* ??? We should work harder and deal with non-zero offsets. */
10387 if (!offset
10388 && (bitpos % BITS_PER_UNIT) == 0
10389 && bitsize >= 0
10390 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10392 /* See the normal_inner_ref case for the rationale. */
10393 orig_op0
10394 = expand_expr_real (tem,
10395 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10396 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10397 != INTEGER_CST)
10398 && modifier != EXPAND_STACK_PARM
10399 ? target : NULL_RTX),
10400 VOIDmode,
10401 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10402 NULL, true);
10404 if (MEM_P (orig_op0))
10406 op0 = orig_op0;
10408 /* Get a reference to just this component. */
10409 if (modifier == EXPAND_CONST_ADDRESS
10410 || modifier == EXPAND_SUM
10411 || modifier == EXPAND_INITIALIZER)
10412 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10413 else
10414 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10416 if (op0 == orig_op0)
10417 op0 = copy_rtx (op0);
10419 set_mem_attributes (op0, treeop0, 0);
10420 if (REG_P (XEXP (op0, 0)))
10421 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10423 MEM_VOLATILE_P (op0) |= volatilep;
10428 if (!op0)
10429 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10430 NULL, inner_reference_p);
10432 /* If the input and output modes are both the same, we are done. */
10433 if (mode == GET_MODE (op0))
10435 /* If neither mode is BLKmode, and both modes are the same size
10436 then we can use gen_lowpart. */
10437 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10438 && (GET_MODE_PRECISION (mode)
10439 == GET_MODE_PRECISION (GET_MODE (op0)))
10440 && !COMPLEX_MODE_P (GET_MODE (op0)))
10442 if (GET_CODE (op0) == SUBREG)
10443 op0 = force_reg (GET_MODE (op0), op0);
10444 temp = gen_lowpart_common (mode, op0);
10445 if (temp)
10446 op0 = temp;
10447 else
10449 if (!REG_P (op0) && !MEM_P (op0))
10450 op0 = force_reg (GET_MODE (op0), op0);
10451 op0 = gen_lowpart (mode, op0);
10454 /* If both types are integral, convert from one mode to the other. */
10455 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10456 op0 = convert_modes (mode, GET_MODE (op0), op0,
10457 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10458 /* If the output type is a bit-field type, do an extraction. */
10459 else if (reduce_bit_field)
10460 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10461 TYPE_UNSIGNED (type), NULL_RTX,
10462 mode, mode);
10463 /* As a last resort, spill op0 to memory, and reload it in a
10464 different mode. */
10465 else if (!MEM_P (op0))
10467 /* If the operand is not a MEM, force it into memory. Since we
10468 are going to be changing the mode of the MEM, don't call
10469 force_const_mem for constants because we don't allow pool
10470 constants to change mode. */
10471 tree inner_type = TREE_TYPE (treeop0);
10473 gcc_assert (!TREE_ADDRESSABLE (exp));
10475 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10476 target
10477 = assign_stack_temp_for_type
10478 (TYPE_MODE (inner_type),
10479 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10481 emit_move_insn (target, op0);
10482 op0 = target;
10485 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10486 output type is such that the operand is known to be aligned, indicate
10487 that it is. Otherwise, we need only be concerned about alignment for
10488 non-BLKmode results. */
10489 if (MEM_P (op0))
10491 enum insn_code icode;
10493 if (TYPE_ALIGN_OK (type))
10495 /* ??? Copying the MEM without substantially changing it might
10496 run afoul of the code handling volatile memory references in
10497 store_expr, which assumes that TARGET is returned unmodified
10498 if it has been used. */
10499 op0 = copy_rtx (op0);
10500 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10502 else if (modifier != EXPAND_WRITE
10503 && modifier != EXPAND_MEMORY
10504 && !inner_reference_p
10505 && mode != BLKmode
10506 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10508 /* If the target does have special handling for unaligned
10509 loads of this mode, use them. */
10510 if ((icode = optab_handler (movmisalign_optab, mode))
10511 != CODE_FOR_nothing)
10513 rtx reg, insn;
10515 op0 = adjust_address (op0, mode, 0);
10516 /* We've already validated the memory, and we're creating a
10517 new pseudo destination. The predicates really can't
10518 fail. */
10519 reg = gen_reg_rtx (mode);
10521 /* Nor can the insn generator. */
10522 insn = GEN_FCN (icode) (reg, op0);
10523 emit_insn (insn);
10524 return reg;
10526 else if (STRICT_ALIGNMENT)
10528 tree inner_type = TREE_TYPE (treeop0);
10529 HOST_WIDE_INT temp_size
10530 = MAX (int_size_in_bytes (inner_type),
10531 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10532 rtx new_rtx
10533 = assign_stack_temp_for_type (mode, temp_size, type);
10534 rtx new_with_op0_mode
10535 = adjust_address (new_rtx, GET_MODE (op0), 0);
10537 gcc_assert (!TREE_ADDRESSABLE (exp));
10539 if (GET_MODE (op0) == BLKmode)
10540 emit_block_move (new_with_op0_mode, op0,
10541 GEN_INT (GET_MODE_SIZE (mode)),
10542 (modifier == EXPAND_STACK_PARM
10543 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10544 else
10545 emit_move_insn (new_with_op0_mode, op0);
10547 op0 = new_rtx;
10551 op0 = adjust_address (op0, mode, 0);
10554 return op0;
10556 case MODIFY_EXPR:
10558 tree lhs = treeop0;
10559 tree rhs = treeop1;
10560 gcc_assert (ignore);
10562 /* Check for |= or &= of a bitfield of size one into another bitfield
10563 of size 1. In this case, (unless we need the result of the
10564 assignment) we can do this more efficiently with a
10565 test followed by an assignment, if necessary.
10567 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10568 things change so we do, this code should be enhanced to
10569 support it. */
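/* E.g. with one-bit fields A and B, "s.a |= s.b" becomes
   "if (s.b) s.a = 1;" and "s.a &= s.b" becomes "if (!s.b) s.a = 0;",
   avoiding a read-modify-write of the destination bit-field.  */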
10570 if (TREE_CODE (lhs) == COMPONENT_REF
10571 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10572 || TREE_CODE (rhs) == BIT_AND_EXPR)
10573 && TREE_OPERAND (rhs, 0) == lhs
10574 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10575 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10576 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10578 rtx label = gen_label_rtx ();
10579 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10580 do_jump (TREE_OPERAND (rhs, 1),
10581 value ? label : 0,
10582 value ? 0 : label, -1);
10583 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10584 false);
10585 do_pending_stack_adjust ();
10586 emit_label (label);
10587 return const0_rtx;
10590 expand_assignment (lhs, rhs, false);
10591 return const0_rtx;
10594 case ADDR_EXPR:
10595 return expand_expr_addr_expr (exp, target, tmode, modifier);
10597 case REALPART_EXPR:
10598 op0 = expand_normal (treeop0);
10599 return read_complex_part (op0, false);
10601 case IMAGPART_EXPR:
10602 op0 = expand_normal (treeop0);
10603 return read_complex_part (op0, true);
10605 case RETURN_EXPR:
10606 case LABEL_EXPR:
10607 case GOTO_EXPR:
10608 case SWITCH_EXPR:
10609 case ASM_EXPR:
10610 /* Expanded in cfgexpand.c. */
10611 gcc_unreachable ();
10613 case TRY_CATCH_EXPR:
10614 case CATCH_EXPR:
10615 case EH_FILTER_EXPR:
10616 case TRY_FINALLY_EXPR:
10617 /* Lowered by tree-eh.c. */
10618 gcc_unreachable ();
10620 case WITH_CLEANUP_EXPR:
10621 case CLEANUP_POINT_EXPR:
10622 case TARGET_EXPR:
10623 case CASE_LABEL_EXPR:
10624 case VA_ARG_EXPR:
10625 case BIND_EXPR:
10626 case INIT_EXPR:
10627 case CONJ_EXPR:
10628 case COMPOUND_EXPR:
10629 case PREINCREMENT_EXPR:
10630 case PREDECREMENT_EXPR:
10631 case POSTINCREMENT_EXPR:
10632 case POSTDECREMENT_EXPR:
10633 case LOOP_EXPR:
10634 case EXIT_EXPR:
10635 case COMPOUND_LITERAL_EXPR:
10636 /* Lowered by gimplify.c. */
10637 gcc_unreachable ();
10639 case FDESC_EXPR:
10640 /* Function descriptors are not valid except for as
10641 initialization constants, and should not be expanded. */
10642 gcc_unreachable ();
10644 case WITH_SIZE_EXPR:
10645 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10646 have pulled out the size to use in whatever context it needed. */
10647 return expand_expr_real (treeop0, original_target, tmode,
10648 modifier, alt_rtl, inner_reference_p);
10650 default:
10651 return expand_expr_real_2 (&ops, target, tmode, modifier);
10655 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10656 signedness of TYPE), possibly returning the result in TARGET. */
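/* For example, reducing a value to a 5-bit unsigned type masks it with
   0x1f, while reducing it to a 5-bit signed type shifts it left and then
   arithmetically right by GET_MODE_PRECISION (GET_MODE (exp)) - 5 so
   that bit 4 is sign-extended.  */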
10657 static rtx
10658 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10660 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10661 if (target && GET_MODE (target) != GET_MODE (exp))
10662 target = 0;
10663 /* For constant values, reduce using build_int_cst_type. */
10664 if (CONST_INT_P (exp))
10666 HOST_WIDE_INT value = INTVAL (exp);
10667 tree t = build_int_cst_type (type, value);
10668 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10670 else if (TYPE_UNSIGNED (type))
10672 rtx mask = immed_double_int_const (double_int::mask (prec),
10673 GET_MODE (exp));
10674 return expand_and (GET_MODE (exp), exp, mask, target);
10676 else
10678 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10679 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10680 exp, count, target, 0);
10681 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10682 exp, count, target, 0);
10686 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10687 when applied to the address of EXP produces an address known to be
10688 aligned more than BIGGEST_ALIGNMENT. */
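/* Such offsets come from manual alignment idioms: roughly, trees of the
   form (- (sizetype) &EXP) & (ALIGN - 1) where ALIGN is a power of 2
   whose byte count exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT, so that
   adding the offset to &EXP rounds the address up to a multiple of
   ALIGN.  */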
10690 static int
10691 is_aligning_offset (const_tree offset, const_tree exp)
10693 /* Strip off any conversions. */
10694 while (CONVERT_EXPR_P (offset))
10695 offset = TREE_OPERAND (offset, 0);
10697 /* We must now have a BIT_AND_EXPR with a constant that is one less than a
10698 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10699 if (TREE_CODE (offset) != BIT_AND_EXPR
10700 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10701 || compare_tree_int (TREE_OPERAND (offset, 1),
10702 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10703 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10704 return 0;
10706 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10707 It must be NEGATE_EXPR. Then strip any more conversions. */
10708 offset = TREE_OPERAND (offset, 0);
10709 while (CONVERT_EXPR_P (offset))
10710 offset = TREE_OPERAND (offset, 0);
10712 if (TREE_CODE (offset) != NEGATE_EXPR)
10713 return 0;
10715 offset = TREE_OPERAND (offset, 0);
10716 while (CONVERT_EXPR_P (offset))
10717 offset = TREE_OPERAND (offset, 0);
10719 /* This must now be the address of EXP. */
10720 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10723 /* Return the tree node if ARG corresponds to a string constant or zero
10724 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10725 in bytes within the string that ARG is accessing. The type of the
10726 offset will be `sizetype'. */
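/* For example, for "hello" + 2 (a POINTER_PLUS_EXPR of the string's
   address and 2) the STRING_CST for "hello" is returned with *PTR_OFFSET
   set to 2; the same happens for a VAR_DECL whose initializer is a string
   literal, such as static const char buf[] = "hello", subject to the
   bounds checks below.  */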
10728 tree
10729 string_constant (tree arg, tree *ptr_offset)
10731 tree array, offset, lower_bound;
10732 STRIP_NOPS (arg);
10734 if (TREE_CODE (arg) == ADDR_EXPR)
10736 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10738 *ptr_offset = size_zero_node;
10739 return TREE_OPERAND (arg, 0);
10741 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10743 array = TREE_OPERAND (arg, 0);
10744 offset = size_zero_node;
10746 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10748 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10749 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10750 if (TREE_CODE (array) != STRING_CST
10751 && TREE_CODE (array) != VAR_DECL)
10752 return 0;
10754 /* Check if the array has a nonzero lower bound. */
10755 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10756 if (!integer_zerop (lower_bound))
10758 /* If the offset and base aren't both constants, return 0. */
10759 if (TREE_CODE (lower_bound) != INTEGER_CST)
10760 return 0;
10761 if (TREE_CODE (offset) != INTEGER_CST)
10762 return 0;
10763 /* Adjust offset by the lower bound. */
10764 offset = size_diffop (fold_convert (sizetype, offset),
10765 fold_convert (sizetype, lower_bound));
10768 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10770 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10771 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10772 if (TREE_CODE (array) != ADDR_EXPR)
10773 return 0;
10774 array = TREE_OPERAND (array, 0);
10775 if (TREE_CODE (array) != STRING_CST
10776 && TREE_CODE (array) != VAR_DECL)
10777 return 0;
10779 else
10780 return 0;
10782 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10784 tree arg0 = TREE_OPERAND (arg, 0);
10785 tree arg1 = TREE_OPERAND (arg, 1);
10787 STRIP_NOPS (arg0);
10788 STRIP_NOPS (arg1);
10790 if (TREE_CODE (arg0) == ADDR_EXPR
10791 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10792 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10794 array = TREE_OPERAND (arg0, 0);
10795 offset = arg1;
10797 else if (TREE_CODE (arg1) == ADDR_EXPR
10798 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10799 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10801 array = TREE_OPERAND (arg1, 0);
10802 offset = arg0;
10804 else
10805 return 0;
10807 else
10808 return 0;
10810 if (TREE_CODE (array) == STRING_CST)
10812 *ptr_offset = fold_convert (sizetype, offset);
10813 return array;
10815 else if (TREE_CODE (array) == VAR_DECL
10816 || TREE_CODE (array) == CONST_DECL)
10818 int length;
10819 tree init = ctor_for_folding (array);
10821 /* Variables initialized to string literals can be handled too. */
10822 if (init == error_mark_node
10823 || !init
10824 || TREE_CODE (init) != STRING_CST)
10825 return 0;
10827 /* Avoid const char foo[4] = "abcde"; */
10828 if (DECL_SIZE_UNIT (array) == NULL_TREE
10829 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10830 || (length = TREE_STRING_LENGTH (init)) <= 0
10831 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10832 return 0;
10834 /* If the variable is bigger than the string literal, OFFSET must be constant
10835 and within the bounds of the string literal. */
10836 offset = fold_convert (sizetype, offset);
10837 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10838 && (! tree_fits_uhwi_p (offset)
10839 || compare_tree_int (offset, length) >= 0))
10840 return 0;
10842 *ptr_offset = offset;
10843 return init;
10846 return 0;
10849 /* Generate code to calculate OPS, an exploded expression,
10850 using a store-flag instruction, and return an rtx for the result.
10851 OPS reflects a comparison.
10853 If TARGET is nonzero, store the result there if convenient.
10855 Return zero if there is no suitable set-flag instruction
10856 available on this machine.
10858 Once expand_expr has been called on the arguments of the comparison,
10859 we are committed to doing the store flag, since it is not safe to
10860 re-evaluate the expression. We emit the store-flag insn by calling
10861 emit_store_flag, but only expand the arguments if we have a reason
10862 to believe that emit_store_flag will be successful. If we think that
10863 it will, but it isn't, we have to simulate the store-flag with a
10864 set/jump/set sequence. */
10866 static rtx
10867 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10869 enum rtx_code code;
10870 tree arg0, arg1, type;
10871 tree tem;
10872 enum machine_mode operand_mode;
10873 int unsignedp;
10874 rtx op0, op1;
10875 rtx subtarget = target;
10876 location_t loc = ops->location;
10878 arg0 = ops->op0;
10879 arg1 = ops->op1;
10881 /* Don't crash if the comparison was erroneous. */
10882 if (arg0 == error_mark_node || arg1 == error_mark_node)
10883 return const0_rtx;
10885 type = TREE_TYPE (arg0);
10886 operand_mode = TYPE_MODE (type);
10887 unsignedp = TYPE_UNSIGNED (type);
10889 /* We won't bother with BLKmode store-flag operations because it would mean
10890 passing a lot of information to emit_store_flag. */
10891 if (operand_mode == BLKmode)
10892 return 0;
10894 /* We won't bother with store-flag operations involving function pointers
10895 when function pointers must be canonicalized before comparisons. */
10896 #ifdef HAVE_canonicalize_funcptr_for_compare
10897 if (HAVE_canonicalize_funcptr_for_compare
10898 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10899 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10900 == FUNCTION_TYPE))
10901 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10902 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10903 == FUNCTION_TYPE))))
10904 return 0;
10905 #endif
10907 STRIP_NOPS (arg0);
10908 STRIP_NOPS (arg1);
10910 /* For vector typed comparisons emit code to generate the desired
10911 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10912 expander for this. */
10913 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10915 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10916 tree if_true = constant_boolean_node (true, ops->type);
10917 tree if_false = constant_boolean_node (false, ops->type);
10918 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10921 /* Get the rtx comparison code to use. We know that EXP is a comparison
10922 operation of some type. Some comparisons against 1 and -1 can be
10923 converted to comparisons with zero. Do so here so that the tests
10924 below will be aware that we have a comparison with zero. These
10925 tests will not catch constants in the first operand, but constants
10926 are rarely passed as the first operand. */
  switch (ops->code)
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
      || TREE_CODE (arg0) == FIXED_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
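
  /* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and the EQ form
     is the same with the result xored with 1.  */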
  if ((code == NE || code == EQ)
      && integer_zerop (arg1)
      && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
    {
      gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
      if (srcstmt
          && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
        {
          enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
          tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
          tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
                                       gimple_assign_rhs1 (srcstmt),
                                       gimple_assign_rhs2 (srcstmt));
          temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
          if (temp)
            return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
        }
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Try a cstore if possible.  */
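  /* A signed 1-bit type represents true as -1, so request a -1/0 result
     from the cstore in that case; otherwise use the usual 0/1.  */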
  return emit_store_flag_force (target, code, op0, op1,
                                operand_mode, unsignedp,
                                (TYPE_PRECISION (ops->type) == 1
                                 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).

   DEFAULT_PROBABILITY is the probability of jumping to the default
   label.  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label, rtx default_label, rtx fallback_label,
            int default_probability)
{
  struct expand_operand ops[5];
  enum machine_mode index_mode = SImode;
  rtx op1, op2, index;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);
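
  /* The casesi pattern takes five operands: the index, the minimum case
     value, the range (maximum minus minimum), the jump-table label, and
     the label to use when the index is out of range.  */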
  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
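
  /* For example, if the case values span 5 .. 12, INDEX already has 5
     subtracted, so a single unsigned "INDEX > 7" check rejects both
     original values below 5 (which wrap around to large unsigned values)
     and original values above 12.  */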
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
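  /* The address of table entry INDEX is computed below as
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE).  */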
  index = simplify_gen_binary (MULT, Pmode, index,
                               gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
                                             Pmode));
  index = simplify_gen_binary (PLUS, Pmode, index,
                               gen_rtx_LABEL_REF (Pmode, table_label));

#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;
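
  /* Bias the index so that the lowest case value maps to table entry 0;
     do_tablejump then needs only a single unsigned range check.  */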
  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);
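
  /* Convert each element of the VECTOR_CST to a constant rtx in the
     vector's element (inner) mode.  */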
  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }
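
  /* For example, a LANG of "gcc" with DWARF-2 unwind info produces the
     name "__gcc_personality_v0".  */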
  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"