1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-config.h"
33 #include "insn-attr.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "langhooks.h"
43 #include "intl.h"
44 #include "tm_p.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "gimple-ssa.h"
48 #include "cgraph.h"
49 #include "tree-ssanames.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "tree-ssa-live.h"
56 #include "tree-outof-ssa.h"
57 #include "target-globals.h"
58 #include "params.h"
59 #include "tree-ssa-address.h"
60 #include "cfgexpand.h"
62 /* Decide whether a function's arguments should be processed
63 from first to last or from last to first.
65 They should if the stack and args grow in opposite directions, but
66 only if we have push insns. */
68 #ifdef PUSH_ROUNDING
70 #ifndef PUSH_ARGS_REVERSED
71 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
72 #define PUSH_ARGS_REVERSED /* If it's last to first. */
73 #endif
74 #endif
76 #endif
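/* A minimal sketch of the test above, assuming only that the two growth
   directions are known as booleans: arguments are processed last-to-first
   exactly when the stack and the argument area grow in opposite
   directions.  The helper below is hypothetical and unused.  */

static int
sketch_push_args_reversed (int stack_grows_downward, int args_grow_downward)
{
  /* Mirrors: defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD).  */
  return stack_grows_downward != args_grow_downward;
}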
78 #ifndef STACK_PUSH_CODE
79 #ifdef STACK_GROWS_DOWNWARD
80 #define STACK_PUSH_CODE PRE_DEC
81 #else
82 #define STACK_PUSH_CODE PRE_INC
83 #endif
84 #endif
87 /* If this is nonzero, we do not bother generating VOLATILE
88 around volatile memory references, and we are willing to
89 output indirect addresses. If cse is to follow, we reject
90 indirect addresses so a useful potential cse is generated;
91 if it is used only once, instruction combination will produce
92 the same indirect address eventually. */
93 int cse_not_expected;
95 /* This structure is used by move_by_pieces to describe the move to
96 be performed. */
97 struct move_by_pieces_d
99 rtx to;
100 rtx to_addr;
101 int autinc_to;
102 int explicit_inc_to;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 unsigned HOST_WIDE_INT len;
108 HOST_WIDE_INT offset;
109 int reverse;
112 /* This structure is used by store_by_pieces to describe the clear to
113 be performed. */
115 struct store_by_pieces_d
117 rtx to;
118 rtx to_addr;
119 int autinc_to;
120 int explicit_inc_to;
121 unsigned HOST_WIDE_INT len;
122 HOST_WIDE_INT offset;
123 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
124 void *constfundata;
125 int reverse;
128 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
132 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
133 unsigned HOST_WIDE_INT);
134 static tree emit_block_move_libcall_fn (int);
135 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
136 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
137 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
138 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
139 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
140 struct store_by_pieces_d *);
141 static tree clear_storage_libcall_fn (int);
142 static rtx compress_float_constant (rtx, rtx);
143 static rtx get_subtarget (rtx);
144 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
145 HOST_WIDE_INT, enum machine_mode,
146 tree, int, alias_set_type);
147 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
148 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
149 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
150 enum machine_mode, tree, alias_set_type, bool);
152 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
154 static int is_aligning_offset (const_tree, const_tree);
155 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
156 enum expand_modifier);
157 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
158 static rtx do_store_flag (sepops, rtx, enum machine_mode);
159 #ifdef PUSH_ROUNDING
160 static void emit_single_push_insn (enum machine_mode, rtx, tree);
161 #endif
162 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
163 static rtx const_vector_from_tree (tree);
164 static void write_complex_part (rtx, rtx, bool);
166 /* This macro is used to determine whether move_by_pieces should be called
167 to perform a structure copy. */
168 #ifndef MOVE_BY_PIECES_P
169 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
170 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
171 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
172 #endif
174 /* This macro is used to determine whether clear_by_pieces should be
175 called to clear storage. */
176 #ifndef CLEAR_BY_PIECES_P
177 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
179 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
180 #endif
182 /* This macro is used to determine whether store_by_pieces should be
183 called to "memset" storage with byte values other than zero. */
184 #ifndef SET_BY_PIECES_P
185 #define SET_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
188 #endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memcpy" storage when the source is a constant string. */
192 #ifndef STORE_BY_PIECES_P
193 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
194 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
196 #endif
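/* A minimal sketch of what the four *_BY_PIECES_P macros above decide,
   assuming MAX_PIECE is a power of two and ignoring the alignment and
   optab checks done by move_by_pieces_ninsns: count how many moves a
   greedy widest-chunk-first expansion would need and compare that count
   with the target's ratio.  The helper below is hypothetical and unused.  */

static int
sketch_use_by_pieces (unsigned HOST_WIDE_INT len, unsigned int max_piece,
                      unsigned int ratio)
{
  unsigned HOST_WIDE_INT ninsns = 0;

  while (len > 0)
    {
      unsigned int chunk = max_piece;

      /* Widest power-of-two chunk no larger than what is left.  */
      while (chunk > len)
        chunk >>= 1;

      ninsns += len / chunk;
      len %= chunk;
    }

  return ninsns < ratio;
}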
198 /* This is run to set up which modes can be used
199 directly in memory and to initialize the block move optab. It is run
200 at the beginning of compilation and when the target is reinitialized. */
202 void
203 init_expr_target (void)
205 rtx insn, pat;
206 enum machine_mode mode;
207 int num_clobbers;
208 rtx mem, mem1;
209 rtx reg;
211 /* Try indexing by frame ptr and try by stack ptr.
212 It is known that on the Convex the stack ptr isn't a valid index.
213 With luck, one or the other is valid on any machine. */
214 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
215 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
217 /* A scratch register we can modify in-place below to avoid
218 useless RTL allocations. */
219 reg = gen_rtx_REG (VOIDmode, -1);
221 insn = rtx_alloc (INSN);
222 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
223 PATTERN (insn) = pat;
225 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
226 mode = (enum machine_mode) ((int) mode + 1))
228 int regno;
230 direct_load[(int) mode] = direct_store[(int) mode] = 0;
231 PUT_MODE (mem, mode);
232 PUT_MODE (mem1, mode);
233 PUT_MODE (reg, mode);
235 /* See if there is some register that can be used in this mode and
236 directly loaded or stored from memory. */
238 if (mode != VOIDmode && mode != BLKmode)
239 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
240 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
241 regno++)
243 if (! HARD_REGNO_MODE_OK (regno, mode))
244 continue;
246 SET_REGNO (reg, regno);
248 SET_SRC (pat) = mem;
249 SET_DEST (pat) = reg;
250 if (recog (pat, insn, &num_clobbers) >= 0)
251 direct_load[(int) mode] = 1;
253 SET_SRC (pat) = mem1;
254 SET_DEST (pat) = reg;
255 if (recog (pat, insn, &num_clobbers) >= 0)
256 direct_load[(int) mode] = 1;
258 SET_SRC (pat) = reg;
259 SET_DEST (pat) = mem;
260 if (recog (pat, insn, &num_clobbers) >= 0)
261 direct_store[(int) mode] = 1;
263 SET_SRC (pat) = reg;
264 SET_DEST (pat) = mem1;
265 if (recog (pat, insn, &num_clobbers) >= 0)
266 direct_store[(int) mode] = 1;
270 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
272 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
273 mode = GET_MODE_WIDER_MODE (mode))
275 enum machine_mode srcmode;
276 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
277 srcmode = GET_MODE_WIDER_MODE (srcmode))
279 enum insn_code ic;
281 ic = can_extend_p (mode, srcmode, 0);
282 if (ic == CODE_FOR_nothing)
283 continue;
285 PUT_MODE (mem, srcmode);
287 if (insn_operand_matches (ic, 1, mem))
288 float_extend_from_mem[mode][srcmode] = true;
293 /* This is run at the start of compiling a function. */
295 void
296 init_expr (void)
298 memset (&crtl->expr, 0, sizeof (crtl->expr));
301 /* Copy data from FROM to TO, where the machine modes are not the same.
302 Both modes may be integer, or both may be floating, or both may be
303 fixed-point.
304 UNSIGNEDP should be nonzero if FROM is an unsigned type.
305 This causes zero-extension instead of sign-extension. */
307 void
308 convert_move (rtx to, rtx from, int unsignedp)
310 enum machine_mode to_mode = GET_MODE (to);
311 enum machine_mode from_mode = GET_MODE (from);
312 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
313 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
314 enum insn_code code;
315 rtx libcall;
317 /* rtx code for making an equivalent value. */
318 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
319 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
322 gcc_assert (to_real == from_real);
323 gcc_assert (to_mode != BLKmode);
324 gcc_assert (from_mode != BLKmode);
326 /* If the source and destination are already the same, then there's
327 nothing to do. */
328 if (to == from)
329 return;
331 /* If FROM is a SUBREG that indicates that we have already done at least
332 the required extension, strip it. We don't handle such SUBREGs as
333 TO here. */
335 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
336 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
337 >= GET_MODE_PRECISION (to_mode))
338 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
339 from = gen_lowpart (to_mode, from), from_mode = to_mode;
341 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
343 if (to_mode == from_mode
344 || (from_mode == VOIDmode && CONSTANT_P (from)))
346 emit_move_insn (to, from);
347 return;
350 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
352 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
354 if (VECTOR_MODE_P (to_mode))
355 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
356 else
357 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
359 emit_move_insn (to, from);
360 return;
363 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
365 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
366 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
367 return;
370 if (to_real)
372 rtx value, insns;
373 convert_optab tab;
375 gcc_assert ((GET_MODE_PRECISION (from_mode)
376 != GET_MODE_PRECISION (to_mode))
377 || (DECIMAL_FLOAT_MODE_P (from_mode)
378 != DECIMAL_FLOAT_MODE_P (to_mode)));
380 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
381 /* Conversion between decimal float and binary float, same size. */
382 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
383 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
384 tab = sext_optab;
385 else
386 tab = trunc_optab;
388 /* Try converting directly if the insn is supported. */
390 code = convert_optab_handler (tab, to_mode, from_mode);
391 if (code != CODE_FOR_nothing)
393 emit_unop_insn (code, to, from,
394 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
395 return;
398 /* Otherwise use a libcall. */
399 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
401 /* Is this conversion implemented yet? */
402 gcc_assert (libcall);
404 start_sequence ();
405 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
406 1, from, from_mode);
407 insns = get_insns ();
408 end_sequence ();
409 emit_libcall_block (insns, to, value,
410 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
411 from)
412 : gen_rtx_FLOAT_EXTEND (to_mode, from));
413 return;
416 /* Handle pointer conversion. */ /* SPEE 900220. */
417 /* Targets are expected to provide conversion insns between PxImode and
418 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
419 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
421 enum machine_mode full_mode
422 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
424 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
425 != CODE_FOR_nothing);
427 if (full_mode != from_mode)
428 from = convert_to_mode (full_mode, from, unsignedp);
429 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
430 to, from, UNKNOWN);
431 return;
433 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
435 rtx new_from;
436 enum machine_mode full_mode
437 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
438 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
439 enum insn_code icode;
441 icode = convert_optab_handler (ctab, full_mode, from_mode);
442 gcc_assert (icode != CODE_FOR_nothing);
444 if (to_mode == full_mode)
446 emit_unop_insn (icode, to, from, UNKNOWN);
447 return;
450 new_from = gen_reg_rtx (full_mode);
451 emit_unop_insn (icode, new_from, from, UNKNOWN);
453 /* else proceed to integer conversions below. */
454 from_mode = full_mode;
455 from = new_from;
458 /* Make sure both are fixed-point modes or both are not. */
459 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
460 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
461 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
463 /* If we widen from_mode to to_mode and they are in the same class,
464 we won't saturate the result.
465 Otherwise, always saturate the result to play safe. */
466 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
467 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
468 expand_fixed_convert (to, from, 0, 0);
469 else
470 expand_fixed_convert (to, from, 0, 1);
471 return;
474 /* Now both modes are integers. */
476 /* Handle expanding beyond a word. */
477 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
478 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
480 rtx insns;
481 rtx lowpart;
482 rtx fill_value;
483 rtx lowfrom;
484 int i;
485 enum machine_mode lowpart_mode;
486 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
488 /* Try converting directly if the insn is supported. */
489 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
490 != CODE_FOR_nothing)
492 /* If FROM is a SUBREG, put it into a register. Do this
493 so that we always generate the same set of insns for
494 better cse'ing; if an intermediate assignment occurred,
495 we won't be doing the operation directly on the SUBREG. */
496 if (optimize > 0 && GET_CODE (from) == SUBREG)
497 from = force_reg (from_mode, from);
498 emit_unop_insn (code, to, from, equiv_code);
499 return;
501 /* Next, try converting via full word. */
502 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
503 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
504 != CODE_FOR_nothing))
506 rtx word_to = gen_reg_rtx (word_mode);
507 if (REG_P (to))
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
511 emit_clobber (to);
513 convert_move (word_to, from, unsignedp);
514 emit_unop_insn (code, to, word_to, equiv_code);
515 return;
518 /* No special multiword conversion insn; do it by hand. */
519 start_sequence ();
 521 /* Since we will turn this into a no conflict block, we must ensure that
 522 the source does not overlap the target, so force it into an isolated
 523 register when it might. Likewise for any MEM input, since the
 524 conversion sequence might require several references to it and we
 525 must ensure we're getting the same value every time. */
527 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_insn (insns);
565 return;
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
570 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0),
576 MEM_ADDR_SPACE (from)))
577 || REG_P (from)
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
581 return;
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
591 if (!((MEM_P (from)
592 && ! MEM_VOLATILE_P (from)
593 && direct_load[(int) to_mode]
594 && ! mode_dependent_address_p (XEXP (from, 0),
595 MEM_ADDR_SPACE (from)))
596 || REG_P (from)
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
603 return;
606 /* Handle extension. */
607 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
611 != CODE_FOR_nothing)
613 emit_unop_insn (code, to, from, equiv_code);
614 return;
616 else
618 enum machine_mode intermediate;
619 rtx tmp;
620 int shift_amount;
622 /* Search for a mode to convert via. */
623 for (intermediate = from_mode; intermediate != VOIDmode;
624 intermediate = GET_MODE_WIDER_MODE (intermediate))
625 if (((can_extend_p (to_mode, intermediate, unsignedp)
626 != CODE_FOR_nothing)
627 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
628 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
629 && (can_extend_p (intermediate, from_mode, unsignedp)
630 != CODE_FOR_nothing))
632 convert_move (to, convert_to_mode (intermediate, from,
633 unsignedp), unsignedp);
634 return;
637 /* No suitable intermediate mode.
638 Generate what we need with shifts. */
639 shift_amount = (GET_MODE_PRECISION (to_mode)
640 - GET_MODE_PRECISION (from_mode));
641 from = gen_lowpart (to_mode, force_reg (from_mode, from));
642 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
643 to, unsignedp);
644 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
645 to, unsignedp);
646 if (tmp != to)
647 emit_move_insn (to, tmp);
648 return;
652 /* Support special truncate insns for certain modes. */
653 if (convert_optab_handler (trunc_optab, to_mode,
654 from_mode) != CODE_FOR_nothing)
656 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
657 to, from, UNKNOWN);
658 return;
661 /* Handle truncation of volatile memrefs, and so on;
662 the things that couldn't be truncated directly,
663 and for which there was no special instruction.
665 ??? Code above formerly short-circuited this, for most integer
666 mode pairs, with a force_reg in from_mode followed by a recursive
667 call to this routine. Appears always to have been wrong. */
668 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
670 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
671 emit_move_insn (to, temp);
672 return;
675 /* Mode combination is not recognized. */
676 gcc_unreachable ();
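/* A minimal sketch of the multi-word widening path above, assuming a
   32-bit word and little-endian word order: the low part is copied as-is
   and every remaining word receives either zero (zero extension) or the
   sign of the low part (sign extension), which is what the emit_store_flag
   call computes as FILL_VALUE.  The helper below is hypothetical and
   unused.  */

static void
sketch_widen_to_two_words (unsigned int low, int sign_extend,
                           unsigned int words[2])
{
  unsigned int fill_value = 0;

  if (sign_extend && (low & 0x80000000u) != 0)
    fill_value = 0xffffffffu;   /* all ones when the value is negative */

  words[0] = low;               /* the lowpart */
  words[1] = fill_value;        /* every remaining word */
}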
679 /* Return an rtx for a value that would result
680 from converting X to mode MODE.
681 Both X and MODE may be floating, or both integer.
682 UNSIGNEDP is nonzero if X is an unsigned value.
683 This can be done by referring to a part of X in place
684 or by copying to a new temporary with conversion. */
687 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
689 return convert_modes (mode, VOIDmode, x, unsignedp);
692 /* Return an rtx for a value that would result
693 from converting X from mode OLDMODE to mode MODE.
694 Both modes may be floating, or both integer.
695 UNSIGNEDP is nonzero if X is an unsigned value.
697 This can be done by referring to a part of X in place
698 or by copying to a new temporary with conversion.
700 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
703 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
705 rtx temp;
707 /* If FROM is a SUBREG that indicates that we have already done at least
708 the required extension, strip it. */
710 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
711 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
712 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
713 x = gen_lowpart (mode, x);
715 if (GET_MODE (x) != VOIDmode)
716 oldmode = GET_MODE (x);
718 if (mode == oldmode)
719 return x;
721 /* There is one case that we must handle specially: If we are converting
722 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
723 we are to interpret the constant as unsigned, gen_lowpart will do
 724 the wrong thing if the constant appears negative. What we want to do is
725 make the high-order word of the constant zero, not all ones. */
727 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
728 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
729 && CONST_INT_P (x) && INTVAL (x) < 0)
731 double_int val = double_int::from_uhwi (INTVAL (x));
733 /* We need to zero extend VAL. */
734 if (oldmode != VOIDmode)
735 val = val.zext (GET_MODE_BITSIZE (oldmode));
737 return immed_double_int_const (val, mode);
740 /* We can do this with a gen_lowpart if both desired and current modes
741 are integer, and this is either a constant integer, a register, or a
742 non-volatile MEM. Except for the constant case where MODE is no
743 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
745 if ((CONST_INT_P (x)
746 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
747 || (GET_MODE_CLASS (mode) == MODE_INT
748 && GET_MODE_CLASS (oldmode) == MODE_INT
749 && (CONST_DOUBLE_AS_INT_P (x)
750 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
751 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
752 && direct_load[(int) mode])
753 || (REG_P (x)
754 && (! HARD_REGISTER_P (x)
755 || HARD_REGNO_MODE_OK (REGNO (x), mode))
756 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
757 GET_MODE (x))))))))
759 /* ?? If we don't know OLDMODE, we have to assume here that
760 X does not need sign- or zero-extension. This may not be
761 the case, but it's the best we can do. */
762 if (CONST_INT_P (x) && oldmode != VOIDmode
763 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
765 HOST_WIDE_INT val = INTVAL (x);
767 /* We must sign or zero-extend in this case. Start by
768 zero-extending, then sign extend if we need to. */
769 val &= GET_MODE_MASK (oldmode);
770 if (! unsignedp
771 && val_signbit_known_set_p (oldmode, val))
772 val |= ~GET_MODE_MASK (oldmode);
774 return gen_int_mode (val, mode);
777 return gen_lowpart (mode, x);
 780 /* Converting from an integer constant into MODE is always equivalent to
 781 a subreg operation. */
782 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
784 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
785 return simplify_gen_subreg (mode, x, oldmode, 0);
788 temp = gen_reg_rtx (mode);
789 convert_move (temp, x, unsignedp);
790 return temp;
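/* A minimal sketch of the CONST_INT handling above, assuming OLDMODE has
   between 1 and 64 bits, modeling HOST_WIDE_INT as long long and assuming
   a two's complement host: mask the value down to OLDMODE's precision,
   then OR the high bits back in when a signed value had its sign bit set.
   The helper below is hypothetical and unused.  */

static long long
sketch_extend_constant (long long val, unsigned int old_bits, int unsignedp)
{
  unsigned long long uval = (unsigned long long) val;
  unsigned long long mask
    = old_bits >= 64 ? ~0ull : (1ull << old_bits) - 1;

  uval &= mask;                 /* zero-extend from OLDMODE */
  if (!unsignedp
      && old_bits < 64
      && (uval & (1ull << (old_bits - 1))) != 0)
    uval |= ~mask;              /* then sign-extend if the sign bit is set */

  return (long long) uval;
}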
793 /* Return the largest alignment we can use for doing a move (or store)
794 of MAX_PIECES. ALIGN is the largest alignment we could use. */
796 static unsigned int
797 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
799 enum machine_mode tmode;
801 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
802 if (align >= GET_MODE_ALIGNMENT (tmode))
803 align = GET_MODE_ALIGNMENT (tmode);
804 else
806 enum machine_mode tmode, xmode;
808 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
809 tmode != VOIDmode;
810 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
811 if (GET_MODE_SIZE (tmode) > max_pieces
812 || SLOW_UNALIGNED_ACCESS (tmode, align))
813 break;
815 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
818 return align;
 821 /* Return the widest integer mode narrower than SIZE bytes. If no such
 822 mode can be found, return VOIDmode. */
824 static enum machine_mode
825 widest_int_mode_for_size (unsigned int size)
827 enum machine_mode tmode, mode = VOIDmode;
829 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
830 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
831 if (GET_MODE_SIZE (tmode) < size)
832 mode = tmode;
834 return mode;
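/* Note the strict "<" above: callers pass MAX_PIECES + 1, so the result
   is the widest integer mode of at most MAX_PIECES bytes.  A minimal
   sketch, assuming integer modes of 1, 2, 4, 8 and 16 bytes and using 0
   in place of VOIDmode; the helper below is hypothetical and unused.  */

static unsigned int
sketch_widest_chunk_below (unsigned int size)
{
  unsigned int chunk, best = 0;

  for (chunk = 1; chunk <= 16; chunk <<= 1)
    if (chunk < size)
      best = chunk;

  return best;
}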
837 /* STORE_MAX_PIECES is the number of bytes at a time that we can
838 store efficiently. Due to internal GCC limitations, this is
839 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
840 for an immediate constant. */
842 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
844 /* Determine whether the LEN bytes can be moved by using several move
845 instructions. Return nonzero if a call to move_by_pieces should
846 succeed. */
849 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
850 unsigned int align ATTRIBUTE_UNUSED)
852 return MOVE_BY_PIECES_P (len, align);
855 /* Generate several move instructions to copy LEN bytes from block FROM to
856 block TO. (These are MEM rtx's with BLKmode).
858 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
859 used to push FROM to the stack.
861 ALIGN is maximum stack alignment we can assume.
 863 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 864 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
865 stpcpy. */
868 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
869 unsigned int align, int endp)
871 struct move_by_pieces_d data;
872 enum machine_mode to_addr_mode;
873 enum machine_mode from_addr_mode = get_address_mode (from);
874 rtx to_addr, from_addr = XEXP (from, 0);
875 unsigned int max_size = MOVE_MAX_PIECES + 1;
876 enum insn_code icode;
878 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
880 data.offset = 0;
881 data.from_addr = from_addr;
882 if (to)
884 to_addr_mode = get_address_mode (to);
885 to_addr = XEXP (to, 0);
886 data.to = to;
887 data.autinc_to
888 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
889 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
890 data.reverse
891 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
893 else
895 to_addr_mode = VOIDmode;
896 to_addr = NULL_RTX;
897 data.to = NULL_RTX;
898 data.autinc_to = 1;
899 #ifdef STACK_GROWS_DOWNWARD
900 data.reverse = 1;
901 #else
902 data.reverse = 0;
903 #endif
905 data.to_addr = to_addr;
906 data.from = from;
907 data.autinc_from
908 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
909 || GET_CODE (from_addr) == POST_INC
910 || GET_CODE (from_addr) == POST_DEC);
912 data.explicit_inc_from = 0;
913 data.explicit_inc_to = 0;
914 if (data.reverse) data.offset = len;
915 data.len = len;
917 /* If copying requires more than two move insns,
918 copy addresses to registers (to make displacements shorter)
919 and use post-increment if available. */
920 if (!(data.autinc_from && data.autinc_to)
921 && move_by_pieces_ninsns (len, align, max_size) > 2)
923 /* Find the mode of the largest move...
924 MODE might not be used depending on the definitions of the
925 USE_* macros below. */
926 enum machine_mode mode ATTRIBUTE_UNUSED
927 = widest_int_mode_for_size (max_size);
929 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
931 data.from_addr = copy_to_mode_reg (from_addr_mode,
932 plus_constant (from_addr_mode,
933 from_addr, len));
934 data.autinc_from = 1;
935 data.explicit_inc_from = -1;
937 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
939 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
940 data.autinc_from = 1;
941 data.explicit_inc_from = 1;
943 if (!data.autinc_from && CONSTANT_P (from_addr))
944 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
945 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
947 data.to_addr = copy_to_mode_reg (to_addr_mode,
948 plus_constant (to_addr_mode,
949 to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
968 while (max_size > 1 && data.len > 0)
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
972 if (mode == VOIDmode)
973 break;
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
979 max_size = GET_MODE_SIZE (mode);
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
985 if (endp)
987 rtx to1;
989 gcc_assert (!data.reverse);
990 if (data.autinc_to)
992 if (endp == 2)
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
996 else
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (to_addr_mode,
999 data.to_addr,
1000 -1));
1002 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1003 data.offset);
1005 else
1007 if (endp == 2)
1008 --data.offset;
1009 to1 = adjust_address (data.to, QImode, data.offset);
1011 return to1;
1013 else
1014 return data.to;
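/* A minimal sketch of the copying done above, assuming MAX_PIECE is a
   power of two and leaving out the auto-increment addressing, the
   alignment and optab checks, the PUSH_ROUNDING path and the ENDP return
   value: copy the widest chunks first, then finish with smaller ones.
   Each memcpy below stands for one move insn of CHUNK bytes; the helper
   is hypothetical and unused.  */

static void
sketch_move_by_pieces (char *to, const char *from,
                       unsigned HOST_WIDE_INT len, unsigned int max_piece)
{
  unsigned int chunk;

  for (chunk = max_piece; chunk >= 1 && len > 0; chunk >>= 1)
    while (len >= chunk)
      {
        memcpy (to, from, chunk);
        to += chunk;
        from += chunk;
        len -= chunk;
      }
}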
1017 /* Return number of insns required to move L bytes by pieces.
1018 ALIGN (in bits) is maximum alignment we can assume. */
1020 unsigned HOST_WIDE_INT
1021 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1022 unsigned int max_size)
1024 unsigned HOST_WIDE_INT n_insns = 0;
1026 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1028 while (max_size > 1 && l > 0)
1030 enum machine_mode mode;
1031 enum insn_code icode;
1033 mode = widest_int_mode_for_size (max_size);
1035 if (mode == VOIDmode)
1036 break;
1038 icode = optab_handler (mov_optab, mode);
1039 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1040 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1042 max_size = GET_MODE_SIZE (mode);
1045 gcc_assert (!l);
1046 return n_insns;
1049 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1050 with move instructions for mode MODE. GENFUN is the gen_... function
1051 to make a move insn for that mode. DATA has all the other info. */
1053 static void
1054 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1055 struct move_by_pieces_d *data)
1057 unsigned int size = GET_MODE_SIZE (mode);
1058 rtx to1 = NULL_RTX, from1;
1060 while (data->len >= size)
1062 if (data->reverse)
1063 data->offset -= size;
1065 if (data->to)
1067 if (data->autinc_to)
1068 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1069 data->offset);
1070 else
1071 to1 = adjust_address (data->to, mode, data->offset);
1074 if (data->autinc_from)
1075 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1076 data->offset);
1077 else
1078 from1 = adjust_address (data->from, mode, data->offset);
1080 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1081 emit_insn (gen_add2_insn (data->to_addr,
1082 gen_int_mode (-(HOST_WIDE_INT) size,
1083 GET_MODE (data->to_addr))));
1084 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1085 emit_insn (gen_add2_insn (data->from_addr,
1086 gen_int_mode (-(HOST_WIDE_INT) size,
1087 GET_MODE (data->from_addr))));
1089 if (data->to)
1090 emit_insn ((*genfun) (to1, from1));
1091 else
1093 #ifdef PUSH_ROUNDING
1094 emit_single_push_insn (mode, from1, NULL);
1095 #else
1096 gcc_unreachable ();
1097 #endif
1100 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1101 emit_insn (gen_add2_insn (data->to_addr,
1102 gen_int_mode (size,
1103 GET_MODE (data->to_addr))));
1104 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1105 emit_insn (gen_add2_insn (data->from_addr,
1106 gen_int_mode (size,
1107 GET_MODE (data->from_addr))));
1109 if (! data->reverse)
1110 data->offset += size;
1112 data->len -= size;
1116 /* Emit code to move a block Y to a block X. This may be done with
1117 string-move instructions, with multiple scalar move instructions,
1118 or with a library call.
1120 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1121 SIZE is an rtx that says how long they are.
1122 ALIGN is the maximum alignment we can assume they have.
1123 METHOD describes what kind of copy this is, and what mechanisms may be used.
 1124 MIN_SIZE is the minimal size of the block to move.
 1125 MAX_SIZE is the maximal size of the block to move; if it cannot be
 1126 represented in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1128 Return the address of the new block, if memcpy is called and returns it,
1129 0 otherwise. */
1132 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1133 unsigned int expected_align, HOST_WIDE_INT expected_size,
1134 unsigned HOST_WIDE_INT min_size,
1135 unsigned HOST_WIDE_INT max_size,
1136 unsigned HOST_WIDE_INT probable_max_size)
1138 bool may_use_call;
1139 rtx retval = 0;
1140 unsigned int align;
1142 gcc_assert (size);
1143 if (CONST_INT_P (size)
1144 && INTVAL (size) == 0)
1145 return 0;
1147 switch (method)
1149 case BLOCK_OP_NORMAL:
1150 case BLOCK_OP_TAILCALL:
1151 may_use_call = true;
1152 break;
1154 case BLOCK_OP_CALL_PARM:
1155 may_use_call = block_move_libcall_safe_for_call_parm ();
1157 /* Make inhibit_defer_pop nonzero around the library call
1158 to force it to pop the arguments right away. */
1159 NO_DEFER_POP;
1160 break;
1162 case BLOCK_OP_NO_LIBCALL:
1163 may_use_call = false;
1164 break;
1166 default:
1167 gcc_unreachable ();
1170 gcc_assert (MEM_P (x) && MEM_P (y));
1171 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1172 gcc_assert (align >= BITS_PER_UNIT);
1174 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1175 block copy is more efficient for other large modes, e.g. DCmode. */
1176 x = adjust_address (x, BLKmode, 0);
1177 y = adjust_address (y, BLKmode, 0);
1179 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1180 can be incorrect is coming from __builtin_memcpy. */
1181 if (CONST_INT_P (size))
1183 x = shallow_copy_rtx (x);
1184 y = shallow_copy_rtx (y);
1185 set_mem_size (x, INTVAL (size));
1186 set_mem_size (y, INTVAL (size));
1189 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1190 move_by_pieces (x, y, INTVAL (size), align, 0);
1191 else if (emit_block_move_via_movmem (x, y, size, align,
1192 expected_align, expected_size,
1193 min_size, max_size, probable_max_size))
1195 else if (may_use_call
1196 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1197 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1199 /* Since x and y are passed to a libcall, mark the corresponding
1200 tree EXPR as addressable. */
1201 tree y_expr = MEM_EXPR (y);
1202 tree x_expr = MEM_EXPR (x);
1203 if (y_expr)
1204 mark_addressable (y_expr);
1205 if (x_expr)
1206 mark_addressable (x_expr);
1207 retval = emit_block_move_via_libcall (x, y, size,
1208 method == BLOCK_OP_TAILCALL);
1211 else
1212 emit_block_move_via_loop (x, y, size, align);
1214 if (method == BLOCK_OP_CALL_PARM)
1215 OK_DEFER_POP;
1217 return retval;
1221 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1223 unsigned HOST_WIDE_INT max, min = 0;
1224 if (GET_CODE (size) == CONST_INT)
1225 min = max = UINTVAL (size);
1226 else
1227 max = GET_MODE_MASK (GET_MODE (size));
1228 return emit_block_move_hints (x, y, size, method, 0, -1,
1229 min, max, max);
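/* A minimal sketch of the strategy choice made by emit_block_move_hints,
   with the alignment, address-space and size checks folded into the
   boolean inputs: open-code small constant-sized copies, otherwise try
   the target's movmem pattern, otherwise call memcpy, and only as a last
   resort emit an explicit loop.  The enum and helper below are
   hypothetical and unused.  */

enum sketch_block_move_strategy
{
  SKETCH_BY_PIECES,     /* small constant size: scalar moves */
  SKETCH_MOVMEM,        /* target movmem pattern */
  SKETCH_LIBCALL,       /* call memcpy */
  SKETCH_LOOP           /* libcalls forbidden: byte loop */
};

static enum sketch_block_move_strategy
sketch_choose_block_move (int by_pieces_ok, int movmem_ok, int may_use_call)
{
  if (by_pieces_ok)
    return SKETCH_BY_PIECES;
  if (movmem_ok)
    return SKETCH_MOVMEM;
  if (may_use_call)
    return SKETCH_LIBCALL;
  return SKETCH_LOOP;
}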
1232 /* A subroutine of emit_block_move. Returns true if calling the
1233 block move libcall will not clobber any parameters which may have
1234 already been placed on the stack. */
1236 static bool
1237 block_move_libcall_safe_for_call_parm (void)
1239 #if defined (REG_PARM_STACK_SPACE)
1240 tree fn;
1241 #endif
1243 /* If arguments are pushed on the stack, then they're safe. */
1244 if (PUSH_ARGS)
1245 return true;
1247 /* If registers go on the stack anyway, any argument is sure to clobber
1248 an outgoing argument. */
1249 #if defined (REG_PARM_STACK_SPACE)
1250 fn = emit_block_move_libcall_fn (false);
1251 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1252 depend on its argument. */
1253 (void) fn;
1254 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1255 && REG_PARM_STACK_SPACE (fn) != 0)
1256 return false;
1257 #endif
1259 /* If any argument goes in memory, then it might clobber an outgoing
1260 argument. */
1262 CUMULATIVE_ARGS args_so_far_v;
1263 cumulative_args_t args_so_far;
1264 tree fn, arg;
1266 fn = emit_block_move_libcall_fn (false);
1267 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1268 args_so_far = pack_cumulative_args (&args_so_far_v);
1270 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1271 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1273 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1274 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1275 NULL_TREE, true);
1276 if (!tmp || !REG_P (tmp))
1277 return false;
1278 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1279 return false;
1280 targetm.calls.function_arg_advance (args_so_far, mode,
1281 NULL_TREE, true);
1284 return true;
1287 /* A subroutine of emit_block_move. Expand a movmem pattern;
1288 return true if successful. */
1290 static bool
1291 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1292 unsigned int expected_align, HOST_WIDE_INT expected_size,
1293 unsigned HOST_WIDE_INT min_size,
1294 unsigned HOST_WIDE_INT max_size,
1295 unsigned HOST_WIDE_INT probable_max_size)
1297 int save_volatile_ok = volatile_ok;
1298 enum machine_mode mode;
1300 if (expected_align < align)
1301 expected_align = align;
1302 if (expected_size != -1)
1304 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1305 expected_size = probable_max_size;
1306 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1307 expected_size = min_size;
1310 /* Since this is a move insn, we don't care about volatility. */
1311 volatile_ok = 1;
1313 /* Try the most limited insn first, because there's no point
1314 including more than one in the machine description unless
1315 the more limited one has some advantage. */
1317 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1318 mode = GET_MODE_WIDER_MODE (mode))
1320 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1322 if (code != CODE_FOR_nothing
1323 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1324 here because if SIZE is less than the mode mask, as it is
1325 returned by the macro, it will definitely be less than the
1326 actual mode mask. Since SIZE is within the Pmode address
1327 space, we limit MODE to Pmode. */
1328 && ((CONST_INT_P (size)
1329 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1330 <= (GET_MODE_MASK (mode) >> 1)))
1331 || max_size <= (GET_MODE_MASK (mode) >> 1)
1332 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1334 struct expand_operand ops[9];
1335 unsigned int nops;
1337 /* ??? When called via emit_block_move_for_call, it'd be
1338 nice if there were some way to inform the backend, so
1339 that it doesn't fail the expansion because it thinks
1340 emitting the libcall would be more efficient. */
1341 nops = insn_data[(int) code].n_generator_args;
1342 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1344 create_fixed_operand (&ops[0], x);
1345 create_fixed_operand (&ops[1], y);
1346 /* The check above guarantees that this size conversion is valid. */
1347 create_convert_operand_to (&ops[2], size, mode, true);
1348 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1349 if (nops >= 6)
1351 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1352 create_integer_operand (&ops[5], expected_size);
1354 if (nops >= 8)
1356 create_integer_operand (&ops[6], min_size);
 1357 /* If we cannot represent the maximal size,
 1358 make the parameter NULL. */
1359 if ((HOST_WIDE_INT) max_size != -1)
1360 create_integer_operand (&ops[7], max_size);
1361 else
1362 create_fixed_operand (&ops[7], NULL);
1364 if (nops == 9)
 1366 /* If we cannot represent the maximal size,
 1367 make the parameter NULL. */
1368 if ((HOST_WIDE_INT) probable_max_size != -1)
1369 create_integer_operand (&ops[8], probable_max_size);
1370 else
1371 create_fixed_operand (&ops[8], NULL);
1373 if (maybe_expand_insn (code, nops, ops))
1375 volatile_ok = save_volatile_ok;
1376 return true;
1381 volatile_ok = save_volatile_ok;
1382 return false;
1385 /* A subroutine of emit_block_move. Expand a call to memcpy.
1386 Return the return value from memcpy, 0 otherwise. */
1389 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1391 rtx dst_addr, src_addr;
1392 tree call_expr, fn, src_tree, dst_tree, size_tree;
1393 enum machine_mode size_mode;
1394 rtx retval;
1396 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1397 pseudos. We can then place those new pseudos into a VAR_DECL and
1398 use them later. */
1400 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1401 src_addr = copy_addr_to_reg (XEXP (src, 0));
1403 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1404 src_addr = convert_memory_address (ptr_mode, src_addr);
1406 dst_tree = make_tree (ptr_type_node, dst_addr);
1407 src_tree = make_tree (ptr_type_node, src_addr);
1409 size_mode = TYPE_MODE (sizetype);
1411 size = convert_to_mode (size_mode, size, 1);
1412 size = copy_to_mode_reg (size_mode, size);
1414 /* It is incorrect to use the libcall calling conventions to call
1415 memcpy in this context. This could be a user call to memcpy and
1416 the user may wish to examine the return value from memcpy. For
1417 targets where libcalls and normal calls have different conventions
1418 for returning pointers, we could end up generating incorrect code. */
1420 size_tree = make_tree (sizetype, size);
1422 fn = emit_block_move_libcall_fn (true);
1423 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1424 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1426 retval = expand_normal (call_expr);
1428 return retval;
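/* At the source level the sequence built above amounts to the call
   below, with SIZE first converted to sizetype; a minimal sketch,
   hypothetical and unused.  Going through a real CALL_EXPR rather than a
   bare libcall keeps the return value usable on targets where the two
   calling conventions differ.  */

static void *
sketch_block_move_libcall (void *dst, const void *src, size_t size)
{
  return memcpy (dst, src, size);
}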
1431 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1432 for the function we use for block copies. */
1434 static GTY(()) tree block_move_fn;
1436 void
1437 init_block_move_fn (const char *asmspec)
1439 if (!block_move_fn)
1441 tree args, fn, attrs, attr_args;
1443 fn = get_identifier ("memcpy");
1444 args = build_function_type_list (ptr_type_node, ptr_type_node,
1445 const_ptr_type_node, sizetype,
1446 NULL_TREE);
1448 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1449 DECL_EXTERNAL (fn) = 1;
1450 TREE_PUBLIC (fn) = 1;
1451 DECL_ARTIFICIAL (fn) = 1;
1452 TREE_NOTHROW (fn) = 1;
1453 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1454 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1456 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1457 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1459 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1461 block_move_fn = fn;
1464 if (asmspec)
1465 set_user_assembler_name (block_move_fn, asmspec);
1468 static tree
1469 emit_block_move_libcall_fn (int for_call)
1471 static bool emitted_extern;
1473 if (!block_move_fn)
1474 init_block_move_fn (NULL);
1476 if (for_call && !emitted_extern)
1478 emitted_extern = true;
1479 make_decl_rtl (block_move_fn);
1482 return block_move_fn;
1485 /* A subroutine of emit_block_move. Copy the data via an explicit
1486 loop. This is used only when libcalls are forbidden. */
1487 /* ??? It'd be nice to copy in hunks larger than QImode. */
1489 static void
1490 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1491 unsigned int align ATTRIBUTE_UNUSED)
1493 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1494 enum machine_mode x_addr_mode = get_address_mode (x);
1495 enum machine_mode y_addr_mode = get_address_mode (y);
1496 enum machine_mode iter_mode;
1498 iter_mode = GET_MODE (size);
1499 if (iter_mode == VOIDmode)
1500 iter_mode = word_mode;
1502 top_label = gen_label_rtx ();
1503 cmp_label = gen_label_rtx ();
1504 iter = gen_reg_rtx (iter_mode);
1506 emit_move_insn (iter, const0_rtx);
1508 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1509 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1510 do_pending_stack_adjust ();
1512 emit_jump (cmp_label);
1513 emit_label (top_label);
1515 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1516 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1518 if (x_addr_mode != y_addr_mode)
1519 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1520 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1522 x = change_address (x, QImode, x_addr);
1523 y = change_address (y, QImode, y_addr);
1525 emit_move_insn (x, y);
1527 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1528 true, OPTAB_LIB_WIDEN);
1529 if (tmp != iter)
1530 emit_move_insn (iter, tmp);
1532 emit_label (cmp_label);
1534 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1535 true, top_label, REG_BR_PROB_BASE * 90 / 100);
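/* The RTL emitted above is this byte-copy loop, with ITER kept in a
   pseudo register and the backward branch predicted taken 90% of the
   time; a minimal sketch, hypothetical and unused.  */

static void
sketch_block_move_loop (unsigned char *x, const unsigned char *y,
                        unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT iter;

  for (iter = 0; iter < size; iter++)
    x[iter] = y[iter];
}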
1538 /* Copy all or part of a value X into registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1541 void
1542 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1544 int i;
1545 #ifdef HAVE_load_multiple
1546 rtx pat;
1547 rtx last;
1548 #endif
1550 if (nregs == 0)
1551 return;
1553 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1554 x = validize_mem (force_const_mem (mode, x));
1556 /* See if the machine can do this with a load multiple insn. */
1557 #ifdef HAVE_load_multiple
1558 if (HAVE_load_multiple)
1560 last = get_last_insn ();
1561 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1562 GEN_INT (nregs));
1563 if (pat)
1565 emit_insn (pat);
1566 return;
1568 else
1569 delete_insns_since (last);
1571 #endif
1573 for (i = 0; i < nregs; i++)
1574 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1575 operand_subword_force (x, i, mode));
1578 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1579 The number of registers to be filled is NREGS. */
1581 void
1582 move_block_from_reg (int regno, rtx x, int nregs)
1584 int i;
1586 if (nregs == 0)
1587 return;
1589 /* See if the machine can do this with a store multiple insn. */
1590 #ifdef HAVE_store_multiple
1591 if (HAVE_store_multiple)
1593 rtx last = get_last_insn ();
1594 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1595 GEN_INT (nregs));
1596 if (pat)
1598 emit_insn (pat);
1599 return;
1601 else
1602 delete_insns_since (last);
1604 #endif
1606 for (i = 0; i < nregs; i++)
1608 rtx tem = operand_subword (x, i, 1, BLKmode);
1610 gcc_assert (tem);
1612 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1616 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1617 ORIG, where ORIG is a non-consecutive group of registers represented by
1618 a PARALLEL. The clone is identical to the original except in that the
1619 original set of registers is replaced by a new set of pseudo registers.
1620 The new set has the same modes as the original set. */
1623 gen_group_rtx (rtx orig)
1625 int i, length;
1626 rtx *tmps;
1628 gcc_assert (GET_CODE (orig) == PARALLEL);
1630 length = XVECLEN (orig, 0);
1631 tmps = XALLOCAVEC (rtx, length);
1633 /* Skip a NULL entry in first slot. */
1634 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1636 if (i)
1637 tmps[0] = 0;
1639 for (; i < length; i++)
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1642 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1644 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1647 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1650 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1651 except that values are placed in TMPS[i], and must later be moved
1652 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1654 static void
1655 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1657 rtx src;
1658 int start, i;
1659 enum machine_mode m = GET_MODE (orig_src);
1661 gcc_assert (GET_CODE (dst) == PARALLEL);
1663 if (m != VOIDmode
1664 && !SCALAR_INT_MODE_P (m)
1665 && !MEM_P (orig_src)
1666 && GET_CODE (orig_src) != CONCAT)
1668 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1669 if (imode == BLKmode)
1670 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1671 else
1672 src = gen_reg_rtx (imode);
1673 if (imode != BLKmode)
1674 src = gen_lowpart (GET_MODE (orig_src), src);
1675 emit_move_insn (src, orig_src);
1676 /* ...and back again. */
1677 if (imode != BLKmode)
1678 src = gen_lowpart (imode, src);
1679 emit_group_load_1 (tmps, dst, src, type, ssize);
1680 return;
1683 /* Check for a NULL entry, used to indicate that the parameter goes
1684 both on the stack and in registers. */
1685 if (XEXP (XVECEXP (dst, 0, 0), 0))
1686 start = 0;
1687 else
1688 start = 1;
1690 /* Process the pieces. */
1691 for (i = start; i < XVECLEN (dst, 0); i++)
1693 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1694 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1695 unsigned int bytelen = GET_MODE_SIZE (mode);
1696 int shift = 0;
1698 /* Handle trailing fragments that run over the size of the struct. */
1699 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1701 /* Arrange to shift the fragment to where it belongs.
1702 extract_bit_field loads to the lsb of the reg. */
1703 if (
1704 #ifdef BLOCK_REG_PADDING
1705 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1706 == (BYTES_BIG_ENDIAN ? upward : downward)
1707 #else
1708 BYTES_BIG_ENDIAN
1709 #endif
1711 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1712 bytelen = ssize - bytepos;
1713 gcc_assert (bytelen > 0);
1716 /* If we won't be loading directly from memory, protect the real source
1717 from strange tricks we might play; but make sure that the source can
1718 be loaded directly into the destination. */
1719 src = orig_src;
1720 if (!MEM_P (orig_src)
1721 && (!CONSTANT_P (orig_src)
1722 || (GET_MODE (orig_src) != mode
1723 && GET_MODE (orig_src) != VOIDmode)))
1725 if (GET_MODE (orig_src) == VOIDmode)
1726 src = gen_reg_rtx (mode);
1727 else
1728 src = gen_reg_rtx (GET_MODE (orig_src));
1730 emit_move_insn (src, orig_src);
1733 /* Optimize the access just a bit. */
1734 if (MEM_P (src)
1735 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1736 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1737 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1738 && bytelen == GET_MODE_SIZE (mode))
1740 tmps[i] = gen_reg_rtx (mode);
1741 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1743 else if (COMPLEX_MODE_P (mode)
1744 && GET_MODE (src) == mode
1745 && bytelen == GET_MODE_SIZE (mode))
1746 /* Let emit_move_complex do the bulk of the work. */
1747 tmps[i] = src;
1748 else if (GET_CODE (src) == CONCAT)
1750 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1751 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1753 if ((bytepos == 0 && bytelen == slen0)
1754 || (bytepos != 0 && bytepos + bytelen <= slen))
1756 /* The following assumes that the concatenated objects all
1757 have the same size. In this case, a simple calculation
1758 can be used to determine the object and the bit field
1759 to be extracted. */
1760 tmps[i] = XEXP (src, bytepos / slen0);
1761 if (! CONSTANT_P (tmps[i])
1762 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1763 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1764 (bytepos % slen0) * BITS_PER_UNIT,
1765 1, NULL_RTX, mode, mode);
1767 else
1769 rtx mem;
1771 gcc_assert (!bytepos);
1772 mem = assign_stack_temp (GET_MODE (src), slen);
1773 emit_move_insn (mem, src);
1774 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1775 0, 1, NULL_RTX, mode, mode);
1778 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1779 SIMD register, which is currently broken. Until we get GCC
1780 to emit proper RTL for these cases, let's dump to memory. */
1781 else if (VECTOR_MODE_P (GET_MODE (dst))
1782 && REG_P (src))
1784 int slen = GET_MODE_SIZE (GET_MODE (src));
1785 rtx mem;
1787 mem = assign_stack_temp (GET_MODE (src), slen);
1788 emit_move_insn (mem, src);
1789 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1791 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1792 && XVECLEN (dst, 0) > 1)
1793 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1794 else if (CONSTANT_P (src))
1796 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1798 if (len == ssize)
1799 tmps[i] = src;
1800 else
1802 rtx first, second;
1804 gcc_assert (2 * len == ssize);
1805 split_double (src, &first, &second);
1806 if (i)
1807 tmps[i] = second;
1808 else
1809 tmps[i] = first;
1812 else if (REG_P (src) && GET_MODE (src) == mode)
1813 tmps[i] = src;
1814 else
1815 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1816 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1817 mode, mode);
1819 if (shift)
1820 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1821 shift, tmps[i], 0);
1825 /* Emit code to move a block SRC of type TYPE to a block DST,
1826 where DST is non-consecutive registers represented by a PARALLEL.
1827 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1828 if not known. */
1830 void
1831 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1833 rtx *tmps;
1834 int i;
1836 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1837 emit_group_load_1 (tmps, dst, src, type, ssize);
1839 /* Copy the extracted pieces into the proper (probable) hard regs. */
1840 for (i = 0; i < XVECLEN (dst, 0); i++)
1842 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1843 if (d == NULL)
1844 continue;
1845 emit_move_insn (d, tmps[i]);
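/* Stripped of the special cases, loading into a PARALLEL is: for each
   (register, byte offset) pair in DST, pull that slice of the source
   into a temporary, then copy the temporaries into the (probable) hard
   registers.  A minimal sketch assuming at most 8-byte pieces and
   little-endian byte order; trailing-fragment shifting and the CONCAT
   and constant source cases are left out.  The struct and helper below
   are hypothetical and unused.  */

struct sketch_group_piece
{
  unsigned long long value;     /* stands in for the pseudo in TMPS[i] */
  unsigned int bytepos;         /* byte offset of this piece in the source */
  unsigned int bytelen;         /* its size in bytes, at most 8 */
};

static void
sketch_group_load (struct sketch_group_piece *pieces, int n,
                   const unsigned char *src)
{
  int i;

  for (i = 0; i < n; i++)
    {
      unsigned long long v = 0;
      unsigned int b;

      for (b = 0; b < pieces[i].bytelen; b++)
        v |= (unsigned long long) src[pieces[i].bytepos + b] << (8 * b);

      pieces[i].value = v;
    }
}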
1849 /* Similar, but load SRC into new pseudos in a format that looks like
1850 PARALLEL. This can later be fed to emit_group_move to get things
1851 in the right place. */
1854 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1856 rtvec vec;
1857 int i;
1859 vec = rtvec_alloc (XVECLEN (parallel, 0));
1860 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1862 /* Convert the vector to look just like the original PARALLEL, except
1863 with the computed values. */
1864 for (i = 0; i < XVECLEN (parallel, 0); i++)
1866 rtx e = XVECEXP (parallel, 0, i);
1867 rtx d = XEXP (e, 0);
1869 if (d)
1871 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1872 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1874 RTVEC_ELT (vec, i) = e;
1877 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1880 /* Emit code to move a block SRC to block DST, where SRC and DST are
1881 non-consecutive groups of registers, each represented by a PARALLEL. */
1883 void
1884 emit_group_move (rtx dst, rtx src)
1886 int i;
1888 gcc_assert (GET_CODE (src) == PARALLEL
1889 && GET_CODE (dst) == PARALLEL
1890 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1892 /* Skip first entry if NULL. */
1893 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1894 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1895 XEXP (XVECEXP (src, 0, i), 0));
1898 /* Move a group of registers represented by a PARALLEL into pseudos. */
1901 emit_group_move_into_temps (rtx src)
1903 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1904 int i;
1906 for (i = 0; i < XVECLEN (src, 0); i++)
1908 rtx e = XVECEXP (src, 0, i);
1909 rtx d = XEXP (e, 0);
1911 if (d)
1912 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1913 RTVEC_ELT (vec, i) = e;
1916 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1919 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1920 where SRC is non-consecutive registers represented by a PARALLEL.
1921 SSIZE represents the total size of block ORIG_DST, or -1 if not
1922 known. */
1924 void
1925 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1927 rtx *tmps, dst;
1928 int start, finish, i;
1929 enum machine_mode m = GET_MODE (orig_dst);
1931 gcc_assert (GET_CODE (src) == PARALLEL);
1933 if (!SCALAR_INT_MODE_P (m)
1934 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1936 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1937 if (imode == BLKmode)
1938 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1939 else
1940 dst = gen_reg_rtx (imode);
1941 emit_group_store (dst, src, type, ssize);
1942 if (imode != BLKmode)
1943 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1944 emit_move_insn (orig_dst, dst);
1945 return;
1948 /* Check for a NULL entry, used to indicate that the parameter goes
1949 both on the stack and in registers. */
1950 if (XEXP (XVECEXP (src, 0, 0), 0))
1951 start = 0;
1952 else
1953 start = 1;
1954 finish = XVECLEN (src, 0);
1956 tmps = XALLOCAVEC (rtx, finish);
1958 /* Copy the (probable) hard regs into pseudos. */
1959 for (i = start; i < finish; i++)
1961 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1962 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1964 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1965 emit_move_insn (tmps[i], reg);
1967 else
1968 tmps[i] = reg;
1971 /* If we won't be storing directly into memory, protect the real destination
1972 from strange tricks we might play. */
1973 dst = orig_dst;
1974 if (GET_CODE (dst) == PARALLEL)
1976 rtx temp;
1978 /* We can get a PARALLEL dst if there is a conditional expression in
1979 a return statement. In that case, the dst and src are the same,
1980 so no action is necessary. */
1981 if (rtx_equal_p (dst, src))
1982 return;
1984 /* It is unclear if we can ever reach here, but we may as well handle
1985 it. Allocate a temporary, and split this into a store/load to/from
1986 the temporary. */
1988 temp = assign_stack_temp (GET_MODE (dst), ssize);
1989 emit_group_store (temp, src, type, ssize);
1990 emit_group_load (dst, temp, type, ssize);
1991 return;
1993 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1995 enum machine_mode outer = GET_MODE (dst);
1996 enum machine_mode inner;
1997 HOST_WIDE_INT bytepos;
1998 bool done = false;
1999 rtx temp;
2001 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2002 dst = gen_reg_rtx (outer);
2004 /* Make life a bit easier for combine. */
2005 /* If the first element of the vector is the low part
2006 of the destination mode, use a paradoxical subreg to
2007 initialize the destination. */
2008 if (start < finish)
2010 inner = GET_MODE (tmps[start]);
2011 bytepos = subreg_lowpart_offset (inner, outer);
2012 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2014 temp = simplify_gen_subreg (outer, tmps[start],
2015 inner, 0);
2016 if (temp)
2018 emit_move_insn (dst, temp);
2019 done = true;
2020 start++;
2025 /* If the first element wasn't the low part, try the last. */
2026 if (!done
2027 && start < finish - 1)
2029 inner = GET_MODE (tmps[finish - 1]);
2030 bytepos = subreg_lowpart_offset (inner, outer);
2031 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2033 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2034 inner, 0);
2035 if (temp)
2037 emit_move_insn (dst, temp);
2038 done = true;
2039 finish--;
2044 /* Otherwise, simply initialize the result to zero. */
2045 if (!done)
2046 emit_move_insn (dst, CONST0_RTX (outer));
2049 /* Process the pieces. */
2050 for (i = start; i < finish; i++)
2052 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2053 enum machine_mode mode = GET_MODE (tmps[i]);
2054 unsigned int bytelen = GET_MODE_SIZE (mode);
2055 unsigned int adj_bytelen = bytelen;
2056 rtx dest = dst;
2058 /* Handle trailing fragments that run over the size of the struct. */
2059 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2060 adj_bytelen = ssize - bytepos;
2062 if (GET_CODE (dst) == CONCAT)
2064 if (bytepos + adj_bytelen
2065 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2066 dest = XEXP (dst, 0);
2067 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2069 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2070 dest = XEXP (dst, 1);
2072 else
2074 enum machine_mode dest_mode = GET_MODE (dest);
2075 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2077 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2079 if (GET_MODE_ALIGNMENT (dest_mode)
2080 >= GET_MODE_ALIGNMENT (tmp_mode))
2082 dest = assign_stack_temp (dest_mode,
2083 GET_MODE_SIZE (dest_mode));
2084 emit_move_insn (adjust_address (dest,
2085 tmp_mode,
2086 bytepos),
2087 tmps[i]);
2088 dst = dest;
2090 else
2092 dest = assign_stack_temp (tmp_mode,
2093 GET_MODE_SIZE (tmp_mode));
2094 emit_move_insn (dest, tmps[i]);
2095 dst = adjust_address (dest, dest_mode, bytepos);
2097 break;
2101 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2103 /* store_bit_field always takes its value from the lsb.
2104 Move the fragment to the lsb if it's not already there. */
2105 if (
2106 #ifdef BLOCK_REG_PADDING
2107 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2108 == (BYTES_BIG_ENDIAN ? upward : downward)
2109 #else
2110 BYTES_BIG_ENDIAN
2111 #endif
2114 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2115 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2116 shift, tmps[i], 0);
2118 bytelen = adj_bytelen;
2121 /* Optimize the access just a bit. */
2122 if (MEM_P (dest)
2123 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2124 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2125 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2126 && bytelen == GET_MODE_SIZE (mode))
2127 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2128 else
2129 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2130 0, 0, mode, tmps[i]);
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (orig_dst != dst)
2135 emit_move_insn (orig_dst, dst);
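/* Illustrative usage (hypothetical names; same two-register PARALLEL at
   byte offsets 0 and 8 as in the sketch after emit_group_load above):
   spilling that value into a 16-byte stack temporary amounts to

     emit_group_store (temp_mem, value_parallel, type, 16);

   which first copies each hard register into a pseudo and then writes
   the pieces at offsets 0 and 8, falling back to store_bit_field only
   when a piece is misaligned or runs past SSIZE.  */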
2138 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2139 of the value stored in X. */
2142 maybe_emit_group_store (rtx x, tree type)
2144 enum machine_mode mode = TYPE_MODE (type);
2145 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2146 if (GET_CODE (x) == PARALLEL)
2148 rtx result = gen_reg_rtx (mode);
2149 emit_group_store (result, x, type, int_size_in_bytes (type));
2150 return result;
2152 return x;
2155 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2157 This is used on targets that return BLKmode values in registers. */
2159 void
2160 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2166 enum machine_mode mode = GET_MODE (srcreg);
2167 enum machine_mode tmode = GET_MODE (target);
2168 enum machine_mode copy_mode;
2170 /* BLKmode registers created in the back-end shouldn't have survived. */
2171 gcc_assert (mode != BLKmode);
2173 /* If the structure doesn't take up a whole number of words, see whether
2174 SRCREG is padded on the left or on the right. If it's on the left,
2175 set PADDING_CORRECTION to the number of bits to skip.
2177 In most ABIs, the structure will be returned at the least significant end of
2178 the register, which translates to right padding on little-endian
2179 targets and left padding on big-endian targets. The opposite
2180 holds if the structure is returned at the most significant
2181 end of the register. */
2182 if (bytes % UNITS_PER_WORD != 0
2183 && (targetm.calls.return_in_msb (type)
2184 ? !BYTES_BIG_ENDIAN
2185 : BYTES_BIG_ENDIAN))
2186 padding_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2189 /* We can use a single move if we have an exact mode for the size. */
2190 else if (MEM_P (target)
2191 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2192 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2193 && bytes == GET_MODE_SIZE (mode))
2195 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2196 return;
2199 /* And if we additionally have the same mode for a register. */
2200 else if (REG_P (target)
2201 && GET_MODE (target) == mode
2202 && bytes == GET_MODE_SIZE (mode))
2204 emit_move_insn (target, srcreg);
2205 return;
2208 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2209 into a new pseudo which is a full word. */
2210 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2212 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2213 mode = word_mode;
2216 /* Copy the structure BITSIZE bits at a time. If the target lives in
2217 memory, take care of not reading/writing past its end by selecting
2218 a copy mode suited to BITSIZE. This should always be possible given
2219 how it is computed.
2221 If the target lives in register, make sure not to select a copy mode
2222 larger than the mode of the register.
2224 We could probably emit more efficient code for machines which do not use
2225 strict alignment, but it doesn't seem worth the effort at the current
2226 time. */
2228 copy_mode = word_mode;
2229 if (MEM_P (target))
2231 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2232 if (mem_mode != BLKmode)
2233 copy_mode = mem_mode;
2235 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2236 copy_mode = tmode;
2238 for (bitpos = 0, xbitpos = padding_correction;
2239 bitpos < bytes * BITS_PER_UNIT;
2240 bitpos += bitsize, xbitpos += bitsize)
2242 /* We need a new source operand each time xbitpos is on a
2243 word boundary and when xbitpos == padding_correction
2244 (the first time through). */
2245 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2246 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2248 /* We need a new destination operand each time bitpos is on
2249 a word boundary. */
2250 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2251 dst = target;
2252 else if (bitpos % BITS_PER_WORD == 0)
2253 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2255 /* Use xbitpos for the source extraction (right justified) and
2256 bitpos for the destination store (left justified). */
2257 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2258 extract_bit_field (src, bitsize,
2259 xbitpos % BITS_PER_WORD, 1,
2260 NULL_RTX, copy_mode, copy_mode));
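/* Worked example (assumed 32-bit words and a big-endian target for which
   targetm.calls.return_in_msb returns false): a 2-byte structure leaves
   bytes % UNITS_PER_WORD == 2, so padding_correction is 32 - 16 == 16
   bits and the copy loop above starts extracting at xbitpos == 16,
   skipping the 16 padding bits of SRCREG before storing the 16 useful
   bits into TARGET.  */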
2264 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2265 register if it contains any data, otherwise return null.
2267 This is used on targets that return BLKmode values in registers. */
2270 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2272 int i, n_regs;
2273 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2274 unsigned int bitsize;
2275 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2276 enum machine_mode dst_mode;
2278 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2280 x = expand_normal (src);
2282 bytes = int_size_in_bytes (TREE_TYPE (src));
2283 if (bytes == 0)
2284 return NULL_RTX;
2286 /* If the structure doesn't take up a whole number of words, see
2287 whether the register value should be padded on the left or on
2288 the right. Set PADDING_CORRECTION to the number of padding
2289 bits needed on the left side.
2291 In most ABIs, the structure will be returned at the least significant end of
2292 the register, which translates to right padding on little-endian
2293 targets and left padding on big-endian targets. The opposite
2294 holds if the structure is returned at the most significant
2295 end of the register. */
2296 if (bytes % UNITS_PER_WORD != 0
2297 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2298 ? !BYTES_BIG_ENDIAN
2299 : BYTES_BIG_ENDIAN))
2300 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2301 * BITS_PER_UNIT));
2303 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2304 dst_words = XALLOCAVEC (rtx, n_regs);
2305 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2307 /* Copy the structure BITSIZE bits at a time. */
2308 for (bitpos = 0, xbitpos = padding_correction;
2309 bitpos < bytes * BITS_PER_UNIT;
2310 bitpos += bitsize, xbitpos += bitsize)
2312 /* We need a new destination pseudo each time xbitpos is
2313 on a word boundary and when xbitpos == padding_correction
2314 (the first time through). */
2315 if (xbitpos % BITS_PER_WORD == 0
2316 || xbitpos == padding_correction)
2318 /* Generate an appropriate register. */
2319 dst_word = gen_reg_rtx (word_mode);
2320 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2322 /* Clear the destination before we move anything into it. */
2323 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2326 /* We need a new source operand each time bitpos is on a word
2327 boundary. */
2328 if (bitpos % BITS_PER_WORD == 0)
2329 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2331 /* Use bitpos for the source extraction (left justified) and
2332 xbitpos for the destination store (right justified). */
2333 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2334 0, 0, word_mode,
2335 extract_bit_field (src_word, bitsize,
2336 bitpos % BITS_PER_WORD, 1,
2337 NULL_RTX, word_mode, word_mode));
2340 if (mode == BLKmode)
2342 /* Find the smallest integer mode large enough to hold the
2343 entire structure. */
2344 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2345 mode != VOIDmode;
2346 mode = GET_MODE_WIDER_MODE (mode))
2347 /* Have we found a large enough mode? */
2348 if (GET_MODE_SIZE (mode) >= bytes)
2349 break;
2351 /* A suitable mode should have been found. */
2352 gcc_assert (mode != VOIDmode);
2355 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2356 dst_mode = word_mode;
2357 else
2358 dst_mode = mode;
2359 dst = gen_reg_rtx (dst_mode);
2361 for (i = 0; i < n_regs; i++)
2362 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2364 if (mode != dst_mode)
2365 dst = gen_lowpart (mode, dst);
2367 return dst;
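/* Illustrative example (assumed 32-bit words, MODE passed in as
   BLKmode): for a 6-byte structure the widening loop above settles on
   DImode, the narrowest integer mode of at least 6 bytes; two word_mode
   pseudos are filled bit-field by bit-field and then assembled into a
   single DImode register, which is returned.  */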
2370 /* Add a USE expression for REG to the (possibly empty) list pointed
2371 to by CALL_FUSAGE. REG must denote a hard register. */
2373 void
2374 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2376 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2378 *call_fusage
2379 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2382 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2383 starting at REGNO. All of these registers must be hard registers. */
2385 void
2386 use_regs (rtx *call_fusage, int regno, int nregs)
2388 int i;
2390 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2392 for (i = 0; i < nregs; i++)
2393 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2396 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2397 PARALLEL REGS. This is for calls that pass values in multiple
2398 non-contiguous locations. The Irix 6 ABI has examples of this. */
2400 void
2401 use_group_regs (rtx *call_fusage, rtx regs)
2403 int i;
2405 for (i = 0; i < XVECLEN (regs, 0); i++)
2407 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2409 /* A NULL entry means the parameter goes both on the stack and in
2410 registers. This can also be a MEM for targets that pass values
2411 partially on the stack and partially in registers. */
2412 if (reg != 0 && REG_P (reg))
2413 use_reg (call_fusage, reg);
2417 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2418 assignment and the code of the expression on the RHS is CODE. Return
2419 NULL otherwise. */
2421 static gimple
2422 get_def_for_expr (tree name, enum tree_code code)
2424 gimple def_stmt;
2426 if (TREE_CODE (name) != SSA_NAME)
2427 return NULL;
2429 def_stmt = get_gimple_for_ssa_name (name);
2430 if (!def_stmt
2431 || gimple_assign_rhs_code (def_stmt) != code)
2432 return NULL;
2434 return def_stmt;
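/* Hypothetical usage sketch: when expanding an SSA operand OP, a caller
   further down in this file can ask

     gimple def = get_def_for_expr (op, MULT_EXPR);

   and, if the result is non-NULL, inspect gimple_assign_rhs1/rhs2 of DEF
   to recognize idioms (such as widening multiplies) that span more than
   one GIMPLE statement.  */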
2437 #ifdef HAVE_conditional_move
2438 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2439 assignment and the class of the expression on the RHS is CLASS. Return
2440 NULL otherwise. */
2442 static gimple
2443 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2445 gimple def_stmt;
2447 if (TREE_CODE (name) != SSA_NAME)
2448 return NULL;
2450 def_stmt = get_gimple_for_ssa_name (name);
2451 if (!def_stmt
2452 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2453 return NULL;
2455 return def_stmt;
2457 #endif
2460 /* Determine whether the LEN bytes generated by CONSTFUN can be
2461 stored to memory using several move instructions. CONSTFUNDATA is
2462 a pointer which will be passed as argument in every CONSTFUN call.
2463 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2464 a memset operation and false if it's a copy of a constant string.
2465 Return nonzero if a call to store_by_pieces should succeed. */
2468 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2469 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2470 void *constfundata, unsigned int align, bool memsetp)
2472 unsigned HOST_WIDE_INT l;
2473 unsigned int max_size;
2474 HOST_WIDE_INT offset = 0;
2475 enum machine_mode mode;
2476 enum insn_code icode;
2477 int reverse;
2478 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2479 rtx cst ATTRIBUTE_UNUSED;
2481 if (len == 0)
2482 return 1;
2484 if (! (memsetp
2485 ? SET_BY_PIECES_P (len, align)
2486 : STORE_BY_PIECES_P (len, align)))
2487 return 0;
2489 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2491 /* We would first store what we can in the largest integer mode, then go to
2492 successively smaller modes. */
2494 for (reverse = 0;
2495 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2496 reverse++)
2498 l = len;
2499 max_size = STORE_MAX_PIECES + 1;
2500 while (max_size > 1 && l > 0)
2502 mode = widest_int_mode_for_size (max_size);
2504 if (mode == VOIDmode)
2505 break;
2507 icode = optab_handler (mov_optab, mode);
2508 if (icode != CODE_FOR_nothing
2509 && align >= GET_MODE_ALIGNMENT (mode))
2511 unsigned int size = GET_MODE_SIZE (mode);
2513 while (l >= size)
2515 if (reverse)
2516 offset -= size;
2518 cst = (*constfun) (constfundata, offset, mode);
2519 if (!targetm.legitimate_constant_p (mode, cst))
2520 return 0;
2522 if (!reverse)
2523 offset += size;
2525 l -= size;
2529 max_size = GET_MODE_SIZE (mode);
2532 /* The code above should have handled everything. */
2533 gcc_assert (!l);
2536 return 1;
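/* Worked example (assumed 32-bit target with STORE_MAX_PIECES == 4,
   32-bit alignment, and a CONSTFUN that always returns const0_rtx, as
   clear_by_pieces_1 does): for LEN == 10 the loop above plans two
   SImode stores followed by one HImode store, checking in each mode
   that the constant is legitimate, and returns 1 once l reaches
   zero.  */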
2539 /* Generate several move instructions to store LEN bytes generated by
2540 CONSTFUN to block TO (a MEM rtx with BLKmode). CONSTFUNDATA is a
2541 pointer which will be passed as argument in every CONSTFUN call.
2542 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2543 a memset operation and false if it's a copy of a constant string.
2544 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2545 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2546 stpcpy. */
2549 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2550 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2551 void *constfundata, unsigned int align, bool memsetp, int endp)
2553 enum machine_mode to_addr_mode = get_address_mode (to);
2554 struct store_by_pieces_d data;
2556 if (len == 0)
2558 gcc_assert (endp != 2);
2559 return to;
2562 gcc_assert (memsetp
2563 ? SET_BY_PIECES_P (len, align)
2564 : STORE_BY_PIECES_P (len, align));
2565 data.constfun = constfun;
2566 data.constfundata = constfundata;
2567 data.len = len;
2568 data.to = to;
2569 store_by_pieces_1 (&data, align);
2570 if (endp)
2572 rtx to1;
2574 gcc_assert (!data.reverse);
2575 if (data.autinc_to)
2577 if (endp == 2)
2579 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2580 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2581 else
2582 data.to_addr = copy_to_mode_reg (to_addr_mode,
2583 plus_constant (to_addr_mode,
2584 data.to_addr,
2585 -1));
2587 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2588 data.offset);
2590 else
2592 if (endp == 2)
2593 --data.offset;
2594 to1 = adjust_address (data.to, QImode, data.offset);
2596 return to1;
2598 else
2599 return data.to;
2602 /* Generate several move instructions to clear LEN bytes of block TO (a MEM
2603 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2605 static void
2606 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2608 struct store_by_pieces_d data;
2610 if (len == 0)
2611 return;
2613 data.constfun = clear_by_pieces_1;
2614 data.constfundata = NULL;
2615 data.len = len;
2616 data.to = to;
2617 store_by_pieces_1 (&data, align);
2620 /* Callback routine for clear_by_pieces.
2621 Return const0_rtx unconditionally. */
2623 static rtx
2624 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2625 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2626 enum machine_mode mode ATTRIBUTE_UNUSED)
2628 return const0_rtx;
2631 /* Subroutine of clear_by_pieces and store_by_pieces.
2632 Generate several move instructions to store LEN bytes of block TO (a MEM
2633 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2635 static void
2636 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2637 unsigned int align ATTRIBUTE_UNUSED)
2639 enum machine_mode to_addr_mode = get_address_mode (data->to);
2640 rtx to_addr = XEXP (data->to, 0);
2641 unsigned int max_size = STORE_MAX_PIECES + 1;
2642 enum insn_code icode;
2644 data->offset = 0;
2645 data->to_addr = to_addr;
2646 data->autinc_to
2647 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2648 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2650 data->explicit_inc_to = 0;
2651 data->reverse
2652 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2653 if (data->reverse)
2654 data->offset = data->len;
2656 /* If storing requires more than two move insns,
2657 copy addresses to registers (to make displacements shorter)
2658 and use post-increment if available. */
2659 if (!data->autinc_to
2660 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2662 /* Determine the main mode we'll be using.
2663 MODE might not be used depending on the definitions of the
2664 USE_* macros below. */
2665 enum machine_mode mode ATTRIBUTE_UNUSED
2666 = widest_int_mode_for_size (max_size);
2668 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2670 data->to_addr = copy_to_mode_reg (to_addr_mode,
2671 plus_constant (to_addr_mode,
2672 to_addr,
2673 data->len));
2674 data->autinc_to = 1;
2675 data->explicit_inc_to = -1;
2678 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2679 && ! data->autinc_to)
2681 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2682 data->autinc_to = 1;
2683 data->explicit_inc_to = 1;
2686 if ( !data->autinc_to && CONSTANT_P (to_addr))
2687 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2690 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2692 /* First store what we can in the largest integer mode, then go to
2693 successively smaller modes. */
2695 while (max_size > 1 && data->len > 0)
2697 enum machine_mode mode = widest_int_mode_for_size (max_size);
2699 if (mode == VOIDmode)
2700 break;
2702 icode = optab_handler (mov_optab, mode);
2703 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2704 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2706 max_size = GET_MODE_SIZE (mode);
2709 /* The code above should have handled everything. */
2710 gcc_assert (!data->len);
2713 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2714 with move instructions for mode MODE. GENFUN is the gen_... function
2715 to make a move insn for that mode. DATA has all the other info. */
2717 static void
2718 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2719 struct store_by_pieces_d *data)
2721 unsigned int size = GET_MODE_SIZE (mode);
2722 rtx to1, cst;
2724 while (data->len >= size)
2726 if (data->reverse)
2727 data->offset -= size;
2729 if (data->autinc_to)
2730 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2731 data->offset);
2732 else
2733 to1 = adjust_address (data->to, mode, data->offset);
2735 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2736 emit_insn (gen_add2_insn (data->to_addr,
2737 gen_int_mode (-(HOST_WIDE_INT) size,
2738 GET_MODE (data->to_addr))));
2740 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2741 emit_insn ((*genfun) (to1, cst));
2743 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2744 emit_insn (gen_add2_insn (data->to_addr,
2745 gen_int_mode (size,
2746 GET_MODE (data->to_addr))));
2748 if (! data->reverse)
2749 data->offset += size;
2751 data->len -= size;
2755 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2756 its length in bytes. */
2759 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2760 unsigned int expected_align, HOST_WIDE_INT expected_size,
2761 unsigned HOST_WIDE_INT min_size,
2762 unsigned HOST_WIDE_INT max_size,
2763 unsigned HOST_WIDE_INT probable_max_size)
2765 enum machine_mode mode = GET_MODE (object);
2766 unsigned int align;
2768 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2770 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2771 just move a zero. Otherwise, do this a piece at a time. */
2772 if (mode != BLKmode
2773 && CONST_INT_P (size)
2774 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2776 rtx zero = CONST0_RTX (mode);
2777 if (zero != NULL)
2779 emit_move_insn (object, zero);
2780 return NULL;
2783 if (COMPLEX_MODE_P (mode))
2785 zero = CONST0_RTX (GET_MODE_INNER (mode));
2786 if (zero != NULL)
2788 write_complex_part (object, zero, 0);
2789 write_complex_part (object, zero, 1);
2790 return NULL;
2795 if (size == const0_rtx)
2796 return NULL;
2798 align = MEM_ALIGN (object);
2800 if (CONST_INT_P (size)
2801 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2802 clear_by_pieces (object, INTVAL (size), align);
2803 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2804 expected_align, expected_size,
2805 min_size, max_size, probable_max_size))
2807 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2808 return set_storage_via_libcall (object, size, const0_rtx,
2809 method == BLOCK_OP_TAILCALL);
2810 else
2811 gcc_unreachable ();
2813 return NULL;
2817 clear_storage (rtx object, rtx size, enum block_op_methods method)
2819 unsigned HOST_WIDE_INT max, min = 0;
2820 if (GET_CODE (size) == CONST_INT)
2821 min = max = UINTVAL (size);
2822 else
2823 max = GET_MODE_MASK (GET_MODE (size));
2824 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
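/* Illustrative usage (hypothetical caller): clearing a 64-byte BLKmode
   object in memory is simply

     clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);

   and ends up in clear_by_pieces, a setmem expander, or a memset
   libcall, depending on the checks in clear_storage_hints above.  */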
2828 /* A subroutine of clear_storage. Expand a call to memset.
2829 Return the return value of memset, 0 otherwise. */
2832 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2834 tree call_expr, fn, object_tree, size_tree, val_tree;
2835 enum machine_mode size_mode;
2836 rtx retval;
2838 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2839 refer to those pseudos from tree nodes and use them later. */
2841 object = copy_addr_to_reg (XEXP (object, 0));
2843 size_mode = TYPE_MODE (sizetype);
2844 size = convert_to_mode (size_mode, size, 1);
2845 size = copy_to_mode_reg (size_mode, size);
2847 /* It is incorrect to use the libcall calling conventions to call
2848 memset in this context. This could be a user call to memset and
2849 the user may wish to examine the return value from memset. For
2850 targets where libcalls and normal calls have different conventions
2851 for returning pointers, we could end up generating incorrect code. */
2853 object_tree = make_tree (ptr_type_node, object);
2854 if (!CONST_INT_P (val))
2855 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2856 size_tree = make_tree (sizetype, size);
2857 val_tree = make_tree (integer_type_node, val);
2859 fn = clear_storage_libcall_fn (true);
2860 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2861 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2863 retval = expand_normal (call_expr);
2865 return retval;
2868 /* A subroutine of set_storage_via_libcall. Create the tree node
2869 for the function we use for block clears. */
2871 tree block_clear_fn;
2873 void
2874 init_block_clear_fn (const char *asmspec)
2876 if (!block_clear_fn)
2878 tree fn, args;
2880 fn = get_identifier ("memset");
2881 args = build_function_type_list (ptr_type_node, ptr_type_node,
2882 integer_type_node, sizetype,
2883 NULL_TREE);
2885 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2886 DECL_EXTERNAL (fn) = 1;
2887 TREE_PUBLIC (fn) = 1;
2888 DECL_ARTIFICIAL (fn) = 1;
2889 TREE_NOTHROW (fn) = 1;
2890 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2891 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2893 block_clear_fn = fn;
2896 if (asmspec)
2897 set_user_assembler_name (block_clear_fn, asmspec);
2900 static tree
2901 clear_storage_libcall_fn (int for_call)
2903 static bool emitted_extern;
2905 if (!block_clear_fn)
2906 init_block_clear_fn (NULL);
2908 if (for_call && !emitted_extern)
2910 emitted_extern = true;
2911 make_decl_rtl (block_clear_fn);
2914 return block_clear_fn;
2917 /* Expand a setmem pattern; return true if successful. */
2919 bool
2920 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2921 unsigned int expected_align, HOST_WIDE_INT expected_size,
2922 unsigned HOST_WIDE_INT min_size,
2923 unsigned HOST_WIDE_INT max_size,
2924 unsigned HOST_WIDE_INT probable_max_size)
2926 /* Try the most limited insn first, because there's no point
2927 including more than one in the machine description unless
2928 the more limited one has some advantage. */
2930 enum machine_mode mode;
2932 if (expected_align < align)
2933 expected_align = align;
2934 if (expected_size != -1)
2936 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2937 expected_size = max_size;
2938 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2939 expected_size = min_size;
2942 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2943 mode = GET_MODE_WIDER_MODE (mode))
2945 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2947 if (code != CODE_FOR_nothing
2948 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2949 here because if SIZE is less than the mode mask, as it is
2950 returned by the macro, it will definitely be less than the
2951 actual mode mask. Since SIZE is within the Pmode address
2952 space, we limit MODE to Pmode. */
2953 && ((CONST_INT_P (size)
2954 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2955 <= (GET_MODE_MASK (mode) >> 1)))
2956 || max_size <= (GET_MODE_MASK (mode) >> 1)
2957 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2959 struct expand_operand ops[9];
2960 unsigned int nops;
2962 nops = insn_data[(int) code].n_generator_args;
2963 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2965 create_fixed_operand (&ops[0], object);
2966 /* The check above guarantees that this size conversion is valid. */
2967 create_convert_operand_to (&ops[1], size, mode, true);
2968 create_convert_operand_from (&ops[2], val, byte_mode, true);
2969 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2970 if (nops >= 6)
2972 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2973 create_integer_operand (&ops[5], expected_size);
2975 if (nops >= 8)
2977 create_integer_operand (&ops[6], min_size);
2978 /* If we cannot represent the maximal size,
2979 make the parameter NULL. */
2980 if ((HOST_WIDE_INT) max_size != -1)
2981 create_integer_operand (&ops[7], max_size);
2982 else
2983 create_fixed_operand (&ops[7], NULL);
2985 if (nops == 9)
2987 /* If we cannot represent the maximal size,
2988 make the parameter NULL. */
2989 if ((HOST_WIDE_INT) probable_max_size != -1)
2990 create_integer_operand (&ops[8], probable_max_size);
2991 else
2992 create_fixed_operand (&ops[8], NULL);
2994 if (maybe_expand_insn (code, nops, ops))
2995 return true;
2999 return false;
3003 /* Write to one of the components of the complex value CPLX. Write VAL to
3004 the real part if IMAG_P is false, and the imaginary part if it's true. */
3006 static void
3007 write_complex_part (rtx cplx, rtx val, bool imag_p)
3009 enum machine_mode cmode;
3010 enum machine_mode imode;
3011 unsigned ibitsize;
3013 if (GET_CODE (cplx) == CONCAT)
3015 emit_move_insn (XEXP (cplx, imag_p), val);
3016 return;
3019 cmode = GET_MODE (cplx);
3020 imode = GET_MODE_INNER (cmode);
3021 ibitsize = GET_MODE_BITSIZE (imode);
3023 /* For MEMs simplify_gen_subreg may generate an invalid new address
3024 because, e.g., the original address is considered mode-dependent
3025 by the target, which restricts simplify_subreg from invoking
3026 adjust_address_nv. Instead of preparing fallback support for an
3027 invalid address, we call adjust_address_nv directly. */
3028 if (MEM_P (cplx))
3030 emit_move_insn (adjust_address_nv (cplx, imode,
3031 imag_p ? GET_MODE_SIZE (imode) : 0),
3032 val);
3033 return;
3036 /* If the sub-object is at least word sized, then we know that subregging
3037 will work. This special case is important, since store_bit_field
3038 wants to operate on integer modes, and there's rarely an OImode to
3039 correspond to TCmode. */
3040 if (ibitsize >= BITS_PER_WORD
3041 /* For hard regs we have exact predicates. Assume we can split
3042 the original object if it spans an even number of hard regs.
3043 This special case is important for SCmode on 64-bit platforms
3044 where the natural size of floating-point regs is 32-bit. */
3045 || (REG_P (cplx)
3046 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3047 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3049 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3050 imag_p ? GET_MODE_SIZE (imode) : 0);
3051 if (part)
3053 emit_move_insn (part, val);
3054 return;
3056 else
3057 /* simplify_gen_subreg may fail for sub-word MEMs. */
3058 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3061 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3064 /* Extract one of the components of the complex value CPLX. Extract the
3065 real part if IMAG_P is false, and the imaginary part if it's true. */
3067 static rtx
3068 read_complex_part (rtx cplx, bool imag_p)
3070 enum machine_mode cmode, imode;
3071 unsigned ibitsize;
3073 if (GET_CODE (cplx) == CONCAT)
3074 return XEXP (cplx, imag_p);
3076 cmode = GET_MODE (cplx);
3077 imode = GET_MODE_INNER (cmode);
3078 ibitsize = GET_MODE_BITSIZE (imode);
3080 /* Special case reads from complex constants that got spilled to memory. */
3081 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3083 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3084 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3086 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3087 if (CONSTANT_CLASS_P (part))
3088 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3092 /* For MEMs simplify_gen_subreg may generate an invalid new address
3093 because, e.g., the original address is considered mode-dependent
3094 by the target, which restricts simplify_subreg from invoking
3095 adjust_address_nv. Instead of preparing fallback support for an
3096 invalid address, we call adjust_address_nv directly. */
3097 if (MEM_P (cplx))
3098 return adjust_address_nv (cplx, imode,
3099 imag_p ? GET_MODE_SIZE (imode) : 0);
3101 /* If the sub-object is at least word sized, then we know that subregging
3102 will work. This special case is important, since extract_bit_field
3103 wants to operate on integer modes, and there's rarely an OImode to
3104 correspond to TCmode. */
3105 if (ibitsize >= BITS_PER_WORD
3106 /* For hard regs we have exact predicates. Assume we can split
3107 the original object if it spans an even number of hard regs.
3108 This special case is important for SCmode on 64-bit platforms
3109 where the natural size of floating-point regs is 32-bit. */
3110 || (REG_P (cplx)
3111 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3112 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3114 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3115 imag_p ? GET_MODE_SIZE (imode) : 0);
3116 if (ret)
3117 return ret;
3118 else
3119 /* simplify_gen_subreg may fail for sub-word MEMs. */
3120 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3123 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3124 true, NULL_RTX, imode, imode);
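/* Illustrative example (assumed 32-bit words): for an SCmode value the
   inner mode is SFmode and ibitsize is 32, so the word-size test above
   succeeds and the imaginary part of a register value is simply the
   subreg at byte offset GET_MODE_SIZE (SFmode); MEMs are handled even
   earlier via adjust_address_nv, leaving the bit-field routines for the
   remaining awkward cases.  */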
3127 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3128 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3129 represented in NEW_MODE. If FORCE is true, this will never happen, as
3130 we'll force-create a SUBREG if needed. */
3132 static rtx
3133 emit_move_change_mode (enum machine_mode new_mode,
3134 enum machine_mode old_mode, rtx x, bool force)
3136 rtx ret;
3138 if (push_operand (x, GET_MODE (x)))
3140 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3141 MEM_COPY_ATTRIBUTES (ret, x);
3143 else if (MEM_P (x))
3145 /* We don't have to worry about changing the address since the
3146 size in bytes is supposed to be the same. */
3147 if (reload_in_progress)
3149 /* Copy the MEM to change the mode and move any
3150 substitutions from the old MEM to the new one. */
3151 ret = adjust_address_nv (x, new_mode, 0);
3152 copy_replacements (x, ret);
3154 else
3155 ret = adjust_address (x, new_mode, 0);
3157 else
3159 /* Note that we do want simplify_subreg's behavior of validating
3160 that the new mode is ok for a hard register. If we were to use
3161 simplify_gen_subreg, we would create the subreg, but would
3162 probably run into the target not being able to implement it. */
3163 /* Except, of course, when FORCE is true, when this is exactly what
3164 we want. Which is needed for CCmodes on some targets. */
3165 if (force)
3166 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3167 else
3168 ret = simplify_subreg (new_mode, x, old_mode, 0);
3171 return ret;
3174 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3175 an integer mode of the same size as MODE. Returns the instruction
3176 emitted, or NULL if such a move could not be generated. */
3178 static rtx
3179 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3181 enum machine_mode imode;
3182 enum insn_code code;
3184 /* There must exist a mode of the exact size we require. */
3185 imode = int_mode_for_mode (mode);
3186 if (imode == BLKmode)
3187 return NULL_RTX;
3189 /* The target must support moves in this mode. */
3190 code = optab_handler (mov_optab, imode);
3191 if (code == CODE_FOR_nothing)
3192 return NULL_RTX;
3194 x = emit_move_change_mode (imode, mode, x, force);
3195 if (x == NULL_RTX)
3196 return NULL_RTX;
3197 y = emit_move_change_mode (imode, mode, y, force);
3198 if (y == NULL_RTX)
3199 return NULL_RTX;
3200 return emit_insn (GEN_FCN (code) (x, y));
3203 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3204 Return an equivalent MEM that does not use an auto-increment. */
3206 static rtx
3207 emit_move_resolve_push (enum machine_mode mode, rtx x)
3209 enum rtx_code code = GET_CODE (XEXP (x, 0));
3210 HOST_WIDE_INT adjust;
3211 rtx temp;
3213 adjust = GET_MODE_SIZE (mode);
3214 #ifdef PUSH_ROUNDING
3215 adjust = PUSH_ROUNDING (adjust);
3216 #endif
3217 if (code == PRE_DEC || code == POST_DEC)
3218 adjust = -adjust;
3219 else if (code == PRE_MODIFY || code == POST_MODIFY)
3221 rtx expr = XEXP (XEXP (x, 0), 1);
3222 HOST_WIDE_INT val;
3224 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3225 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3226 val = INTVAL (XEXP (expr, 1));
3227 if (GET_CODE (expr) == MINUS)
3228 val = -val;
3229 gcc_assert (adjust == val || adjust == -val);
3230 adjust = val;
3233 /* Do not use anti_adjust_stack, since we don't want to update
3234 stack_pointer_delta. */
3235 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3236 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3237 0, OPTAB_LIB_WIDEN);
3238 if (temp != stack_pointer_rtx)
3239 emit_move_insn (stack_pointer_rtx, temp);
3241 switch (code)
3243 case PRE_INC:
3244 case PRE_DEC:
3245 case PRE_MODIFY:
3246 temp = stack_pointer_rtx;
3247 break;
3248 case POST_INC:
3249 case POST_DEC:
3250 case POST_MODIFY:
3251 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3252 break;
3253 default:
3254 gcc_unreachable ();
3257 return replace_equiv_address (x, temp);
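/* Worked example (assumed 4-byte push with no PUSH_ROUNDING
   adjustment): for X == (mem:SI (pre_dec (reg sp))) the code above
   computes ADJUST == -4, adjusts the stack pointer by that amount with
   an explicit addition, and returns (mem:SI (reg sp)), i.e. the same
   slot addressed without the auto-decrement side effect.  */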
3260 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3261 X is known to satisfy push_operand, and MODE is known to be complex.
3262 Returns the last instruction emitted. */
3265 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3267 enum machine_mode submode = GET_MODE_INNER (mode);
3268 bool imag_first;
3270 #ifdef PUSH_ROUNDING
3271 unsigned int submodesize = GET_MODE_SIZE (submode);
3273 /* If we are pushing to the stack but the part size is not something the
3274 machine can push exactly, we need to use move instructions. */
3275 if (PUSH_ROUNDING (submodesize) != submodesize)
3277 x = emit_move_resolve_push (mode, x);
3278 return emit_move_insn (x, y);
3280 #endif
3282 /* Note that the real part always precedes the imag part in memory
3283 regardless of the machine's endianness. */
3284 switch (GET_CODE (XEXP (x, 0)))
3286 case PRE_DEC:
3287 case POST_DEC:
3288 imag_first = true;
3289 break;
3290 case PRE_INC:
3291 case POST_INC:
3292 imag_first = false;
3293 break;
3294 default:
3295 gcc_unreachable ();
3298 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3299 read_complex_part (y, imag_first));
3300 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3301 read_complex_part (y, !imag_first));
3304 /* A subroutine of emit_move_complex. Perform the move from Y to X
3305 via two moves of the parts. Returns the last instruction emitted. */
3308 emit_move_complex_parts (rtx x, rtx y)
3310 /* Show the output dies here. This is necessary for SUBREGs
3311 of pseudos since we cannot track their lifetimes correctly;
3312 hard regs shouldn't appear here except as return values. */
3313 if (!reload_completed && !reload_in_progress
3314 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3315 emit_clobber (x);
3317 write_complex_part (x, read_complex_part (y, false), false);
3318 write_complex_part (x, read_complex_part (y, true), true);
3320 return get_last_insn ();
3323 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3324 MODE is known to be complex. Returns the last instruction emitted. */
3326 static rtx
3327 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3329 bool try_int;
3331 /* Need to take special care for pushes, to maintain proper ordering
3332 of the data, and possibly extra padding. */
3333 if (push_operand (x, mode))
3334 return emit_move_complex_push (mode, x, y);
3336 /* See if we can coerce the target into moving both values at once, except
3337 for floating point where we favor moving as parts if this is easy. */
3338 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3339 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3340 && !(REG_P (x)
3341 && HARD_REGISTER_P (x)
3342 && hard_regno_nregs[REGNO (x)][mode] == 1)
3343 && !(REG_P (y)
3344 && HARD_REGISTER_P (y)
3345 && hard_regno_nregs[REGNO (y)][mode] == 1))
3346 try_int = false;
3347 /* Not possible if the values are inherently not adjacent. */
3348 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3349 try_int = false;
3350 /* Is possible if both are registers (or subregs of registers). */
3351 else if (register_operand (x, mode) && register_operand (y, mode))
3352 try_int = true;
3353 /* If one of the operands is a memory, and alignment constraints
3354 are friendly enough, we may be able to do combined memory operations.
3355 We do not attempt this if Y is a constant because that combination is
3356 usually better with the by-parts thing below. */
3357 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3358 && (!STRICT_ALIGNMENT
3359 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3360 try_int = true;
3361 else
3362 try_int = false;
3364 if (try_int)
3366 rtx ret;
3368 /* For memory-to-memory moves, optimal behavior can be had with the
3369 existing block move logic. */
3370 if (MEM_P (x) && MEM_P (y))
3372 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3373 BLOCK_OP_NO_LIBCALL);
3374 return get_last_insn ();
3377 ret = emit_move_via_integer (mode, x, y, true);
3378 if (ret)
3379 return ret;
3382 return emit_move_complex_parts (x, y);
3385 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3386 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3388 static rtx
3389 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3391 rtx ret;
3393 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3394 if (mode != CCmode)
3396 enum insn_code code = optab_handler (mov_optab, CCmode);
3397 if (code != CODE_FOR_nothing)
3399 x = emit_move_change_mode (CCmode, mode, x, true);
3400 y = emit_move_change_mode (CCmode, mode, y, true);
3401 return emit_insn (GEN_FCN (code) (x, y));
3405 /* Otherwise, find the MODE_INT mode of the same width. */
3406 ret = emit_move_via_integer (mode, x, y, false);
3407 gcc_assert (ret != NULL);
3408 return ret;
3411 /* Return true if word I of OP lies entirely in the
3412 undefined bits of a paradoxical subreg. */
3414 static bool
3415 undefined_operand_subword_p (const_rtx op, int i)
3417 enum machine_mode innermode, innermostmode;
3418 int offset;
3419 if (GET_CODE (op) != SUBREG)
3420 return false;
3421 innermode = GET_MODE (op);
3422 innermostmode = GET_MODE (SUBREG_REG (op));
3423 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3424 /* The SUBREG_BYTE represents offset, as if the value were stored in
3425 memory, except for a paradoxical subreg where we define
3426 SUBREG_BYTE to be 0; undo this exception as in
3427 simplify_subreg. */
3428 if (SUBREG_BYTE (op) == 0
3429 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3431 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3432 if (WORDS_BIG_ENDIAN)
3433 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3434 if (BYTES_BIG_ENDIAN)
3435 offset += difference % UNITS_PER_WORD;
3437 if (offset >= GET_MODE_SIZE (innermostmode)
3438 || offset <= -GET_MODE_SIZE (word_mode))
3439 return true;
3440 return false;
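/* Illustrative example (assumed 32-bit, little-endian-word target): for
   OP == (subreg:DI (reg:SI 100) 0), word 1 lies entirely in the
   undefined upper half of the paradoxical subreg, so this returns true
   for I == 1 and emit_move_multi_word below will not emit a move for
   that word.  */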
3443 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3444 MODE is any multi-word or full-word mode that lacks a move_insn
3445 pattern. Note that you will get better code if you define such
3446 patterns, even if they must turn into multiple assembler instructions. */
3448 static rtx
3449 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3451 rtx last_insn = 0;
3452 rtx seq, inner;
3453 bool need_clobber;
3454 int i;
3456 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3458 /* If X is a push on the stack, do the push now and replace
3459 X with a reference to the stack pointer. */
3460 if (push_operand (x, mode))
3461 x = emit_move_resolve_push (mode, x);
3463 /* If we are in reload, see if either operand is a MEM whose address
3464 is scheduled for replacement. */
3465 if (reload_in_progress && MEM_P (x)
3466 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3467 x = replace_equiv_address_nv (x, inner);
3468 if (reload_in_progress && MEM_P (y)
3469 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3470 y = replace_equiv_address_nv (y, inner);
3472 start_sequence ();
3474 need_clobber = false;
3475 for (i = 0;
3476 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3477 i++)
3479 rtx xpart = operand_subword (x, i, 1, mode);
3480 rtx ypart;
3482 /* Do not generate code for a move if it would come entirely
3483 from the undefined bits of a paradoxical subreg. */
3484 if (undefined_operand_subword_p (y, i))
3485 continue;
3487 ypart = operand_subword (y, i, 1, mode);
3489 /* If we can't get a part of Y, put Y into memory if it is a
3490 constant. Otherwise, force it into a register. Then we must
3491 be able to get a part of Y. */
3492 if (ypart == 0 && CONSTANT_P (y))
3494 y = use_anchored_address (force_const_mem (mode, y));
3495 ypart = operand_subword (y, i, 1, mode);
3497 else if (ypart == 0)
3498 ypart = operand_subword_force (y, i, mode);
3500 gcc_assert (xpart && ypart);
3502 need_clobber |= (GET_CODE (xpart) == SUBREG);
3504 last_insn = emit_move_insn (xpart, ypart);
3507 seq = get_insns ();
3508 end_sequence ();
3510 /* Show the output dies here. This is necessary for SUBREGs
3511 of pseudos since we cannot track their lifetimes correctly;
3512 hard regs shouldn't appear here except as return values.
3513 We never want to emit such a clobber after reload. */
3514 if (x != y
3515 && ! (reload_in_progress || reload_completed)
3516 && need_clobber != 0)
3517 emit_clobber (x);
3519 emit_insn (seq);
3521 return last_insn;
3524 /* Low level part of emit_move_insn.
3525 Called just like emit_move_insn, but assumes X and Y
3526 are basically valid. */
3529 emit_move_insn_1 (rtx x, rtx y)
3531 enum machine_mode mode = GET_MODE (x);
3532 enum insn_code code;
3534 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3536 code = optab_handler (mov_optab, mode);
3537 if (code != CODE_FOR_nothing)
3538 return emit_insn (GEN_FCN (code) (x, y));
3540 /* Expand complex moves by moving real part and imag part. */
3541 if (COMPLEX_MODE_P (mode))
3542 return emit_move_complex (mode, x, y);
3544 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3545 || ALL_FIXED_POINT_MODE_P (mode))
3547 rtx result = emit_move_via_integer (mode, x, y, true);
3549 /* If we can't find an integer mode, use a multi-word move. */
3550 if (result)
3551 return result;
3552 else
3553 return emit_move_multi_word (mode, x, y);
3556 if (GET_MODE_CLASS (mode) == MODE_CC)
3557 return emit_move_ccmode (mode, x, y);
3559 /* Try using a move pattern for the corresponding integer mode. This is
3560 only safe when simplify_subreg can convert MODE constants into integer
3561 constants. At present, it can only do this reliably if the value
3562 fits within a HOST_WIDE_INT. */
3563 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3565 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3567 if (ret)
3569 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3570 return ret;
3574 return emit_move_multi_word (mode, x, y);
3577 /* Generate code to copy Y into X.
3578 Both Y and X must have the same mode, except that
3579 Y can be a constant with VOIDmode.
3580 This mode cannot be BLKmode; use emit_block_move for that.
3582 Return the last instruction emitted. */
3585 emit_move_insn (rtx x, rtx y)
3587 enum machine_mode mode = GET_MODE (x);
3588 rtx y_cst = NULL_RTX;
3589 rtx last_insn, set;
3591 gcc_assert (mode != BLKmode
3592 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3594 if (CONSTANT_P (y))
3596 if (optimize
3597 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3598 && (last_insn = compress_float_constant (x, y)))
3599 return last_insn;
3601 y_cst = y;
3603 if (!targetm.legitimate_constant_p (mode, y))
3605 y = force_const_mem (mode, y);
3607 /* If the target's cannot_force_const_mem prevented the spill,
3608 assume that the target's move expanders will also take care
3609 of the non-legitimate constant. */
3610 if (!y)
3611 y = y_cst;
3612 else
3613 y = use_anchored_address (y);
3617 /* If X or Y are memory references, verify that their addresses are valid
3618 for the machine. */
3619 if (MEM_P (x)
3620 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3621 MEM_ADDR_SPACE (x))
3622 && ! push_operand (x, GET_MODE (x))))
3623 x = validize_mem (x);
3625 if (MEM_P (y)
3626 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3627 MEM_ADDR_SPACE (y)))
3628 y = validize_mem (y);
3630 gcc_assert (mode != BLKmode);
3632 last_insn = emit_move_insn_1 (x, y);
3634 if (y_cst && REG_P (x)
3635 && (set = single_set (last_insn)) != NULL_RTX
3636 && SET_DEST (set) == x
3637 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3638 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3640 return last_insn;
3643 /* If Y is representable exactly in a narrower mode, and the target can
3644 perform the extension directly from constant or memory, then emit the
3645 move as an extension. */
3647 static rtx
3648 compress_float_constant (rtx x, rtx y)
3650 enum machine_mode dstmode = GET_MODE (x);
3651 enum machine_mode orig_srcmode = GET_MODE (y);
3652 enum machine_mode srcmode;
3653 REAL_VALUE_TYPE r;
3654 int oldcost, newcost;
3655 bool speed = optimize_insn_for_speed_p ();
3657 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3659 if (targetm.legitimate_constant_p (dstmode, y))
3660 oldcost = set_src_cost (y, speed);
3661 else
3662 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3664 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3665 srcmode != orig_srcmode;
3666 srcmode = GET_MODE_WIDER_MODE (srcmode))
3668 enum insn_code ic;
3669 rtx trunc_y, last_insn;
3671 /* Skip if the target can't extend this way. */
3672 ic = can_extend_p (dstmode, srcmode, 0);
3673 if (ic == CODE_FOR_nothing)
3674 continue;
3676 /* Skip if the narrowed value isn't exact. */
3677 if (! exact_real_truncate (srcmode, &r))
3678 continue;
3680 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3682 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3684 /* Skip if the target needs extra instructions to perform
3685 the extension. */
3686 if (!insn_operand_matches (ic, 1, trunc_y))
3687 continue;
3688 /* This is valid, but may not be cheaper than the original. */
3689 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3690 speed);
3691 if (oldcost < newcost)
3692 continue;
3694 else if (float_extend_from_mem[dstmode][srcmode])
3696 trunc_y = force_const_mem (srcmode, trunc_y);
3697 /* This is valid, but may not be cheaper than the original. */
3698 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3699 speed);
3700 if (oldcost < newcost)
3701 continue;
3702 trunc_y = validize_mem (trunc_y);
3704 else
3705 continue;
3707 /* For CSE's benefit, force the compressed constant pool entry
3708 into a new pseudo. This constant may be used in different modes,
3709 and if not, combine will put things back together for us. */
3710 trunc_y = force_reg (srcmode, trunc_y);
3711 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3712 last_insn = get_last_insn ();
3714 if (REG_P (x))
3715 set_unique_reg_note (last_insn, REG_EQUAL, y);
3717 return last_insn;
3720 return NULL_RTX;
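/* Illustrative example: moving the DFmode constant 1.5 into a register
   on a target that can extend SFmode to DFmode directly from a constant
   or memory may be emitted as an SFmode constant-pool load plus a
   float_extend, since 1.5 is exactly representable in SFmode; a value
   like 0.1 fails exact_real_truncate and keeps its original mode.  */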
3723 /* Pushing data onto the stack. */
3725 /* Push a block of length SIZE (perhaps variable)
3726 and return an rtx to address the beginning of the block.
3727 The value may be virtual_outgoing_args_rtx.
3729 EXTRA is the number of bytes of padding to push in addition to SIZE.
3730 BELOW nonzero means this padding comes at low addresses;
3731 otherwise, the padding comes at high addresses. */
3734 push_block (rtx size, int extra, int below)
3736 rtx temp;
3738 size = convert_modes (Pmode, ptr_mode, size, 1);
3739 if (CONSTANT_P (size))
3740 anti_adjust_stack (plus_constant (Pmode, size, extra));
3741 else if (REG_P (size) && extra == 0)
3742 anti_adjust_stack (size);
3743 else
3745 temp = copy_to_mode_reg (Pmode, size);
3746 if (extra != 0)
3747 temp = expand_binop (Pmode, add_optab, temp,
3748 gen_int_mode (extra, Pmode),
3749 temp, 0, OPTAB_LIB_WIDEN);
3750 anti_adjust_stack (temp);
3753 #ifndef STACK_GROWS_DOWNWARD
3754 if (0)
3755 #else
3756 if (1)
3757 #endif
3759 temp = virtual_outgoing_args_rtx;
3760 if (extra != 0 && below)
3761 temp = plus_constant (Pmode, temp, extra);
3763 else
3765 if (CONST_INT_P (size))
3766 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3767 -INTVAL (size) - (below ? 0 : extra));
3768 else if (extra != 0 && !below)
3769 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3770 negate_rtx (Pmode, plus_constant (Pmode, size,
3771 extra)));
3772 else
3773 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3774 negate_rtx (Pmode, size));
3777 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
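/* A self-contained sketch (not GCC code) of the address selection above
   for a constant SIZE: the byte offset, relative to
   virtual_outgoing_args_rtx, of the SIZE-byte block that push_block
   returns once the stack has been grown by SIZE + EXTRA bytes.  */

static int
sketch_push_block_offset (int size, int extra, int below,
                          int stack_grows_downward)
{
  if (stack_grows_downward)
    /* The block sits at the outgoing-args base; padding below the data
       moves the data up by EXTRA bytes.  */
    return (extra != 0 && below) ? extra : 0;
  else
    /* With an upward-growing stack the block ends at the base, so back up
       over the data, and over the padding when it goes above the data.  */
    return -size - (below ? 0 : extra);
}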
3780 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3782 static rtx
3783 mem_autoinc_base (rtx mem)
3785 if (MEM_P (mem))
3787 rtx addr = XEXP (mem, 0);
3788 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3789 return XEXP (addr, 0);
3791 return NULL;
3794 /* A utility routine used here, in reload, and in try_split. The insns
3795 after PREV up to and including LAST are known to adjust the stack,
3796 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3797 placing notes as appropriate. PREV may be NULL, indicating the
3798 entire insn sequence prior to LAST should be scanned.
3800 The set of allowed stack pointer modifications is small:
3801 (1) One or more auto-inc style memory references (aka pushes),
3802 (2) One or more addition/subtraction with the SP as destination,
3803 (3) A single move insn with the SP as destination,
3804 (4) A call_pop insn,
3805 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3807 Insns in the sequence that do not modify the SP are ignored,
3808 except for noreturn calls.
3810 The return value is the amount of adjustment that can be trivially
3811 verified, via immediate operand or auto-inc. If the adjustment
3812 cannot be trivially extracted, the return value is INT_MIN. */
3814 HOST_WIDE_INT
3815 find_args_size_adjust (rtx insn)
3817 rtx dest, set, pat;
3818 int i;
3820 pat = PATTERN (insn);
3821 set = NULL;
3823 /* Look for a call_pop pattern. */
3824 if (CALL_P (insn))
3826 /* We have to allow non-call_pop patterns for the case
3827 of emit_single_push_insn of a TLS address. */
3828 if (GET_CODE (pat) != PARALLEL)
3829 return 0;
3831 /* All call_pop have a stack pointer adjust in the parallel.
3832 The call itself is always first, and the stack adjust is
3833 usually last, so search from the end. */
3834 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3836 set = XVECEXP (pat, 0, i);
3837 if (GET_CODE (set) != SET)
3838 continue;
3839 dest = SET_DEST (set);
3840 if (dest == stack_pointer_rtx)
3841 break;
3843 /* We'd better have found the stack pointer adjust. */
3844 if (i == 0)
3845 return 0;
3846 /* Fall through to process the extracted SET and DEST
3847 as if it was a standalone insn. */
3849 else if (GET_CODE (pat) == SET)
3850 set = pat;
3851 else if ((set = single_set (insn)) != NULL)
3853 else if (GET_CODE (pat) == PARALLEL)
3855 /* ??? Some older ports use a parallel with a stack adjust
3856 and a store for a PUSH_ROUNDING pattern, rather than a
3857 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3858 /* ??? See h8300 and m68k, pushqi1. */
3859 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3861 set = XVECEXP (pat, 0, i);
3862 if (GET_CODE (set) != SET)
3863 continue;
3864 dest = SET_DEST (set);
3865 if (dest == stack_pointer_rtx)
3866 break;
3868 /* We do not expect an auto-inc of the sp in the parallel. */
3869 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3870 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3871 != stack_pointer_rtx);
3873 if (i < 0)
3874 return 0;
3876 else
3877 return 0;
3879 dest = SET_DEST (set);
3881 /* Look for direct modifications of the stack pointer. */
3882 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3884 /* Look for a trivial adjustment, otherwise assume nothing. */
3885 /* Note that the SPU restore_stack_block pattern refers to
3886 the stack pointer in V4SImode. Consider that non-trivial. */
3887 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3888 && GET_CODE (SET_SRC (set)) == PLUS
3889 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3890 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3891 return INTVAL (XEXP (SET_SRC (set), 1));
3892 /* ??? Reload can generate no-op moves, which will be cleaned
3893 up later. Recognize it and continue searching. */
3894 else if (rtx_equal_p (dest, SET_SRC (set)))
3895 return 0;
3896 else
3897 return HOST_WIDE_INT_MIN;
3899 else
3901 rtx mem, addr;
3903 /* Otherwise only think about autoinc patterns. */
3904 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3906 mem = dest;
3907 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3908 != stack_pointer_rtx);
3910 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3911 mem = SET_SRC (set);
3912 else
3913 return 0;
3915 addr = XEXP (mem, 0);
3916 switch (GET_CODE (addr))
3918 case PRE_INC:
3919 case POST_INC:
3920 return GET_MODE_SIZE (GET_MODE (mem));
3921 case PRE_DEC:
3922 case POST_DEC:
3923 return -GET_MODE_SIZE (GET_MODE (mem));
3924 case PRE_MODIFY:
3925 case POST_MODIFY:
3926 addr = XEXP (addr, 1);
3927 gcc_assert (GET_CODE (addr) == PLUS);
3928 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3929 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3930 return INTVAL (XEXP (addr, 1));
3931 default:
3932 gcc_unreachable ();
3937 int
3938 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3940 int args_size = end_args_size;
3941 bool saw_unknown = false;
3942 rtx insn;
3944 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3946 HOST_WIDE_INT this_delta;
3948 if (!NONDEBUG_INSN_P (insn))
3949 continue;
3951 this_delta = find_args_size_adjust (insn);
3952 if (this_delta == 0)
3954 if (!CALL_P (insn)
3955 || ACCUMULATE_OUTGOING_ARGS
3956 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3957 continue;
3960 gcc_assert (!saw_unknown);
3961 if (this_delta == HOST_WIDE_INT_MIN)
3962 saw_unknown = true;
3964 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3965 #ifdef STACK_GROWS_DOWNWARD
3966 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3967 #endif
3968 args_size -= this_delta;
3971 return saw_unknown ? INT_MIN : args_size;
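/* A self-contained sketch (not GCC code) of the backward walk above, with
   an array of per-insn stack adjustments standing in for the insn chain.
   DELTAS[i] is what find_args_size_adjust would return for insn I (so a
   push of N bytes on a downward-growing stack is -N); NOTES[i] receives
   the value that the REG_ARGS_SIZE note on insn I would carry, i.e. the
   pending argument size after that insn; the return value is the argument
   size in effect before the first insn.  */

static int
sketch_fixup_args_size_notes (const int *deltas, int *notes,
                              int n_insns, int end_args_size)
{
  int args_size = end_args_size;
  int i;

  for (i = n_insns - 1; i >= 0; i--)
    {
      notes[i] = args_size;     /* args size in effect after insn I.  */
      args_size += deltas[i];   /* undo insn I's adjustment: a push of N
                                   bytes has delta -N, so this subtracts
                                   N on the way back.  */
    }
  return args_size;
}

/* E.g. two pushes with deltas {-4, -8} and END_ARGS_SIZE 12 yield notes
   {4, 12} and a starting argument size of 0.  */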
3974 #ifdef PUSH_ROUNDING
3975 /* Emit single push insn. */
3977 static void
3978 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3980 rtx dest_addr;
3981 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3982 rtx dest;
3983 enum insn_code icode;
3985 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3986 /* If there is a push pattern, use it. Otherwise try the old way of
3987 throwing a MEM representing the push operation to the move expander. */
3988 icode = optab_handler (push_optab, mode);
3989 if (icode != CODE_FOR_nothing)
3991 struct expand_operand ops[1];
3993 create_input_operand (&ops[0], x, mode);
3994 if (maybe_expand_insn (icode, 1, ops))
3995 return;
3997 if (GET_MODE_SIZE (mode) == rounded_size)
3998 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3999 /* If we are to pad downward, adjust the stack pointer first and
4000 then store X into the stack location using an offset. This is
4001 because emit_move_insn does not know how to pad; it does not have
4002 access to type. */
4003 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4005 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4006 HOST_WIDE_INT offset;
4008 emit_move_insn (stack_pointer_rtx,
4009 expand_binop (Pmode,
4010 #ifdef STACK_GROWS_DOWNWARD
4011 sub_optab,
4012 #else
4013 add_optab,
4014 #endif
4015 stack_pointer_rtx,
4016 gen_int_mode (rounded_size, Pmode),
4017 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4019 offset = (HOST_WIDE_INT) padding_size;
4020 #ifdef STACK_GROWS_DOWNWARD
4021 if (STACK_PUSH_CODE == POST_DEC)
4022 /* We have already decremented the stack pointer, so get the
4023 previous value. */
4024 offset += (HOST_WIDE_INT) rounded_size;
4025 #else
4026 if (STACK_PUSH_CODE == POST_INC)
4027 /* We have already incremented the stack pointer, so get the
4028 previous value. */
4029 offset -= (HOST_WIDE_INT) rounded_size;
4030 #endif
4031 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4032 gen_int_mode (offset, Pmode));
4034 else
4036 #ifdef STACK_GROWS_DOWNWARD
4037 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4038 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4039 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4040 Pmode));
4041 #else
4042 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4043 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4044 gen_int_mode (rounded_size, Pmode));
4045 #endif
4046 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4049 dest = gen_rtx_MEM (mode, dest_addr);
4051 if (type != 0)
4053 set_mem_attributes (dest, type, 1);
4055 if (flag_optimize_sibling_calls)
4056 /* Function incoming arguments may overlap with sibling call
4057 outgoing arguments and we cannot allow reordering of reads
4058 from function arguments with stores to outgoing arguments
4059 of sibling calls. */
4060 set_mem_alias_set (dest, 0);
4062 emit_move_insn (dest, x);
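/* A self-contained sketch (not GCC code) of the address arithmetic above
   for the common configuration: the stack grows downward, STACK_PUSH_CODE
   is PRE_DEC and the argument is padded downward.  ROUND_TO stands in for
   PUSH_ROUNDING's rounding unit and is modelled as a plain round-up, which
   real targets need not follow.  The stack pointer is first dropped by
   *ROUNDED_SIZE; the return value is the offset from the new stack pointer
   at which X is then stored, leaving the padding below the data.  */

static unsigned int
sketch_padded_push_offset (unsigned int size, unsigned int round_to,
                           unsigned int *rounded_size)
{
  *rounded_size = (size + round_to - 1) / round_to * round_to;
  return *rounded_size - size;  /* the padding_size of the code above */
}

/* For example, a 6-byte value with 4-byte rounding gives a rounded size of
   8 and a store at sp + 2 after the 8-byte stack adjustment.  */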
4065 /* Emit and annotate a single push insn. */
4067 static void
4068 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4070 int delta, old_delta = stack_pointer_delta;
4071 rtx prev = get_last_insn ();
4072 rtx last;
4074 emit_single_push_insn_1 (mode, x, type);
4076 last = get_last_insn ();
4078 /* Notice the common case where we emitted exactly one insn. */
4079 if (PREV_INSN (last) == prev)
4081 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4082 return;
4085 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4086 gcc_assert (delta == INT_MIN || delta == old_delta);
4088 #endif
4090 /* Generate code to push X onto the stack, assuming it has mode MODE and
4091 type TYPE.
4092 MODE is redundant except when X is a CONST_INT (since they don't
4093 carry mode info).
4094 SIZE is an rtx for the size of data to be copied (in bytes),
4095 needed only if X is BLKmode.
4097 ALIGN (in bits) is maximum alignment we can assume.
4099 If PARTIAL and REG are both nonzero, then copy that many of the first
4100 bytes of X into registers starting with REG, and push the rest of X.
4101 The amount of space pushed is decreased by PARTIAL bytes.
4102 REG must be a hard register in this case.
4103 If REG is zero but PARTIAL is not, take all other actions for an
4104 argument partially in registers, but do not actually load any
4105 registers.
4107 EXTRA is the amount in bytes of extra space to leave next to this arg.
4108 This is ignored if an argument block has already been allocated.
4110 On a machine that lacks real push insns, ARGS_ADDR is the address of
4111 the bottom of the argument block for this call. We use indexing off there
4112 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4113 argument block has not been preallocated.
4115 ARGS_SO_FAR is the size of args previously pushed for this call.
4117 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4118 for arguments passed in registers. If nonzero, it will be the number
4119 of bytes required. */
4121 void
4122 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4123 unsigned int align, int partial, rtx reg, int extra,
4124 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4125 rtx alignment_pad)
4127 rtx xinner;
4128 enum direction stack_direction
4129 #ifdef STACK_GROWS_DOWNWARD
4130 = downward;
4131 #else
4132 = upward;
4133 #endif
4135 /* Decide where to pad the argument: `downward' for below,
4136 `upward' for above, or `none' for don't pad it.
4137 Default is below for small data on big-endian machines; else above. */
4138 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4140 /* Invert direction if stack is post-decrement.
4141 FIXME: why? */
4142 if (STACK_PUSH_CODE == POST_DEC)
4143 if (where_pad != none)
4144 where_pad = (where_pad == downward ? upward : downward);
4146 xinner = x;
4148 if (mode == BLKmode
4149 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4151 /* Copy a block into the stack, entirely or partially. */
4153 rtx temp;
4154 int used;
4155 int offset;
4156 int skip;
4158 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4159 used = partial - offset;
4161 if (mode != BLKmode)
4163 /* A value is to be stored in an insufficiently aligned
4164 stack slot; copy via a suitably aligned slot if
4165 necessary. */
4166 size = GEN_INT (GET_MODE_SIZE (mode));
4167 if (!MEM_P (xinner))
4169 temp = assign_temp (type, 1, 1);
4170 emit_move_insn (temp, xinner);
4171 xinner = temp;
4175 gcc_assert (size);
4177 /* USED is now the # of bytes we need not copy to the stack
4178 because registers will take care of them. */
4180 if (partial != 0)
4181 xinner = adjust_address (xinner, BLKmode, used);
4183 /* If the partial register-part of the arg counts in its stack size,
4184 skip the part of stack space corresponding to the registers.
4185 Otherwise, start copying to the beginning of the stack space,
4186 by setting SKIP to 0. */
4187 skip = (reg_parm_stack_space == 0) ? 0 : used;
4189 #ifdef PUSH_ROUNDING
4190 /* Do it with several push insns if that doesn't take lots of insns
4191 and if there is no difficulty with push insns that skip bytes
4192 on the stack for alignment purposes. */
4193 if (args_addr == 0
4194 && PUSH_ARGS
4195 && CONST_INT_P (size)
4196 && skip == 0
4197 && MEM_ALIGN (xinner) >= align
4198 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4199 /* Here we avoid the case of a structure whose weak alignment
4200 forces many pushes of a small amount of data,
4201 and such small pushes do rounding that causes trouble. */
4202 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4203 || align >= BIGGEST_ALIGNMENT
4204 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4205 == (align / BITS_PER_UNIT)))
4206 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4208 /* Push padding now if padding above and stack grows down,
4209 or if padding below and stack grows up.
4210 But if space already allocated, this has already been done. */
4211 if (extra && args_addr == 0
4212 && where_pad != none && where_pad != stack_direction)
4213 anti_adjust_stack (GEN_INT (extra));
4215 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4217 else
4218 #endif /* PUSH_ROUNDING */
4220 rtx target;
4222 /* Otherwise make space on the stack and copy the data
4223 to the address of that space. */
4225 /* Deduct words put into registers from the size we must copy. */
4226 if (partial != 0)
4228 if (CONST_INT_P (size))
4229 size = GEN_INT (INTVAL (size) - used);
4230 else
4231 size = expand_binop (GET_MODE (size), sub_optab, size,
4232 gen_int_mode (used, GET_MODE (size)),
4233 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4236 /* Get the address of the stack space.
4237 In this case, we do not deal with EXTRA separately.
4238 A single stack adjust will do. */
4239 if (! args_addr)
4241 temp = push_block (size, extra, where_pad == downward);
4242 extra = 0;
4244 else if (CONST_INT_P (args_so_far))
4245 temp = memory_address (BLKmode,
4246 plus_constant (Pmode, args_addr,
4247 skip + INTVAL (args_so_far)));
4248 else
4249 temp = memory_address (BLKmode,
4250 plus_constant (Pmode,
4251 gen_rtx_PLUS (Pmode,
4252 args_addr,
4253 args_so_far),
4254 skip));
4256 if (!ACCUMULATE_OUTGOING_ARGS)
4258 /* If the source is referenced relative to the stack pointer,
4259 copy it to another register to stabilize it. We do not need
4260 to do this if we know that we won't be changing sp. */
4262 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4263 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4264 temp = copy_to_reg (temp);
4267 target = gen_rtx_MEM (BLKmode, temp);
4269 /* We do *not* set_mem_attributes here, because incoming arguments
4270 may overlap with sibling call outgoing arguments and we cannot
4271 allow reordering of reads from function arguments with stores
4272 to outgoing arguments of sibling calls. We do, however, want
4273 to record the alignment of the stack slot. */
4274 /* ALIGN may well be better aligned than TYPE, e.g. due to
4275 PARM_BOUNDARY. Assume the caller isn't lying. */
4276 set_mem_align (target, align);
4278 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4281 else if (partial > 0)
4283 /* Scalar partly in registers. */
4285 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4286 int i;
4287 int not_stack;
4288 /* # bytes of start of argument
4289 that we must make space for but need not store. */
4290 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4291 int args_offset = INTVAL (args_so_far);
4292 int skip;
4294 /* Push padding now if padding above and stack grows down,
4295 or if padding below and stack grows up.
4296 But if space already allocated, this has already been done. */
4297 if (extra && args_addr == 0
4298 && where_pad != none && where_pad != stack_direction)
4299 anti_adjust_stack (GEN_INT (extra));
4301 /* If we make space by pushing it, we might as well push
4302 the real data. Otherwise, we can leave OFFSET nonzero
4303 and leave the space uninitialized. */
4304 if (args_addr == 0)
4305 offset = 0;
4307 /* Now NOT_STACK gets the number of words that we don't need to
4308 allocate on the stack. Convert OFFSET to words too. */
4309 not_stack = (partial - offset) / UNITS_PER_WORD;
4310 offset /= UNITS_PER_WORD;
4312 /* If the partial register-part of the arg counts in its stack size,
4313 skip the part of stack space corresponding to the registers.
4314 Otherwise, start copying to the beginning of the stack space,
4315 by setting SKIP to 0. */
4316 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4318 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4319 x = validize_mem (force_const_mem (mode, x));
4321 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4322 SUBREGs of such registers are not allowed. */
4323 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4324 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4325 x = copy_to_reg (x);
4327 /* Loop over all the words allocated on the stack for this arg. */
4328 /* We can do it by words, because any scalar bigger than a word
4329 has a size that is a multiple of a word. */
4330 #ifndef PUSH_ARGS_REVERSED
4331 for (i = not_stack; i < size; i++)
4332 #else
4333 for (i = size - 1; i >= not_stack; i--)
4334 #endif
4335 if (i >= not_stack + offset)
4336 emit_push_insn (operand_subword_force (x, i, mode),
4337 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4338 0, args_addr,
4339 GEN_INT (args_offset + ((i - not_stack + skip)
4340 * UNITS_PER_WORD)),
4341 reg_parm_stack_space, alignment_pad);
4343 else
4345 rtx addr;
4346 rtx dest;
4348 /* Push padding now if padding above and stack grows down,
4349 or if padding below and stack grows up.
4350 But if space already allocated, this has already been done. */
4351 if (extra && args_addr == 0
4352 && where_pad != none && where_pad != stack_direction)
4353 anti_adjust_stack (GEN_INT (extra));
4355 #ifdef PUSH_ROUNDING
4356 if (args_addr == 0 && PUSH_ARGS)
4357 emit_single_push_insn (mode, x, type);
4358 else
4359 #endif
4361 if (CONST_INT_P (args_so_far))
4362 addr
4363 = memory_address (mode,
4364 plus_constant (Pmode, args_addr,
4365 INTVAL (args_so_far)));
4366 else
4367 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4368 args_so_far));
4369 dest = gen_rtx_MEM (mode, addr);
4371 /* We do *not* set_mem_attributes here, because incoming arguments
4372 may overlap with sibling call outgoing arguments and we cannot
4373 allow reordering of reads from function arguments with stores
4374 to outgoing arguments of sibling calls. We do, however, want
4375 to record the alignment of the stack slot. */
4376 /* ALIGN may well be better aligned than TYPE, e.g. due to
4377 PARM_BOUNDARY. Assume the caller isn't lying. */
4378 set_mem_align (dest, align);
4380 emit_move_insn (dest, x);
4384 /* If part should go in registers, copy that part
4385 into the appropriate registers. Do this now, at the end,
4386 since mem-to-mem copies above may do function calls. */
4387 if (partial > 0 && reg != 0)
4389 /* Handle calls that pass values in multiple non-contiguous locations.
4390 The Irix 6 ABI has examples of this. */
4391 if (GET_CODE (reg) == PARALLEL)
4392 emit_group_load (reg, x, type, -1);
4393 else
4395 gcc_assert (partial % UNITS_PER_WORD == 0);
4396 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4400 if (extra && args_addr == 0 && where_pad == stack_direction)
4401 anti_adjust_stack (GEN_INT (extra));
4403 if (alignment_pad && args_addr == 0)
4404 anti_adjust_stack (alignment_pad);
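/* A self-contained sketch (not GCC code) of the word bookkeeping in the
   "scalar partly in registers" case above.  All sizes are in bytes; WORD
   and PARM_ALIGN stand in for UNITS_PER_WORD and
   PARM_BOUNDARY / BITS_PER_UNIT.  For word I of the value, STACK_OFF[i] is
   set to the byte offset (relative to the argument block) at which the
   loop above would push it, or to -1 when that word is covered by
   registers or by space that is allocated but not stored.  */

static void
sketch_partial_push_offsets (int value_size, int partial, int word,
                             int parm_align, int reg_parm_stack_space,
                             int args_offset, int *stack_off)
{
  int size = value_size / word;            /* words in the value */
  int offset = partial % parm_align;       /* leading bytes: space only */
  int not_stack, skip, i;

  not_stack = (partial - offset) / word;   /* words fully in registers */
  offset /= word;
  skip = reg_parm_stack_space ? not_stack : 0;

  for (i = 0; i < size; i++)
    stack_off[i] = (i >= not_stack + offset
                    ? args_offset + (i - not_stack + skip) * word
                    : -1);
}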
4407 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4408 operations. */
4410 static rtx
4411 get_subtarget (rtx x)
4413 return (optimize
4414 || x == 0
4415 /* Only registers can be subtargets. */
4416 || !REG_P (x)
4417 /* Don't use hard regs to avoid extending their life. */
4418 || REGNO (x) < FIRST_PSEUDO_REGISTER
4419 ? 0 : x);
4422 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4423 FIELD is a bitfield. Returns true if the optimization was successful,
4424 and there's nothing else to do. */
4426 static bool
4427 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4428 unsigned HOST_WIDE_INT bitpos,
4429 unsigned HOST_WIDE_INT bitregion_start,
4430 unsigned HOST_WIDE_INT bitregion_end,
4431 enum machine_mode mode1, rtx str_rtx,
4432 tree to, tree src)
4434 enum machine_mode str_mode = GET_MODE (str_rtx);
4435 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4436 tree op0, op1;
4437 rtx value, result;
4438 optab binop;
4439 gimple srcstmt;
4440 enum tree_code code;
4442 if (mode1 != VOIDmode
4443 || bitsize >= BITS_PER_WORD
4444 || str_bitsize > BITS_PER_WORD
4445 || TREE_SIDE_EFFECTS (to)
4446 || TREE_THIS_VOLATILE (to))
4447 return false;
4449 STRIP_NOPS (src);
4450 if (TREE_CODE (src) != SSA_NAME)
4451 return false;
4452 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4453 return false;
4455 srcstmt = get_gimple_for_ssa_name (src);
4456 if (!srcstmt
4457 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4458 return false;
4460 code = gimple_assign_rhs_code (srcstmt);
4462 op0 = gimple_assign_rhs1 (srcstmt);
4464 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4465 to find its initialization. Hopefully the initialization will
4466 be from a bitfield load. */
4467 if (TREE_CODE (op0) == SSA_NAME)
4469 gimple op0stmt = get_gimple_for_ssa_name (op0);
4471 /* We want to eventually have OP0 be the same as TO, which
4472 should be a bitfield. */
4473 if (!op0stmt
4474 || !is_gimple_assign (op0stmt)
4475 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4476 return false;
4477 op0 = gimple_assign_rhs1 (op0stmt);
4480 op1 = gimple_assign_rhs2 (srcstmt);
4482 if (!operand_equal_p (to, op0, 0))
4483 return false;
4485 if (MEM_P (str_rtx))
4487 unsigned HOST_WIDE_INT offset1;
4489 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4490 str_mode = word_mode;
4491 str_mode = get_best_mode (bitsize, bitpos,
4492 bitregion_start, bitregion_end,
4493 MEM_ALIGN (str_rtx), str_mode, 0);
4494 if (str_mode == VOIDmode)
4495 return false;
4496 str_bitsize = GET_MODE_BITSIZE (str_mode);
4498 offset1 = bitpos;
4499 bitpos %= str_bitsize;
4500 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4501 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4503 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4504 return false;
4506 /* If the bit field covers the whole REG/MEM, store_field
4507 will likely generate better code. */
4508 if (bitsize >= str_bitsize)
4509 return false;
4511 /* We can't handle fields split across multiple entities. */
4512 if (bitpos + bitsize > str_bitsize)
4513 return false;
4515 if (BYTES_BIG_ENDIAN)
4516 bitpos = str_bitsize - bitpos - bitsize;
4518 switch (code)
4520 case PLUS_EXPR:
4521 case MINUS_EXPR:
4522 /* For now, just optimize the case of the topmost bitfield
4523 where we don't need to do any masking and also
4524 1 bit bitfields where xor can be used.
4525 We might win by one instruction for the other bitfields
4526 too if insv/extv instructions aren't used, so that
4527 can be added later. */
4528 if (bitpos + bitsize != str_bitsize
4529 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4530 break;
4532 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4533 value = convert_modes (str_mode,
4534 TYPE_MODE (TREE_TYPE (op1)), value,
4535 TYPE_UNSIGNED (TREE_TYPE (op1)));
4537 /* We may be accessing data outside the field, which means
4538 we can alias adjacent data. */
4539 if (MEM_P (str_rtx))
4541 str_rtx = shallow_copy_rtx (str_rtx);
4542 set_mem_alias_set (str_rtx, 0);
4543 set_mem_expr (str_rtx, 0);
4546 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4547 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4549 value = expand_and (str_mode, value, const1_rtx, NULL);
4550 binop = xor_optab;
4552 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4553 result = expand_binop (str_mode, binop, str_rtx,
4554 value, str_rtx, 1, OPTAB_WIDEN);
4555 if (result != str_rtx)
4556 emit_move_insn (str_rtx, result);
4557 return true;
4559 case BIT_IOR_EXPR:
4560 case BIT_XOR_EXPR:
4561 if (TREE_CODE (op1) != INTEGER_CST)
4562 break;
4563 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4564 value = convert_modes (str_mode,
4565 TYPE_MODE (TREE_TYPE (op1)), value,
4566 TYPE_UNSIGNED (TREE_TYPE (op1)));
4568 /* We may be accessing data outside the field, which means
4569 we can alias adjacent data. */
4570 if (MEM_P (str_rtx))
4572 str_rtx = shallow_copy_rtx (str_rtx);
4573 set_mem_alias_set (str_rtx, 0);
4574 set_mem_expr (str_rtx, 0);
4577 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4578 if (bitpos + bitsize != str_bitsize)
4580 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4581 str_mode);
4582 value = expand_and (str_mode, value, mask, NULL_RTX);
4584 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4585 result = expand_binop (str_mode, binop, str_rtx,
4586 value, str_rtx, 1, OPTAB_WIDEN);
4587 if (result != str_rtx)
4588 emit_move_insn (str_rtx, result);
4589 return true;
4591 default:
4592 break;
4595 return false;
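/* A self-contained sketch (not GCC code) of the shift/mask algebra used
   above, on a plain unsigned long instead of an rtx, with little-endian
   bit numbering and BITSIZE strictly smaller than the word (as the
   str_bitsize checks above guarantee).  */

/* FIELD |= VAL for a BITSIZE-wide field at BITPOS: mask VAL down to the
   field, shift it into place and IOR it in; no other bits are touched.  */
static unsigned long
sketch_bitfield_ior (unsigned long word, unsigned long val,
                     int bitpos, int bitsize)
{
  unsigned long mask = (1UL << bitsize) - 1;
  return word | ((val & mask) << bitpos);
}

/* FIELD += VAL when the field occupies the topmost bits of the word
   (bitpos + bitsize == word size): carries out of the field fall off the
   top of the word, so no masking is needed, as exploited above.  */
static unsigned long
sketch_topmost_bitfield_add (unsigned long word, unsigned long val,
                             int bitpos)
{
  return word + (val << bitpos);
}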
4598 /* In the C++ memory model, consecutive bit fields in a structure are
4599 considered one memory location.
4601 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4602 returns the bit range of consecutive bits in which this COMPONENT_REF
4603 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4604 and *OFFSET may be adjusted in the process.
4606 If the access does not need to be restricted, 0 is returned in both
4607 *BITSTART and *BITEND. */
4609 static void
4610 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4611 unsigned HOST_WIDE_INT *bitend,
4612 tree exp,
4613 HOST_WIDE_INT *bitpos,
4614 tree *offset)
4616 HOST_WIDE_INT bitoffset;
4617 tree field, repr;
4619 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4621 field = TREE_OPERAND (exp, 1);
4622 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4623 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4624 need to limit the range we can access. */
4625 if (!repr)
4627 *bitstart = *bitend = 0;
4628 return;
4631 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4632 part of a larger bit field, then the representative does not serve any
4633 useful purpose. This can occur in Ada. */
4634 if (handled_component_p (TREE_OPERAND (exp, 0)))
4636 enum machine_mode rmode;
4637 HOST_WIDE_INT rbitsize, rbitpos;
4638 tree roffset;
4639 int unsignedp;
4640 int volatilep = 0;
4641 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4642 &roffset, &rmode, &unsignedp, &volatilep, false);
4643 if ((rbitpos % BITS_PER_UNIT) != 0)
4645 *bitstart = *bitend = 0;
4646 return;
4650 /* Compute the adjustment to bitpos from the offset of the field
4651 relative to the representative. DECL_FIELD_OFFSET of field and
4652 repr are the same by construction if they are not constants,
4653 see finish_bitfield_layout. */
4654 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4655 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4656 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4657 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4658 else
4659 bitoffset = 0;
4660 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4661 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4663 /* If the adjustment is larger than bitpos, we would have a negative bit
4664 position for the lower bound and this may wreak havoc later. Adjust
4665 offset and bitpos to make the lower bound non-negative in that case. */
4666 if (bitoffset > *bitpos)
4668 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4669 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4671 *bitpos += adjust;
4672 if (*offset == NULL_TREE)
4673 *offset = size_int (-adjust / BITS_PER_UNIT);
4674 else
4675 *offset
4676 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4677 *bitstart = 0;
4679 else
4680 *bitstart = *bitpos - bitoffset;
4682 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
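/* An illustrative example (not GCC code) of what the computed bit range
   protects.  Under the C11/C++11 memory model, A and B below are adjacent
   bit-fields and so form a single memory location, while C is a separate
   one.  A store to P->a may therefore read-modify-write the bits of B, but
   it must not touch the byte holding C; *BITSTART/*BITEND describe the bit
   span of the representative covering A and B that such a store may
   access.  The exact layout and representative width are target-dependent.  */

struct sketch_bitfield_loc
{
  unsigned int a : 3;
  unsigned int b : 7;
  char c;
};

static void
sketch_store_to_a (struct sketch_bitfield_loc *p)
{
  p->a = 5;     /* may touch B's bits, never C's byte */
}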
4685 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4686 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4687 DECL_RTL was not set yet, return NORTL. */
4689 static inline bool
4690 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4692 if (TREE_CODE (addr) != ADDR_EXPR)
4693 return false;
4695 tree base = TREE_OPERAND (addr, 0);
4697 if (!DECL_P (base)
4698 || TREE_ADDRESSABLE (base)
4699 || DECL_MODE (base) == BLKmode)
4700 return false;
4702 if (!DECL_RTL_SET_P (base))
4703 return nortl;
4705 return (!MEM_P (DECL_RTL (base)));
4708 /* Returns true if the MEM_REF REF refers to an object that does not
4709 reside in memory and has non-BLKmode. */
4711 static inline bool
4712 mem_ref_refers_to_non_mem_p (tree ref)
4714 tree base = TREE_OPERAND (ref, 0);
4715 return addr_expr_of_non_mem_decl_p_1 (base, false);
4718 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4719 is true, try generating a nontemporal store. */
4721 void
4722 expand_assignment (tree to, tree from, bool nontemporal)
4724 rtx to_rtx = 0;
4725 rtx result;
4726 enum machine_mode mode;
4727 unsigned int align;
4728 enum insn_code icode;
4730 /* Don't crash if the lhs of the assignment was erroneous. */
4731 if (TREE_CODE (to) == ERROR_MARK)
4733 expand_normal (from);
4734 return;
4737 /* Optimize away no-op moves without side-effects. */
4738 if (operand_equal_p (to, from, 0))
4739 return;
4741 /* Handle misaligned stores. */
4742 mode = TYPE_MODE (TREE_TYPE (to));
4743 if ((TREE_CODE (to) == MEM_REF
4744 || TREE_CODE (to) == TARGET_MEM_REF)
4745 && mode != BLKmode
4746 && !mem_ref_refers_to_non_mem_p (to)
4747 && ((align = get_object_alignment (to))
4748 < GET_MODE_ALIGNMENT (mode))
4749 && (((icode = optab_handler (movmisalign_optab, mode))
4750 != CODE_FOR_nothing)
4751 || SLOW_UNALIGNED_ACCESS (mode, align)))
4753 rtx reg, mem;
4755 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4756 reg = force_not_mem (reg);
4757 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4759 if (icode != CODE_FOR_nothing)
4761 struct expand_operand ops[2];
4763 create_fixed_operand (&ops[0], mem);
4764 create_input_operand (&ops[1], reg, mode);
4765 /* The movmisalign<mode> pattern cannot fail, else the assignment
4766 would silently be omitted. */
4767 expand_insn (icode, 2, ops);
4769 else
4770 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4771 0, 0, 0, mode, reg);
4772 return;
4775 /* Assignment of a structure component needs special treatment
4776 if the structure component's rtx is not simply a MEM.
4777 Assignment of an array element at a constant index, and assignment of
4778 an array element in an unaligned packed structure field, has the same
4779 problem. Same for (partially) storing into a non-memory object. */
4780 if (handled_component_p (to)
4781 || (TREE_CODE (to) == MEM_REF
4782 && mem_ref_refers_to_non_mem_p (to))
4783 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4785 enum machine_mode mode1;
4786 HOST_WIDE_INT bitsize, bitpos;
4787 unsigned HOST_WIDE_INT bitregion_start = 0;
4788 unsigned HOST_WIDE_INT bitregion_end = 0;
4789 tree offset;
4790 int unsignedp;
4791 int volatilep = 0;
4792 tree tem;
4794 push_temp_slots ();
4795 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4796 &unsignedp, &volatilep, true);
4798 /* Make sure bitpos is not negative, it can wreak havoc later. */
4799 if (bitpos < 0)
4801 gcc_assert (offset == NULL_TREE);
4802 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4803 ? 3 : exact_log2 (BITS_PER_UNIT)));
4804 bitpos &= BITS_PER_UNIT - 1;
4807 if (TREE_CODE (to) == COMPONENT_REF
4808 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4809 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4811 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4813 /* If the bitfield is volatile, we want to access it in the
4814 field's mode, not the computed mode.
4815 If a MEM has VOIDmode (external with incomplete type),
4816 use BLKmode for it instead. */
4817 if (MEM_P (to_rtx))
4819 if (volatilep && flag_strict_volatile_bitfields > 0)
4820 to_rtx = adjust_address (to_rtx, mode1, 0);
4821 else if (GET_MODE (to_rtx) == VOIDmode)
4822 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4825 if (offset != 0)
4827 enum machine_mode address_mode;
4828 rtx offset_rtx;
4830 if (!MEM_P (to_rtx))
4832 /* We can get constant negative offsets into arrays with broken
4833 user code. Translate this to a trap instead of ICEing. */
4834 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4835 expand_builtin_trap ();
4836 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4839 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4840 address_mode = get_address_mode (to_rtx);
4841 if (GET_MODE (offset_rtx) != address_mode)
4842 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4844 /* A constant address in TO_RTX can have VOIDmode, we must not try
4845 to call force_reg for that case. Avoid that case. */
4846 if (MEM_P (to_rtx)
4847 && GET_MODE (to_rtx) == BLKmode
4848 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4849 && bitsize > 0
4850 && (bitpos % bitsize) == 0
4851 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4852 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4854 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4855 bitpos = 0;
4858 to_rtx = offset_address (to_rtx, offset_rtx,
4859 highest_pow2_factor_for_target (to,
4860 offset));
4863 /* No action is needed if the target is not a memory and the field
4864 lies completely outside that target. This can occur if the source
4865 code contains an out-of-bounds access to a small array. */
4866 if (!MEM_P (to_rtx)
4867 && GET_MODE (to_rtx) != BLKmode
4868 && (unsigned HOST_WIDE_INT) bitpos
4869 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4871 expand_normal (from);
4872 result = NULL;
4874 /* Handle expand_expr of a complex value returning a CONCAT. */
4875 else if (GET_CODE (to_rtx) == CONCAT)
4877 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4878 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4879 && bitpos == 0
4880 && bitsize == mode_bitsize)
4881 result = store_expr (from, to_rtx, false, nontemporal);
4882 else if (bitsize == mode_bitsize / 2
4883 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4884 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4885 nontemporal);
4886 else if (bitpos + bitsize <= mode_bitsize / 2)
4887 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4888 bitregion_start, bitregion_end,
4889 mode1, from,
4890 get_alias_set (to), nontemporal);
4891 else if (bitpos >= mode_bitsize / 2)
4892 result = store_field (XEXP (to_rtx, 1), bitsize,
4893 bitpos - mode_bitsize / 2,
4894 bitregion_start, bitregion_end,
4895 mode1, from,
4896 get_alias_set (to), nontemporal);
4897 else if (bitpos == 0 && bitsize == mode_bitsize)
4899 rtx from_rtx;
4900 result = expand_normal (from);
4901 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4902 TYPE_MODE (TREE_TYPE (from)), 0);
4903 emit_move_insn (XEXP (to_rtx, 0),
4904 read_complex_part (from_rtx, false));
4905 emit_move_insn (XEXP (to_rtx, 1),
4906 read_complex_part (from_rtx, true));
4908 else
4910 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4911 GET_MODE_SIZE (GET_MODE (to_rtx)));
4912 write_complex_part (temp, XEXP (to_rtx, 0), false);
4913 write_complex_part (temp, XEXP (to_rtx, 1), true);
4914 result = store_field (temp, bitsize, bitpos,
4915 bitregion_start, bitregion_end,
4916 mode1, from,
4917 get_alias_set (to), nontemporal);
4918 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4919 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4922 else
4924 if (MEM_P (to_rtx))
4926 /* If the field is at offset zero, we could have been given the
4927 DECL_RTX of the parent struct. Don't munge it. */
4928 to_rtx = shallow_copy_rtx (to_rtx);
4929 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4930 if (volatilep)
4931 MEM_VOLATILE_P (to_rtx) = 1;
4934 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4935 bitregion_start, bitregion_end,
4936 mode1,
4937 to_rtx, to, from))
4938 result = NULL;
4939 else
4940 result = store_field (to_rtx, bitsize, bitpos,
4941 bitregion_start, bitregion_end,
4942 mode1, from,
4943 get_alias_set (to), nontemporal);
4946 if (result)
4947 preserve_temp_slots (result);
4948 pop_temp_slots ();
4949 return;
4952 /* If the rhs is a function call and its value is not an aggregate,
4953 call the function before we start to compute the lhs.
4954 This is needed for correct code for cases such as
4955 val = setjmp (buf) on machines where reference to val
4956 requires loading up part of an address in a separate insn.
4958 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4959 since it might be a promoted variable where the zero- or sign- extension
4960 needs to be done. Handling this in the normal way is safe because no
4961 computation is done before the call. The same is true for SSA names. */
4962 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4963 && COMPLETE_TYPE_P (TREE_TYPE (from))
4964 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4965 && ! (((TREE_CODE (to) == VAR_DECL
4966 || TREE_CODE (to) == PARM_DECL
4967 || TREE_CODE (to) == RESULT_DECL)
4968 && REG_P (DECL_RTL (to)))
4969 || TREE_CODE (to) == SSA_NAME))
4971 rtx value;
4973 push_temp_slots ();
4974 value = expand_normal (from);
4975 if (to_rtx == 0)
4976 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4978 /* Handle calls that return values in multiple non-contiguous locations.
4979 The Irix 6 ABI has examples of this. */
4980 if (GET_CODE (to_rtx) == PARALLEL)
4982 if (GET_CODE (value) == PARALLEL)
4983 emit_group_move (to_rtx, value);
4984 else
4985 emit_group_load (to_rtx, value, TREE_TYPE (from),
4986 int_size_in_bytes (TREE_TYPE (from)));
4988 else if (GET_CODE (value) == PARALLEL)
4989 emit_group_store (to_rtx, value, TREE_TYPE (from),
4990 int_size_in_bytes (TREE_TYPE (from)));
4991 else if (GET_MODE (to_rtx) == BLKmode)
4993 /* Handle calls that return BLKmode values in registers. */
4994 if (REG_P (value))
4995 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4996 else
4997 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4999 else
5001 if (POINTER_TYPE_P (TREE_TYPE (to)))
5002 value = convert_memory_address_addr_space
5003 (GET_MODE (to_rtx), value,
5004 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5006 emit_move_insn (to_rtx, value);
5008 preserve_temp_slots (to_rtx);
5009 pop_temp_slots ();
5010 return;
5013 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5014 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5016 /* Don't move directly into a return register. */
5017 if (TREE_CODE (to) == RESULT_DECL
5018 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5020 rtx temp;
5022 push_temp_slots ();
5024 /* If the source is itself a return value, it still is in a pseudo at
5025 this point so we can move it back to the return register directly. */
5026 if (REG_P (to_rtx)
5027 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5028 && TREE_CODE (from) != CALL_EXPR)
5029 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5030 else
5031 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5033 /* Handle calls that return values in multiple non-contiguous locations.
5034 The Irix 6 ABI has examples of this. */
5035 if (GET_CODE (to_rtx) == PARALLEL)
5037 if (GET_CODE (temp) == PARALLEL)
5038 emit_group_move (to_rtx, temp);
5039 else
5040 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5041 int_size_in_bytes (TREE_TYPE (from)));
5043 else if (temp)
5044 emit_move_insn (to_rtx, temp);
5046 preserve_temp_slots (to_rtx);
5047 pop_temp_slots ();
5048 return;
5051 /* In case we are returning the contents of an object which overlaps
5052 the place the value is being stored, use a safe function when copying
5053 a value through a pointer into a structure value return block. */
5054 if (TREE_CODE (to) == RESULT_DECL
5055 && TREE_CODE (from) == INDIRECT_REF
5056 && ADDR_SPACE_GENERIC_P
5057 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5058 && refs_may_alias_p (to, from)
5059 && cfun->returns_struct
5060 && !cfun->returns_pcc_struct)
5062 rtx from_rtx, size;
5064 push_temp_slots ();
5065 size = expr_size (from);
5066 from_rtx = expand_normal (from);
5068 emit_library_call (memmove_libfunc, LCT_NORMAL,
5069 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5070 XEXP (from_rtx, 0), Pmode,
5071 convert_to_mode (TYPE_MODE (sizetype),
5072 size, TYPE_UNSIGNED (sizetype)),
5073 TYPE_MODE (sizetype));
5075 preserve_temp_slots (to_rtx);
5076 pop_temp_slots ();
5077 return;
5080 /* Compute FROM and store the value in the rtx we got. */
5082 push_temp_slots ();
5083 result = store_expr (from, to_rtx, 0, nontemporal);
5084 preserve_temp_slots (result);
5085 pop_temp_slots ();
5086 return;
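/* An illustrative fragment (not GCC code) that reaches the CONCAT cases
   above: when a _Complex local is kept out of memory, expanding it yields
   a CONCAT of two pseudos, and the store to its real part goes through the
   "bitsize == mode_bitsize / 2" branch, writing only XEXP (to_rtx, 0).
   Whether the variable actually stays in registers is of course up to the
   optimizers.  */

static double
sketch_store_real_part (void)
{
  _Complex double c = 0;
  __real__ c = 1.0;     /* store to one half of the CONCAT */
  return __imag__ c;
}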
5089 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5090 succeeded, false otherwise. */
5092 bool
5093 emit_storent_insn (rtx to, rtx from)
5095 struct expand_operand ops[2];
5096 enum machine_mode mode = GET_MODE (to);
5097 enum insn_code code = optab_handler (storent_optab, mode);
5099 if (code == CODE_FOR_nothing)
5100 return false;
5102 create_fixed_operand (&ops[0], to);
5103 create_input_operand (&ops[1], from, mode);
5104 return maybe_expand_insn (code, 2, ops);
5107 /* Generate code for computing expression EXP,
5108 and storing the value into TARGET.
5110 If the mode is BLKmode then we may return TARGET itself.
5111 It turns out that in BLKmode it doesn't cause a problem,
5112 because C has no operators that could combine two different
5113 assignments into the same BLKmode object with different values
5114 with no sequence point. Will other languages need this to
5115 be more thorough?
5117 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5118 stack, and block moves may need to be treated specially.
5120 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5122 rtx
5123 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5125 rtx temp;
5126 rtx alt_rtl = NULL_RTX;
5127 location_t loc = curr_insn_location ();
5129 if (VOID_TYPE_P (TREE_TYPE (exp)))
5131 /* C++ can generate ?: expressions with a throw expression in one
5132 branch and an rvalue in the other. Here, we resolve attempts to
5133 store the throw expression's nonexistent result. */
5134 gcc_assert (!call_param_p);
5135 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5136 return NULL_RTX;
5138 if (TREE_CODE (exp) == COMPOUND_EXPR)
5140 /* Perform first part of compound expression, then assign from second
5141 part. */
5142 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5143 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5144 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5145 nontemporal);
5147 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5149 /* For conditional expression, get safe form of the target. Then
5150 test the condition, doing the appropriate assignment on either
5151 side. This avoids the creation of unnecessary temporaries.
5152 For non-BLKmode, it is more efficient not to do this. */
5154 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5156 do_pending_stack_adjust ();
5157 NO_DEFER_POP;
5158 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5159 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5160 nontemporal);
5161 emit_jump_insn (gen_jump (lab2));
5162 emit_barrier ();
5163 emit_label (lab1);
5164 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5165 nontemporal);
5166 emit_label (lab2);
5167 OK_DEFER_POP;
5169 return NULL_RTX;
5171 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5172 /* If this is a scalar in a register that is stored in a wider mode
5173 than the declared mode, compute the result into its declared mode
5174 and then convert to the wider mode. Our value is the computed
5175 expression. */
5177 rtx inner_target = 0;
5179 /* We can do the conversion inside EXP, which will often result
5180 in some optimizations. Do the conversion in two steps: first
5181 change the signedness, if needed, then the extend. But don't
5182 do this if the type of EXP is a subtype of something else
5183 since then the conversion might involve more than just
5184 converting modes. */
5185 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5186 && TREE_TYPE (TREE_TYPE (exp)) == 0
5187 && GET_MODE_PRECISION (GET_MODE (target))
5188 == TYPE_PRECISION (TREE_TYPE (exp)))
5190 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5191 != SUBREG_PROMOTED_UNSIGNED_P (target))
5193 /* Some types, e.g. Fortran's logical*4, won't have a signed
5194 version, so use the mode instead. */
5195 tree ntype
5196 = (signed_or_unsigned_type_for
5197 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5198 if (ntype == NULL)
5199 ntype = lang_hooks.types.type_for_mode
5200 (TYPE_MODE (TREE_TYPE (exp)),
5201 SUBREG_PROMOTED_UNSIGNED_P (target));
5203 exp = fold_convert_loc (loc, ntype, exp);
5206 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5207 (GET_MODE (SUBREG_REG (target)),
5208 SUBREG_PROMOTED_UNSIGNED_P (target)),
5209 exp);
5211 inner_target = SUBREG_REG (target);
5214 temp = expand_expr (exp, inner_target, VOIDmode,
5215 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5217 /* If TEMP is a VOIDmode constant, use convert_modes to make
5218 sure that we properly convert it. */
5219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5221 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5222 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5223 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5224 GET_MODE (target), temp,
5225 SUBREG_PROMOTED_UNSIGNED_P (target));
5228 convert_move (SUBREG_REG (target), temp,
5229 SUBREG_PROMOTED_UNSIGNED_P (target));
5231 return NULL_RTX;
5233 else if ((TREE_CODE (exp) == STRING_CST
5234 || (TREE_CODE (exp) == MEM_REF
5235 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5236 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5237 == STRING_CST
5238 && integer_zerop (TREE_OPERAND (exp, 1))))
5239 && !nontemporal && !call_param_p
5240 && MEM_P (target))
5242 /* Optimize initialization of an array with a STRING_CST. */
5243 HOST_WIDE_INT exp_len, str_copy_len;
5244 rtx dest_mem;
5245 tree str = TREE_CODE (exp) == STRING_CST
5246 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5248 exp_len = int_expr_size (exp);
5249 if (exp_len <= 0)
5250 goto normal_expr;
5252 if (TREE_STRING_LENGTH (str) <= 0)
5253 goto normal_expr;
5255 str_copy_len = strlen (TREE_STRING_POINTER (str));
5256 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5257 goto normal_expr;
5259 str_copy_len = TREE_STRING_LENGTH (str);
5260 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5261 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5263 str_copy_len += STORE_MAX_PIECES - 1;
5264 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5266 str_copy_len = MIN (str_copy_len, exp_len);
5267 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5268 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5269 MEM_ALIGN (target), false))
5270 goto normal_expr;
5272 dest_mem = target;
5274 dest_mem = store_by_pieces (dest_mem,
5275 str_copy_len, builtin_strncpy_read_str,
5276 CONST_CAST (char *,
5277 TREE_STRING_POINTER (str)),
5278 MEM_ALIGN (target), false,
5279 exp_len > str_copy_len ? 1 : 0);
5280 if (exp_len > str_copy_len)
5281 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5282 GEN_INT (exp_len - str_copy_len),
5283 BLOCK_OP_NORMAL);
5284 return NULL_RTX;
5286 else
5288 rtx tmp_target;
5290 normal_expr:
5291 /* If we want to use a nontemporal store, force the value to
5292 register first. */
5293 tmp_target = nontemporal ? NULL_RTX : target;
5294 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5295 (call_param_p
5296 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5297 &alt_rtl);
5300 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5301 the same as that of TARGET, adjust the constant. This is needed, for
5302 example, in case it is a CONST_DOUBLE and we want only a word-sized
5303 value. */
5304 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5305 && TREE_CODE (exp) != ERROR_MARK
5306 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5307 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5308 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5310 /* If value was not generated in the target, store it there.
5311 Convert the value to TARGET's type first if necessary and emit the
5312 pending incrementations that have been queued when expanding EXP.
5313 Note that we cannot emit the whole queue blindly because this will
5314 effectively disable the POST_INC optimization later.
5316 If TEMP and TARGET compare equal according to rtx_equal_p, but
5317 one or both of them are volatile memory refs, we have to distinguish
5318 two cases:
5319 - expand_expr has used TARGET. In this case, we must not generate
5320 another copy. This can be detected by TARGET being equal according
5321 to == .
5322 - expand_expr has not used TARGET - that means that the source just
5323 happens to have the same RTX form. Since temp will have been created
5324 by expand_expr, it will compare unequal according to == .
5325 We must generate a copy in this case, to reach the correct number
5326 of volatile memory references. */
5328 if ((! rtx_equal_p (temp, target)
5329 || (temp != target && (side_effects_p (temp)
5330 || side_effects_p (target))))
5331 && TREE_CODE (exp) != ERROR_MARK
5332 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5333 but TARGET is not a valid memory reference, TEMP will differ
5334 from TARGET although it is really the same location. */
5335 && !(alt_rtl
5336 && rtx_equal_p (alt_rtl, target)
5337 && !side_effects_p (alt_rtl)
5338 && !side_effects_p (target))
5339 /* If there's nothing to copy, don't bother. Don't call
5340 expr_size unless necessary, because some front-ends' (C++)
5341 expr_size hook must not be given objects that are not
5342 supposed to be bit-copied or bit-initialized. */
5343 && expr_size (exp) != const0_rtx)
5345 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5347 if (GET_MODE (target) == BLKmode)
5349 /* Handle calls that return BLKmode values in registers. */
5350 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5351 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5352 else
5353 store_bit_field (target,
5354 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5355 0, 0, 0, GET_MODE (temp), temp);
5357 else
5358 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5361 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5363 /* Handle copying a string constant into an array. The string
5364 constant may be shorter than the array. So copy just the string's
5365 actual length, and clear the rest. First get the size of the data
5366 type of the string, which is actually the size of the target. */
5367 rtx size = expr_size (exp);
5369 if (CONST_INT_P (size)
5370 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5371 emit_block_move (target, temp, size,
5372 (call_param_p
5373 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5374 else
5376 enum machine_mode pointer_mode
5377 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5378 enum machine_mode address_mode = get_address_mode (target);
5380 /* Compute the size of the data to copy from the string. */
5381 tree copy_size
5382 = size_binop_loc (loc, MIN_EXPR,
5383 make_tree (sizetype, size),
5384 size_int (TREE_STRING_LENGTH (exp)));
5385 rtx copy_size_rtx
5386 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5387 (call_param_p
5388 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5389 rtx label = 0;
5391 /* Copy that much. */
5392 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5393 TYPE_UNSIGNED (sizetype));
5394 emit_block_move (target, temp, copy_size_rtx,
5395 (call_param_p
5396 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5398 /* Figure out how much is left in TARGET that we have to clear.
5399 Do all calculations in pointer_mode. */
5400 if (CONST_INT_P (copy_size_rtx))
5402 size = plus_constant (address_mode, size,
5403 -INTVAL (copy_size_rtx));
5404 target = adjust_address (target, BLKmode,
5405 INTVAL (copy_size_rtx));
5407 else
5409 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5410 copy_size_rtx, NULL_RTX, 0,
5411 OPTAB_LIB_WIDEN);
5413 if (GET_MODE (copy_size_rtx) != address_mode)
5414 copy_size_rtx = convert_to_mode (address_mode,
5415 copy_size_rtx,
5416 TYPE_UNSIGNED (sizetype));
5418 target = offset_address (target, copy_size_rtx,
5419 highest_pow2_factor (copy_size));
5420 label = gen_label_rtx ();
5421 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5422 GET_MODE (size), 0, label);
5425 if (size != const0_rtx)
5426 clear_storage (target, size, BLOCK_OP_NORMAL);
5428 if (label)
5429 emit_label (label);
5432 /* Handle calls that return values in multiple non-contiguous locations.
5433 The Irix 6 ABI has examples of this. */
5434 else if (GET_CODE (target) == PARALLEL)
5436 if (GET_CODE (temp) == PARALLEL)
5437 emit_group_move (target, temp);
5438 else
5439 emit_group_load (target, temp, TREE_TYPE (exp),
5440 int_size_in_bytes (TREE_TYPE (exp)));
5442 else if (GET_CODE (temp) == PARALLEL)
5443 emit_group_store (target, temp, TREE_TYPE (exp),
5444 int_size_in_bytes (TREE_TYPE (exp)));
5445 else if (GET_MODE (temp) == BLKmode)
5446 emit_block_move (target, temp, expr_size (exp),
5447 (call_param_p
5448 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5449 /* If we emit a nontemporal store, there is nothing else to do. */
5450 else if (nontemporal && emit_storent_insn (target, temp))
5452 else
5454 temp = force_operand (temp, target);
5455 if (temp != target)
5456 emit_move_insn (target, temp);
5460 return NULL_RTX;
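/* A self-contained sketch (not GCC code, needs <string.h>) of the
   string-constant paths above: both the STRING_CST fast path and the
   BLKmode copy near the end amount to copying the string's bytes and
   clearing whatever is left of the destination, much like strncpy.  The
   rounding to STORE_MAX_PIECES is omitted for clarity.  */

static void
sketch_store_string_cst (char *dst, long dst_len,
                         const char *str, long str_len)
{
  long copy_len = str_len < dst_len ? str_len : dst_len;

  memcpy (dst, str, copy_len);                  /* store_by_pieces /
                                                   emit_block_move part */
  if (dst_len > copy_len)
    memset (dst + copy_len, 0, dst_len - copy_len);  /* clear_storage part */
}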
5463 /* Return true if field F of structure TYPE is a flexible array. */
5465 static bool
5466 flexible_array_member_p (const_tree f, const_tree type)
5468 const_tree tf;
5470 tf = TREE_TYPE (f);
5471 return (DECL_CHAIN (f) == NULL
5472 && TREE_CODE (tf) == ARRAY_TYPE
5473 && TYPE_DOMAIN (tf)
5474 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5475 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5476 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5477 && int_size_in_bytes (type) >= 0);
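/* An illustrative type (not GCC code) for the predicate above: DATA is the
   last field, its array type has a zero lower bound and no upper bound,
   and the enclosing struct still has a known size, which is the shape the
   checks above look for.  */

struct sketch_flex_msg
{
  int len;
  char data[];  /* flexible_array_member_p would be true for this field */
};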
5480 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5481 must have in order for it to completely initialize a value of type TYPE.
5482 Return -1 if the number isn't known.
5484 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5486 static HOST_WIDE_INT
5487 count_type_elements (const_tree type, bool for_ctor_p)
5489 switch (TREE_CODE (type))
5491 case ARRAY_TYPE:
5493 tree nelts;
5495 nelts = array_type_nelts (type);
5496 if (nelts && tree_fits_uhwi_p (nelts))
5498 unsigned HOST_WIDE_INT n;
5500 n = tree_to_uhwi (nelts) + 1;
5501 if (n == 0 || for_ctor_p)
5502 return n;
5503 else
5504 return n * count_type_elements (TREE_TYPE (type), false);
5506 return for_ctor_p ? -1 : 1;
5509 case RECORD_TYPE:
5511 unsigned HOST_WIDE_INT n;
5512 tree f;
5514 n = 0;
5515 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5516 if (TREE_CODE (f) == FIELD_DECL)
5518 if (!for_ctor_p)
5519 n += count_type_elements (TREE_TYPE (f), false);
5520 else if (!flexible_array_member_p (f, type))
5521 /* Don't count flexible arrays, which are not supposed
5522 to be initialized. */
5523 n += 1;
5526 return n;
5529 case UNION_TYPE:
5530 case QUAL_UNION_TYPE:
5532 tree f;
5533 HOST_WIDE_INT n, m;
5535 gcc_assert (!for_ctor_p);
5536 /* Estimate the number of scalars in each field and pick the
5537 maximum. Other estimates would do instead; the idea is simply
5538 to make sure that the estimate is not sensitive to the ordering
5539 of the fields. */
5540 n = 1;
5541 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5542 if (TREE_CODE (f) == FIELD_DECL)
5544 m = count_type_elements (TREE_TYPE (f), false);
5545 /* If the field doesn't span the whole union, add an extra
5546 scalar for the rest. */
5547 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5548 TYPE_SIZE (type)) != 1)
5549 m++;
5550 if (n < m)
5551 n = m;
5553 return n;
5556 case COMPLEX_TYPE:
5557 return 2;
5559 case VECTOR_TYPE:
5560 return TYPE_VECTOR_SUBPARTS (type);
5562 case INTEGER_TYPE:
5563 case REAL_TYPE:
5564 case FIXED_POINT_TYPE:
5565 case ENUMERAL_TYPE:
5566 case BOOLEAN_TYPE:
5567 case POINTER_TYPE:
5568 case OFFSET_TYPE:
5569 case REFERENCE_TYPE:
5570 case NULLPTR_TYPE:
5571 return 1;
5573 case ERROR_MARK:
5574 return 0;
5576 case VOID_TYPE:
5577 case METHOD_TYPE:
5578 case FUNCTION_TYPE:
5579 case LANG_TYPE:
5580 default:
5581 gcc_unreachable ();
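/* Illustrative sketch, not part of the build: what the counts above work out
   to for two simple C types (assuming a 4-byte int).  */
#if 0
struct s { int a; double b[4]; };
/* count_type_elements (s, true)  == 2: a constructor needs two top-level
   elements to initialize it completely.
   count_type_elements (s, false) == 5: one scalar for 'a' plus four array
   elements.  */

union u { char c; int i; };
/* FOR_CTOR_P must be false for unions; 'c' does not span the whole union,
   so it counts as 1 + 1, and the maximum over the fields gives an estimate
   of 2 scalars.  */
#endif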
5585 /* Helper for categorize_ctor_elements. Identical interface. */
5587 static bool
5588 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5589 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5591 unsigned HOST_WIDE_INT idx;
5592 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5593 tree value, purpose, elt_type;
5595 /* Whether CTOR is a valid constant initializer, in accordance with what
5596 initializer_constant_valid_p does. If inferred from the constructor
5597 elements, true until proven otherwise. */
5598 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5599 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5601 nz_elts = 0;
5602 init_elts = 0;
5603 num_fields = 0;
5604 elt_type = NULL_TREE;
5606 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5608 HOST_WIDE_INT mult = 1;
5610 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5612 tree lo_index = TREE_OPERAND (purpose, 0);
5613 tree hi_index = TREE_OPERAND (purpose, 1);
5615 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5616 mult = (tree_to_uhwi (hi_index)
5617 - tree_to_uhwi (lo_index) + 1);
5619 num_fields += mult;
5620 elt_type = TREE_TYPE (value);
5622 switch (TREE_CODE (value))
5624 case CONSTRUCTOR:
5626 HOST_WIDE_INT nz = 0, ic = 0;
5628 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5629 p_complete);
5631 nz_elts += mult * nz;
5632 init_elts += mult * ic;
5634 if (const_from_elts_p && const_p)
5635 const_p = const_elt_p;
5637 break;
5639 case INTEGER_CST:
5640 case REAL_CST:
5641 case FIXED_CST:
5642 if (!initializer_zerop (value))
5643 nz_elts += mult;
5644 init_elts += mult;
5645 break;
5647 case STRING_CST:
5648 nz_elts += mult * TREE_STRING_LENGTH (value);
5649 init_elts += mult * TREE_STRING_LENGTH (value);
5650 break;
5652 case COMPLEX_CST:
5653 if (!initializer_zerop (TREE_REALPART (value)))
5654 nz_elts += mult;
5655 if (!initializer_zerop (TREE_IMAGPART (value)))
5656 nz_elts += mult;
5657 init_elts += mult;
5658 break;
5660 case VECTOR_CST:
5662 unsigned i;
5663 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5665 tree v = VECTOR_CST_ELT (value, i);
5666 if (!initializer_zerop (v))
5667 nz_elts += mult;
5668 init_elts += mult;
5671 break;
5673 default:
5675 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5676 nz_elts += mult * tc;
5677 init_elts += mult * tc;
5679 if (const_from_elts_p && const_p)
5680 const_p = initializer_constant_valid_p (value, elt_type)
5681 != NULL_TREE;
5683 break;
5687 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5688 num_fields, elt_type))
5689 *p_complete = false;
5691 *p_nz_elts += nz_elts;
5692 *p_init_elts += init_elts;
5694 return const_p;
5697 /* Examine CTOR to discover:
5698 * how many scalar fields are set to nonzero values,
5699 and place it in *P_NZ_ELTS;
5700 * how many scalar fields in total are in CTOR,
5701 and place it in *P_INIT_ELTS;
5702 * whether the constructor is complete -- in the sense that every
5703 meaningful byte is explicitly given a value --
5704 and place it in *P_COMPLETE.
5706 Return whether or not CTOR is a valid static constant initializer, the same
5707 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5709 bool
5710 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5711 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5713 *p_nz_elts = 0;
5714 *p_init_elts = 0;
5715 *p_complete = true;
5717 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
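/* Illustrative sketch, not part of the build: for the initializer below, the
   walk above would report *P_NZ_ELTS == 2 (two nonzero scalars),
   *P_INIT_ELTS == 3 (three explicitly initialized elements) and
   *P_COMPLETE == false (the fourth element is only implicitly zero), while
   the return value is true since every element is a valid constant
   initializer.  */
#if 0
int example_vec[4] = { 1, 0, 2 };
#endif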
5720 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5721 of which had type LAST_TYPE. Each element was itself a complete
5722 initializer, in the sense that every meaningful byte was explicitly
5723 given a value. Return true if the same is true for the constructor
5724 as a whole. */
5726 bool
5727 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5728 const_tree last_type)
5730 if (TREE_CODE (type) == UNION_TYPE
5731 || TREE_CODE (type) == QUAL_UNION_TYPE)
5733 if (num_elts == 0)
5734 return false;
5736 gcc_assert (num_elts == 1 && last_type);
5738 /* ??? We could look at each element of the union and find the
5739 largest element, which would avoid comparing the size of the
5740 initialized element against any tail padding in the union.
5741 It doesn't seem worth the effort... */
5742 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5745 return count_type_elements (type, true) == num_elts;
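/* Illustrative sketch, not part of the build: for a union, completeness at
   this level depends on whether the single initialized member covers the
   whole union.  Assuming a 4-byte int, the first initializer below is
   complete, the second is not (three bytes of 'u2' are never explicitly
   written).  */
#if 0
union uc { int i; char c; };
union uc u1 = { .i = 0 };    /* TYPE_SIZE (int) == TYPE_SIZE (union uc)  */
union uc u2 = { .c = 'x' };  /* char is narrower than the union          */
#endif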
5748 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5750 static int
5751 mostly_zeros_p (const_tree exp)
5753 if (TREE_CODE (exp) == CONSTRUCTOR)
5755 HOST_WIDE_INT nz_elts, init_elts;
5756 bool complete_p;
5758 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5759 return !complete_p || nz_elts < init_elts / 4;
5762 return initializer_zerop (exp);
5765 /* Return 1 if EXP contains all zeros. */
5767 static int
5768 all_zeros_p (const_tree exp)
5770 if (TREE_CODE (exp) == CONSTRUCTOR)
5772 HOST_WIDE_INT nz_elts, init_elts;
5773 bool complete_p;
5775 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5776 return nz_elts == 0;
5779 return initializer_zerop (exp);
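/* Illustrative sketch, not part of the build: how the two predicates above
   classify a few constructors.  */
#if 0
int a[8] = { 0, 0, 0, 5, 0, 0, 0, 0 };  /* mostly_zeros_p: 1 < 8/4        */
int b[4] = { 1, 2, 0, 0 };              /* not mostly zeros: 2 >= 4/4     */
int c[4] = { 0 };                       /* incomplete, hence "mostly
                                           zeros"; all_zeros_p as well    */
#endif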
5782 /* Helper function for store_constructor.
5783 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5784 CLEARED is as for store_constructor.
5785 ALIAS_SET is the alias set to use for any stores.
5787 This provides a recursive shortcut back to store_constructor when it isn't
5788 necessary to go through store_field. This is so that we can pass through
5789 the cleared field to let store_constructor know that we may not have to
5790 clear a substructure if the outer structure has already been cleared. */
5792 static void
5793 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5794 HOST_WIDE_INT bitpos, enum machine_mode mode,
5795 tree exp, int cleared, alias_set_type alias_set)
5797 if (TREE_CODE (exp) == CONSTRUCTOR
5798 /* We can only call store_constructor recursively if the size and
5799 bit position are on a byte boundary. */
5800 && bitpos % BITS_PER_UNIT == 0
5801 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5802 /* If we have a nonzero bitpos for a register target, then we just
5803 let store_field do the bitfield handling. This is unlikely to
5804 generate unnecessary clear instructions anyway. */
5805 && (bitpos == 0 || MEM_P (target)))
5807 if (MEM_P (target))
5808 target
5809 = adjust_address (target,
5810 GET_MODE (target) == BLKmode
5811 || 0 != (bitpos
5812 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5813 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5816 /* Update the alias set, if required. */
5817 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5818 && MEM_ALIAS_SET (target) != 0)
5820 target = copy_rtx (target);
5821 set_mem_alias_set (target, alias_set);
5824 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5826 else
5827 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5831 /* Returns the number of FIELD_DECLs in TYPE. */
5833 static int
5834 fields_length (const_tree type)
5836 tree t = TYPE_FIELDS (type);
5837 int count = 0;
5839 for (; t; t = DECL_CHAIN (t))
5840 if (TREE_CODE (t) == FIELD_DECL)
5841 ++count;
5843 return count;
5847 /* Store the value of constructor EXP into the rtx TARGET.
5848 TARGET is either a REG or a MEM; we know it cannot conflict, since
5849 safe_from_p has been called.
5850 CLEARED is true if TARGET is known to have been zero'd.
5851 SIZE is the number of bytes of TARGET we are allowed to modify: this
5852 may not be the same as the size of EXP if we are assigning to a field
5853 which has been packed to exclude padding bits. */
5855 static void
5856 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5858 tree type = TREE_TYPE (exp);
5859 #ifdef WORD_REGISTER_OPERATIONS
5860 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5861 #endif
5863 switch (TREE_CODE (type))
5865 case RECORD_TYPE:
5866 case UNION_TYPE:
5867 case QUAL_UNION_TYPE:
5869 unsigned HOST_WIDE_INT idx;
5870 tree field, value;
5872 /* If size is zero or the target is already cleared, do nothing. */
5873 if (size == 0 || cleared)
5874 cleared = 1;
5875 /* We either clear the aggregate or indicate the value is dead. */
5876 else if ((TREE_CODE (type) == UNION_TYPE
5877 || TREE_CODE (type) == QUAL_UNION_TYPE)
5878 && ! CONSTRUCTOR_ELTS (exp))
5879 /* If the constructor is empty, clear the union. */
5881 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5882 cleared = 1;
5885 /* If we are building a static constructor into a register,
5886 set the initial value as zero so we can fold the value into
5887 a constant. But if more than one register is involved,
5888 this probably loses. */
5889 else if (REG_P (target) && TREE_STATIC (exp)
5890 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5892 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5893 cleared = 1;
5896 /* If the constructor has fewer fields than the structure or
5897 if we are initializing the structure to mostly zeros, clear
5898 the whole structure first. Don't do this if TARGET is a
5899 register whose mode size isn't equal to SIZE since
5900 clear_storage can't handle this case. */
5901 else if (size > 0
5902 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5903 != fields_length (type))
5904 || mostly_zeros_p (exp))
5905 && (!REG_P (target)
5906 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5907 == size)))
5909 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5910 cleared = 1;
5913 if (REG_P (target) && !cleared)
5914 emit_clobber (target);
5916 /* Store each element of the constructor into the
5917 corresponding field of TARGET. */
5918 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5920 enum machine_mode mode;
5921 HOST_WIDE_INT bitsize;
5922 HOST_WIDE_INT bitpos = 0;
5923 tree offset;
5924 rtx to_rtx = target;
5926 /* Just ignore missing fields. We cleared the whole
5927 structure, above, if any fields are missing. */
5928 if (field == 0)
5929 continue;
5931 if (cleared && initializer_zerop (value))
5932 continue;
5934 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5935 bitsize = tree_to_uhwi (DECL_SIZE (field));
5936 else
5937 bitsize = -1;
5939 mode = DECL_MODE (field);
5940 if (DECL_BIT_FIELD (field))
5941 mode = VOIDmode;
5943 offset = DECL_FIELD_OFFSET (field);
5944 if (tree_fits_shwi_p (offset)
5945 && tree_fits_shwi_p (bit_position (field)))
5947 bitpos = int_bit_position (field);
5948 offset = 0;
5950 else
5951 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5953 if (offset)
5955 enum machine_mode address_mode;
5956 rtx offset_rtx;
5958 offset
5959 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5960 make_tree (TREE_TYPE (exp),
5961 target));
5963 offset_rtx = expand_normal (offset);
5964 gcc_assert (MEM_P (to_rtx));
5966 address_mode = get_address_mode (to_rtx);
5967 if (GET_MODE (offset_rtx) != address_mode)
5968 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5970 to_rtx = offset_address (to_rtx, offset_rtx,
5971 highest_pow2_factor (offset));
5974 #ifdef WORD_REGISTER_OPERATIONS
5975 /* If this initializes a field that is smaller than a
5976 word, at the start of a word, try to widen it to a full
5977 word. This special case allows us to output C++ member
5978 function initializations in a form that the optimizers
5979 can understand. */
5980 if (REG_P (target)
5981 && bitsize < BITS_PER_WORD
5982 && bitpos % BITS_PER_WORD == 0
5983 && GET_MODE_CLASS (mode) == MODE_INT
5984 && TREE_CODE (value) == INTEGER_CST
5985 && exp_size >= 0
5986 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5988 tree type = TREE_TYPE (value);
5990 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5992 type = lang_hooks.types.type_for_mode
5993 (word_mode, TYPE_UNSIGNED (type));
5994 value = fold_convert (type, value);
5997 if (BYTES_BIG_ENDIAN)
5998 value
5999 = fold_build2 (LSHIFT_EXPR, type, value,
6000 build_int_cst (type,
6001 BITS_PER_WORD - bitsize));
6002 bitsize = BITS_PER_WORD;
6003 mode = word_mode;
6005 #endif
6007 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6008 && DECL_NONADDRESSABLE_P (field))
6010 to_rtx = copy_rtx (to_rtx);
6011 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6014 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6015 value, cleared,
6016 get_alias_set (TREE_TYPE (field)));
6018 break;
6020 case ARRAY_TYPE:
6022 tree value, index;
6023 unsigned HOST_WIDE_INT i;
6024 int need_to_clear;
6025 tree domain;
6026 tree elttype = TREE_TYPE (type);
6027 int const_bounds_p;
6028 HOST_WIDE_INT minelt = 0;
6029 HOST_WIDE_INT maxelt = 0;
6031 domain = TYPE_DOMAIN (type);
6032 const_bounds_p = (TYPE_MIN_VALUE (domain)
6033 && TYPE_MAX_VALUE (domain)
6034 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6035 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6037 /* If we have constant bounds for the range of the type, get them. */
6038 if (const_bounds_p)
6040 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6041 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6044 /* If the constructor has fewer elements than the array, clear
6045 the whole array first. Similarly if this is a static
6046 constructor of a non-BLKmode object. */
6047 if (cleared)
6048 need_to_clear = 0;
6049 else if (REG_P (target) && TREE_STATIC (exp))
6050 need_to_clear = 1;
6051 else
6053 unsigned HOST_WIDE_INT idx;
6054 tree index, value;
6055 HOST_WIDE_INT count = 0, zero_count = 0;
6056 need_to_clear = ! const_bounds_p;
6058 /* This loop is a more accurate version of the loop in
6059 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6060 is also needed to check for missing elements. */
6061 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6063 HOST_WIDE_INT this_node_count;
6065 if (need_to_clear)
6066 break;
6068 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6070 tree lo_index = TREE_OPERAND (index, 0);
6071 tree hi_index = TREE_OPERAND (index, 1);
6073 if (! tree_fits_uhwi_p (lo_index)
6074 || ! tree_fits_uhwi_p (hi_index))
6076 need_to_clear = 1;
6077 break;
6080 this_node_count = (tree_to_uhwi (hi_index)
6081 - tree_to_uhwi (lo_index) + 1);
6083 else
6084 this_node_count = 1;
6086 count += this_node_count;
6087 if (mostly_zeros_p (value))
6088 zero_count += this_node_count;
6091 /* Clear the entire array first if there are any missing
6092 elements, or if the incidence of zero elements is >=
6093 75%. */
6094 if (! need_to_clear
6095 && (count < maxelt - minelt + 1
6096 || 4 * zero_count >= 3 * count))
6097 need_to_clear = 1;
6100 if (need_to_clear && size > 0)
6102 if (REG_P (target))
6103 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6104 else
6105 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6106 cleared = 1;
6109 if (!cleared && REG_P (target))
6110 /* Inform later passes that the old value is dead. */
6111 emit_clobber (target);
6113 /* Store each element of the constructor into the
6114 corresponding element of TARGET, determined by counting the
6115 elements. */
6116 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6118 enum machine_mode mode;
6119 HOST_WIDE_INT bitsize;
6120 HOST_WIDE_INT bitpos;
6121 rtx xtarget = target;
6123 if (cleared && initializer_zerop (value))
6124 continue;
6126 mode = TYPE_MODE (elttype);
6127 if (mode == BLKmode)
6128 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6129 ? tree_to_uhwi (TYPE_SIZE (elttype))
6130 : -1);
6131 else
6132 bitsize = GET_MODE_BITSIZE (mode);
6134 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6136 tree lo_index = TREE_OPERAND (index, 0);
6137 tree hi_index = TREE_OPERAND (index, 1);
6138 rtx index_r, pos_rtx;
6139 HOST_WIDE_INT lo, hi, count;
6140 tree position;
6142 /* If the range is constant and "small", unroll the loop. */
6143 if (const_bounds_p
6144 && tree_fits_shwi_p (lo_index)
6145 && tree_fits_shwi_p (hi_index)
6146 && (lo = tree_to_shwi (lo_index),
6147 hi = tree_to_shwi (hi_index),
6148 count = hi - lo + 1,
6149 (!MEM_P (target)
6150 || count <= 2
6151 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6152 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6153 <= 40 * 8)))))
6155 lo -= minelt; hi -= minelt;
6156 for (; lo <= hi; lo++)
6158 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6160 if (MEM_P (target)
6161 && !MEM_KEEP_ALIAS_SET_P (target)
6162 && TREE_CODE (type) == ARRAY_TYPE
6163 && TYPE_NONALIASED_COMPONENT (type))
6165 target = copy_rtx (target);
6166 MEM_KEEP_ALIAS_SET_P (target) = 1;
6169 store_constructor_field
6170 (target, bitsize, bitpos, mode, value, cleared,
6171 get_alias_set (elttype));
6174 else
6176 rtx loop_start = gen_label_rtx ();
6177 rtx loop_end = gen_label_rtx ();
6178 tree exit_cond;
6180 expand_normal (hi_index);
6182 index = build_decl (EXPR_LOCATION (exp),
6183 VAR_DECL, NULL_TREE, domain);
6184 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6185 SET_DECL_RTL (index, index_r);
6186 store_expr (lo_index, index_r, 0, false);
6188 /* Build the head of the loop. */
6189 do_pending_stack_adjust ();
6190 emit_label (loop_start);
6192 /* Assign value to element index. */
6193 position =
6194 fold_convert (ssizetype,
6195 fold_build2 (MINUS_EXPR,
6196 TREE_TYPE (index),
6197 index,
6198 TYPE_MIN_VALUE (domain)));
6200 position =
6201 size_binop (MULT_EXPR, position,
6202 fold_convert (ssizetype,
6203 TYPE_SIZE_UNIT (elttype)));
6205 pos_rtx = expand_normal (position);
6206 xtarget = offset_address (target, pos_rtx,
6207 highest_pow2_factor (position));
6208 xtarget = adjust_address (xtarget, mode, 0);
6209 if (TREE_CODE (value) == CONSTRUCTOR)
6210 store_constructor (value, xtarget, cleared,
6211 bitsize / BITS_PER_UNIT);
6212 else
6213 store_expr (value, xtarget, 0, false);
6215 /* Generate a conditional jump to exit the loop. */
6216 exit_cond = build2 (LT_EXPR, integer_type_node,
6217 index, hi_index);
6218 jumpif (exit_cond, loop_end, -1);
6220 /* Update the loop counter, and jump to the head of
6221 the loop. */
6222 expand_assignment (index,
6223 build2 (PLUS_EXPR, TREE_TYPE (index),
6224 index, integer_one_node),
6225 false);
6227 emit_jump (loop_start);
6229 /* Build the end of the loop. */
6230 emit_label (loop_end);
6233 else if ((index != 0 && ! tree_fits_shwi_p (index))
6234 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6236 tree position;
6238 if (index == 0)
6239 index = ssize_int (1);
6241 if (minelt)
6242 index = fold_convert (ssizetype,
6243 fold_build2 (MINUS_EXPR,
6244 TREE_TYPE (index),
6245 index,
6246 TYPE_MIN_VALUE (domain)));
6248 position =
6249 size_binop (MULT_EXPR, index,
6250 fold_convert (ssizetype,
6251 TYPE_SIZE_UNIT (elttype)));
6252 xtarget = offset_address (target,
6253 expand_normal (position),
6254 highest_pow2_factor (position));
6255 xtarget = adjust_address (xtarget, mode, 0);
6256 store_expr (value, xtarget, 0, false);
6258 else
6260 if (index != 0)
6261 bitpos = ((tree_to_shwi (index) - minelt)
6262 * tree_to_uhwi (TYPE_SIZE (elttype)));
6263 else
6264 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6266 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6267 && TREE_CODE (type) == ARRAY_TYPE
6268 && TYPE_NONALIASED_COMPONENT (type))
6270 target = copy_rtx (target);
6271 MEM_KEEP_ALIAS_SET_P (target) = 1;
6273 store_constructor_field (target, bitsize, bitpos, mode, value,
6274 cleared, get_alias_set (elttype));
6277 break;
6280 case VECTOR_TYPE:
6282 unsigned HOST_WIDE_INT idx;
6283 constructor_elt *ce;
6284 int i;
6285 int need_to_clear;
6286 int icode = CODE_FOR_nothing;
6287 tree elttype = TREE_TYPE (type);
6288 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6289 enum machine_mode eltmode = TYPE_MODE (elttype);
6290 HOST_WIDE_INT bitsize;
6291 HOST_WIDE_INT bitpos;
6292 rtvec vector = NULL;
6293 unsigned n_elts;
6294 alias_set_type alias;
6296 gcc_assert (eltmode != BLKmode);
6298 n_elts = TYPE_VECTOR_SUBPARTS (type);
6299 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6301 enum machine_mode mode = GET_MODE (target);
6303 icode = (int) optab_handler (vec_init_optab, mode);
6304 if (icode != CODE_FOR_nothing)
6306 unsigned int i;
6308 vector = rtvec_alloc (n_elts);
6309 for (i = 0; i < n_elts; i++)
6310 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6314 /* If the constructor has fewer elements than the vector,
6315 clear the whole vector first. Similarly if this is a static
6316 constructor of a non-BLKmode object. */
6317 if (cleared)
6318 need_to_clear = 0;
6319 else if (REG_P (target) && TREE_STATIC (exp))
6320 need_to_clear = 1;
6321 else
6323 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6324 tree value;
6326 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6328 int n_elts_here = tree_to_uhwi
6329 (int_const_binop (TRUNC_DIV_EXPR,
6330 TYPE_SIZE (TREE_TYPE (value)),
6331 TYPE_SIZE (elttype)));
6333 count += n_elts_here;
6334 if (mostly_zeros_p (value))
6335 zero_count += n_elts_here;
6338 /* Clear the entire vector first if there are any missing elements,
6339 or if the incidence of zero elements is >= 75%. */
6340 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6343 if (need_to_clear && size > 0 && !vector)
6345 if (REG_P (target))
6346 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6347 else
6348 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6349 cleared = 1;
6352 /* Inform later passes that the old value is dead. */
6353 if (!cleared && !vector && REG_P (target))
6354 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6356 if (MEM_P (target))
6357 alias = MEM_ALIAS_SET (target);
6358 else
6359 alias = get_alias_set (elttype);
6361 /* Store each element of the constructor into the corresponding
6362 element of TARGET, determined by counting the elements. */
6363 for (idx = 0, i = 0;
6364 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6365 idx++, i += bitsize / elt_size)
6367 HOST_WIDE_INT eltpos;
6368 tree value = ce->value;
6370 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6371 if (cleared && initializer_zerop (value))
6372 continue;
6374 if (ce->index)
6375 eltpos = tree_to_uhwi (ce->index);
6376 else
6377 eltpos = i;
6379 if (vector)
6381 /* Vector CONSTRUCTORs should only be built from smaller
6382 vectors in the case of BLKmode vectors. */
6383 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6384 RTVEC_ELT (vector, eltpos)
6385 = expand_normal (value);
6387 else
6389 enum machine_mode value_mode =
6390 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6391 ? TYPE_MODE (TREE_TYPE (value))
6392 : eltmode;
6393 bitpos = eltpos * elt_size;
6394 store_constructor_field (target, bitsize, bitpos, value_mode,
6395 value, cleared, alias);
6399 if (vector)
6400 emit_insn (GEN_FCN (icode)
6401 (target,
6402 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6403 break;
6406 default:
6407 gcc_unreachable ();
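/* Illustrative sketch, not part of the build: the clearing strategy above in
   source terms.  When a constructor like the one below is expanded at run
   time, it names fewer fields than the structure, so the target is typically
   cleared as a block first and only the single nonzero field is then written
   via store_constructor_field; the zero fields are skipped because CLEARED
   is already set.  */
#if 0
void
example (void)
{
  struct { int a, b, c, d; } s = { .d = 7 };
  use (&s);   /* hypothetical consumer, just to keep 's' live.  */
}
#endif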
6411 /* Store the value of EXP (an expression tree)
6412 into a subfield of TARGET which has mode MODE and occupies
6413 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6414 If MODE is VOIDmode, it means that we are storing into a bit-field.
6416 BITREGION_START is the bit position of the first bitfield in this region.
6417 BITREGION_END is the bit position of the last bitfield in this region.
6418 These two fields are 0 if the C++ memory model does not apply,
6419 or if we are not interested in keeping track of bitfield regions.
6421 Always return const0_rtx unless we have something particular to
6422 return.
6424 ALIAS_SET is the alias set for the destination. This value will
6425 (in general) be different from that for TARGET, since TARGET is a
6426 reference to the containing structure.
6428 If NONTEMPORAL is true, try generating a nontemporal store. */
6430 static rtx
6431 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6432 unsigned HOST_WIDE_INT bitregion_start,
6433 unsigned HOST_WIDE_INT bitregion_end,
6434 enum machine_mode mode, tree exp,
6435 alias_set_type alias_set, bool nontemporal)
6437 if (TREE_CODE (exp) == ERROR_MARK)
6438 return const0_rtx;
6440 /* If we have nothing to store, do nothing unless the expression has
6441 side-effects. */
6442 if (bitsize == 0)
6443 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6445 if (GET_CODE (target) == CONCAT)
6447 /* We're storing into a struct containing a single __complex. */
6449 gcc_assert (!bitpos);
6450 return store_expr (exp, target, 0, nontemporal);
6453 /* If the structure is in a register or if the component
6454 is a bit field, we cannot use addressing to access it.
6455 Use bit-field techniques or SUBREG to store in it. */
6457 if (mode == VOIDmode
6458 || (mode != BLKmode && ! direct_store[(int) mode]
6459 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6460 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6461 || REG_P (target)
6462 || GET_CODE (target) == SUBREG
6463 /* If the field isn't aligned enough to store as an ordinary memref,
6464 store it as a bit field. */
6465 || (mode != BLKmode
6466 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6467 || bitpos % GET_MODE_ALIGNMENT (mode))
6468 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6469 || (bitpos % BITS_PER_UNIT != 0)))
6470 || (bitsize >= 0 && mode != BLKmode
6471 && GET_MODE_BITSIZE (mode) > bitsize)
6472 /* If the RHS and field are a constant size and the size of the
6473 RHS isn't the same size as the bitfield, we must use bitfield
6474 operations. */
6475 || (bitsize >= 0
6476 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6477 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6478 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6479 decl we must use bitfield operations. */
6480 || (bitsize >= 0
6481 && TREE_CODE (exp) == MEM_REF
6482 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6483 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6484 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6485 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6487 rtx temp;
6488 gimple nop_def;
6490 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6491 implies a mask operation. If the precision is the same size as
6492 the field we're storing into, that mask is redundant. This is
6493 particularly common with bit field assignments generated by the
6494 C front end. */
6495 nop_def = get_def_for_expr (exp, NOP_EXPR);
6496 if (nop_def)
6498 tree type = TREE_TYPE (exp);
6499 if (INTEGRAL_TYPE_P (type)
6500 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6501 && bitsize == TYPE_PRECISION (type))
6503 tree op = gimple_assign_rhs1 (nop_def);
6504 type = TREE_TYPE (op);
6505 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6506 exp = op;
6510 temp = expand_normal (exp);
6512 /* If BITSIZE is narrower than the size of the type of EXP
6513 we will be narrowing TEMP. Normally, what's wanted are the
6514 low-order bits. However, if EXP's type is a record and this is a
6515 big-endian machine, we want the upper BITSIZE bits. */
6516 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6517 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6518 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6519 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6520 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6521 NULL_RTX, 1);
6523 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6524 if (mode != VOIDmode && mode != BLKmode
6525 && mode != TYPE_MODE (TREE_TYPE (exp)))
6526 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6528 /* If the modes of TEMP and TARGET are both BLKmode, both
6529 must be in memory and BITPOS must be aligned on a byte
6530 boundary. If so, we simply do a block copy. Likewise
6531 for a BLKmode-like TARGET. */
6532 if (GET_MODE (temp) == BLKmode
6533 && (GET_MODE (target) == BLKmode
6534 || (MEM_P (target)
6535 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6536 && (bitpos % BITS_PER_UNIT) == 0
6537 && (bitsize % BITS_PER_UNIT) == 0)))
6539 gcc_assert (MEM_P (target) && MEM_P (temp)
6540 && (bitpos % BITS_PER_UNIT) == 0);
6542 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6543 emit_block_move (target, temp,
6544 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6545 / BITS_PER_UNIT),
6546 BLOCK_OP_NORMAL);
6548 return const0_rtx;
6551 /* Handle calls that return values in multiple non-contiguous locations.
6552 The Irix 6 ABI has examples of this. */
6553 if (GET_CODE (temp) == PARALLEL)
6555 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6556 rtx temp_target;
6557 if (mode == BLKmode)
6558 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6559 temp_target = gen_reg_rtx (mode);
6560 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6561 temp = temp_target;
6563 else if (mode == BLKmode)
6565 /* Handle calls that return BLKmode values in registers. */
6566 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6568 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6569 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6570 temp = temp_target;
6572 else
6574 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6575 rtx temp_target;
6576 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6577 temp_target = gen_reg_rtx (mode);
6578 temp_target
6579 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6580 temp_target, mode, mode);
6581 temp = temp_target;
6585 /* Store the value in the bitfield. */
6586 store_bit_field (target, bitsize, bitpos,
6587 bitregion_start, bitregion_end,
6588 mode, temp);
6590 return const0_rtx;
6592 else
6594 /* Now build a reference to just the desired component. */
6595 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6597 if (to_rtx == target)
6598 to_rtx = copy_rtx (to_rtx);
6600 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6601 set_mem_alias_set (to_rtx, alias_set);
6603 return store_expr (exp, to_rtx, 0, nontemporal);
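/* Illustrative sketch, not part of the build: a store that takes the
   bit-field path above.  MODE is VOIDmode for a bit-field, so the value is
   expanded to a register and written with store_bit_field.  */
#if 0
struct bits { unsigned int x : 3, y : 5; };
void
set_y (struct bits *p)
{
  p->y = 9;   /* expands via store_bit_field; on a typical little-endian,
                 byte-addressed target BITSIZE is 5 and BITPOS is 3.  */
}
#endif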
6607 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6608 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6609 codes and find the ultimate containing object, which we return.
6611 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6612 bit position, and *PUNSIGNEDP to the signedness of the field.
6613 If the position of the field is variable, we store a tree
6614 giving the variable offset (in units) in *POFFSET.
6615 This offset is in addition to the bit position.
6616 If the position is not variable, we store 0 in *POFFSET.
6618 If any of the extraction expressions is volatile,
6619 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6621 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6622 Otherwise, it is a mode that can be used to access the field.
6624 If the field describes a variable-sized object, *PMODE is set to
6625 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6626 this case, but the address of the object can be found.
6628 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6629 look through nodes that serve as markers of a greater alignment than
6630 the one that can be deduced from the expression. These nodes make it
6631 possible for front-ends to prevent temporaries from being created by
6632 the middle-end on alignment considerations. For that purpose, the
6633 normal operating mode at high-level is to always pass FALSE so that
6634 the ultimate containing object is really returned; moreover, the
6635 associated predicate handled_component_p will always return TRUE
6636 on these nodes, thus indicating that they are essentially handled
6637 by get_inner_reference. TRUE should only be passed when the caller
6638 is scanning the expression in order to build another representation
6639 and specifically knows how to handle these nodes; as such, this is
6640 the normal operating mode in the RTL expanders. */
6642 tree
6643 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6644 HOST_WIDE_INT *pbitpos, tree *poffset,
6645 enum machine_mode *pmode, int *punsignedp,
6646 int *pvolatilep, bool keep_aligning)
6648 tree size_tree = 0;
6649 enum machine_mode mode = VOIDmode;
6650 bool blkmode_bitfield = false;
6651 tree offset = size_zero_node;
6652 double_int bit_offset = double_int_zero;
6654 /* First get the mode, signedness, and size. We do this from just the
6655 outermost expression. */
6656 *pbitsize = -1;
6657 if (TREE_CODE (exp) == COMPONENT_REF)
6659 tree field = TREE_OPERAND (exp, 1);
6660 size_tree = DECL_SIZE (field);
6661 if (flag_strict_volatile_bitfields > 0
6662 && TREE_THIS_VOLATILE (exp)
6663 && DECL_BIT_FIELD_TYPE (field)
6664 && DECL_MODE (field) != BLKmode)
6665 /* Volatile bitfields should be accessed in the mode of the
6666 field's type, not the mode computed based on the bit
6667 size. */
6668 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6669 else if (!DECL_BIT_FIELD (field))
6670 mode = DECL_MODE (field);
6671 else if (DECL_MODE (field) == BLKmode)
6672 blkmode_bitfield = true;
6674 *punsignedp = DECL_UNSIGNED (field);
6676 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6678 size_tree = TREE_OPERAND (exp, 1);
6679 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6680 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6682 /* For vector types, with the correct size of access, use the mode of
6683 the inner type. */
6684 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6685 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6686 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6687 mode = TYPE_MODE (TREE_TYPE (exp));
6689 else
6691 mode = TYPE_MODE (TREE_TYPE (exp));
6692 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6694 if (mode == BLKmode)
6695 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6696 else
6697 *pbitsize = GET_MODE_BITSIZE (mode);
6700 if (size_tree != 0)
6702 if (! tree_fits_uhwi_p (size_tree))
6703 mode = BLKmode, *pbitsize = -1;
6704 else
6705 *pbitsize = tree_to_uhwi (size_tree);
6708 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6709 and find the ultimate containing object. */
6710 while (1)
6712 switch (TREE_CODE (exp))
6714 case BIT_FIELD_REF:
6715 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6716 break;
6718 case COMPONENT_REF:
6720 tree field = TREE_OPERAND (exp, 1);
6721 tree this_offset = component_ref_field_offset (exp);
6723 /* If this field hasn't been filled in yet, don't go past it.
6724 This should only happen when folding expressions made during
6725 type construction. */
6726 if (this_offset == 0)
6727 break;
6729 offset = size_binop (PLUS_EXPR, offset, this_offset);
6730 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6732 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6734 break;
6736 case ARRAY_REF:
6737 case ARRAY_RANGE_REF:
6739 tree index = TREE_OPERAND (exp, 1);
6740 tree low_bound = array_ref_low_bound (exp);
6741 tree unit_size = array_ref_element_size (exp);
6743 /* We assume all arrays have sizes that are a multiple of a byte.
6744 First subtract the lower bound, if any, in the type of the
6745 index, then convert to sizetype and multiply by the size of
6746 the array element. */
6747 if (! integer_zerop (low_bound))
6748 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6749 index, low_bound);
6751 offset = size_binop (PLUS_EXPR, offset,
6752 size_binop (MULT_EXPR,
6753 fold_convert (sizetype, index),
6754 unit_size));
6756 break;
6758 case REALPART_EXPR:
6759 break;
6761 case IMAGPART_EXPR:
6762 bit_offset += double_int::from_uhwi (*pbitsize);
6763 break;
6765 case VIEW_CONVERT_EXPR:
6766 if (keep_aligning && STRICT_ALIGNMENT
6767 && (TYPE_ALIGN (TREE_TYPE (exp))
6768 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6769 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6770 < BIGGEST_ALIGNMENT)
6771 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6772 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6773 goto done;
6774 break;
6776 case MEM_REF:
6777 /* Hand back the decl for MEM[&decl, off]. */
6778 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6780 tree off = TREE_OPERAND (exp, 1);
6781 if (!integer_zerop (off))
6783 double_int boff, coff = mem_ref_offset (exp);
6784 boff = coff.lshift (BITS_PER_UNIT == 8
6785 ? 3 : exact_log2 (BITS_PER_UNIT));
6786 bit_offset += boff;
6788 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6790 goto done;
6792 default:
6793 goto done;
6796 /* If any reference in the chain is volatile, the effect is volatile. */
6797 if (TREE_THIS_VOLATILE (exp))
6798 *pvolatilep = 1;
6800 exp = TREE_OPERAND (exp, 0);
6802 done:
6804 /* If OFFSET is constant, see if we can return the whole thing as a
6805 constant bit position. Make sure to handle overflow during
6806 this conversion. */
6807 if (TREE_CODE (offset) == INTEGER_CST)
6809 double_int tem = tree_to_double_int (offset);
6810 tem = tem.sext (TYPE_PRECISION (sizetype));
6811 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6812 tem += bit_offset;
6813 if (tem.fits_shwi ())
6815 *pbitpos = tem.to_shwi ();
6816 *poffset = offset = NULL_TREE;
6820 /* Otherwise, split it up. */
6821 if (offset)
6823 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6824 if (bit_offset.is_negative ())
6826 double_int mask
6827 = double_int::mask (BITS_PER_UNIT == 8
6828 ? 3 : exact_log2 (BITS_PER_UNIT));
6829 double_int tem = bit_offset.and_not (mask);
6830 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6831 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6832 bit_offset -= tem;
6833 tem = tem.arshift (BITS_PER_UNIT == 8
6834 ? 3 : exact_log2 (BITS_PER_UNIT),
6835 HOST_BITS_PER_DOUBLE_INT);
6836 offset = size_binop (PLUS_EXPR, offset,
6837 double_int_to_tree (sizetype, tem));
6840 *pbitpos = bit_offset.to_shwi ();
6841 *poffset = offset;
6844 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6845 if (mode == VOIDmode
6846 && blkmode_bitfield
6847 && (*pbitpos % BITS_PER_UNIT) == 0
6848 && (*pbitsize % BITS_PER_UNIT) == 0)
6849 *pmode = BLKmode;
6850 else
6851 *pmode = mode;
6853 return exp;
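/* Illustrative sketch, not part of the build: a decomposition performed by
   get_inner_reference.  Assuming a 4-byte int and the layout below, for the
   reference p->a[i].f the function returns the innermost base (here the
   indirection of 'p') with *PBITSIZE == 32, *PBITPOS == 32 (the constant
   byte 4 of the element, in bits) and *POFFSET a sizetype tree for i * 8,
   the variable part of the address in bytes.  */
#if 0
struct elt { int pad; int f; };        /* 'f' at byte 4 of an 8-byte element */
struct outer { struct elt a[16]; };
int
get_f (struct outer *p, int i)
{
  return p->a[i].f;
}
#endif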
6856 /* Return a tree of sizetype representing the size, in bytes, of the element
6857 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6859 tree
6860 array_ref_element_size (tree exp)
6862 tree aligned_size = TREE_OPERAND (exp, 3);
6863 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6864 location_t loc = EXPR_LOCATION (exp);
6866 /* If a size was specified in the ARRAY_REF, it's the size measured
6867 in alignment units of the element type. So multiply by that value. */
6868 if (aligned_size)
6870 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6871 sizetype from another type of the same width and signedness. */
6872 if (TREE_TYPE (aligned_size) != sizetype)
6873 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6874 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6875 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6878 /* Otherwise, take the size from that of the element type. Substitute
6879 any PLACEHOLDER_EXPR that we have. */
6880 else
6881 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6884 /* Return a tree representing the lower bound of the array mentioned in
6885 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6887 tree
6888 array_ref_low_bound (tree exp)
6890 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6892 /* If a lower bound is specified in EXP, use it. */
6893 if (TREE_OPERAND (exp, 2))
6894 return TREE_OPERAND (exp, 2);
6896 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6897 substituting for a PLACEHOLDER_EXPR as needed. */
6898 if (domain_type && TYPE_MIN_VALUE (domain_type))
6899 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6901 /* Otherwise, return a zero of the appropriate type. */
6902 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6905 /* Returns true if REF is an array reference to an array at the end of
6906 a structure. If this is the case, the array may be allocated larger
6907 than its upper bound implies. */
6909 bool
6910 array_at_struct_end_p (tree ref)
6912 if (TREE_CODE (ref) != ARRAY_REF
6913 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6914 return false;
6916 while (handled_component_p (ref))
6918 /* If the reference chain contains a component reference to a
6919 non-union type and another field follows it, the reference
6920 is not at the end of a structure. */
6921 if (TREE_CODE (ref) == COMPONENT_REF
6922 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6924 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6925 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6926 nextf = DECL_CHAIN (nextf);
6927 if (nextf)
6928 return false;
6931 ref = TREE_OPERAND (ref, 0);
6934 /* If the reference is based on a declared entity, the size of the array
6935 is constrained by its given domain. */
6936 if (DECL_P (ref))
6937 return false;
6939 return true;
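/* Illustrative sketch, not part of the build: references the predicate above
   distinguishes.  */
#if 0
struct trailing { int n; int tail[1]; };
struct leading { int tail[1]; int n; };

int
read_trailing (struct trailing *p, int i)
{
  return p->tail[i];   /* true: 'tail' is the last field and the base is
                          not a declared object, so the array may have
                          been allocated larger than its bound.  */
}

int
read_leading (struct leading *q, int i)
{
  return q->tail[i];   /* false: another field follows 'tail'.  */
}
#endif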
6942 /* Return a tree representing the upper bound of the array mentioned in
6943 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6945 tree
6946 array_ref_up_bound (tree exp)
6948 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6950 /* If there is a domain type and it has an upper bound, use it, substituting
6951 for a PLACEHOLDER_EXPR as needed. */
6952 if (domain_type && TYPE_MAX_VALUE (domain_type))
6953 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6955 /* Otherwise fail. */
6956 return NULL_TREE;
6959 /* Return a tree representing the offset, in bytes, of the field referenced
6960 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6962 tree
6963 component_ref_field_offset (tree exp)
6965 tree aligned_offset = TREE_OPERAND (exp, 2);
6966 tree field = TREE_OPERAND (exp, 1);
6967 location_t loc = EXPR_LOCATION (exp);
6969 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6970 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6971 value. */
6972 if (aligned_offset)
6974 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6975 sizetype from another type of the same width and signedness. */
6976 if (TREE_TYPE (aligned_offset) != sizetype)
6977 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6978 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6979 size_int (DECL_OFFSET_ALIGN (field)
6980 / BITS_PER_UNIT));
6983 /* Otherwise, take the offset from that of the field. Substitute
6984 any PLACEHOLDER_EXPR that we have. */
6985 else
6986 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6989 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6991 static unsigned HOST_WIDE_INT
6992 target_align (const_tree target)
6994 /* We might have a chain of nested references with intermediate misaligning
6995 bitfield components, so we need to recurse to find out. */
6997 unsigned HOST_WIDE_INT this_align, outer_align;
6999 switch (TREE_CODE (target))
7001 case BIT_FIELD_REF:
7002 return 1;
7004 case COMPONENT_REF:
7005 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7006 outer_align = target_align (TREE_OPERAND (target, 0));
7007 return MIN (this_align, outer_align);
7009 case ARRAY_REF:
7010 case ARRAY_RANGE_REF:
7011 this_align = TYPE_ALIGN (TREE_TYPE (target));
7012 outer_align = target_align (TREE_OPERAND (target, 0));
7013 return MIN (this_align, outer_align);
7015 CASE_CONVERT:
7016 case NON_LVALUE_EXPR:
7017 case VIEW_CONVERT_EXPR:
7018 this_align = TYPE_ALIGN (TREE_TYPE (target));
7019 outer_align = target_align (TREE_OPERAND (target, 0));
7020 return MAX (this_align, outer_align);
7022 default:
7023 return TYPE_ALIGN (TREE_TYPE (target));
7028 /* Given an rtx VALUE that may contain additions and multiplications, return
7029 an equivalent value that just refers to a register, memory, or constant.
7030 This is done by generating instructions to perform the arithmetic and
7031 returning a pseudo-register containing the value.
7033 The returned value may be a REG, SUBREG, MEM or constant. */
7036 force_operand (rtx value, rtx target)
7038 rtx op1, op2;
7039 /* Use subtarget as the target for operand 0 of a binary operation. */
7040 rtx subtarget = get_subtarget (target);
7041 enum rtx_code code = GET_CODE (value);
7043 /* Check for subreg applied to an expression produced by loop optimizer. */
7044 if (code == SUBREG
7045 && !REG_P (SUBREG_REG (value))
7046 && !MEM_P (SUBREG_REG (value)))
7048 value
7049 = simplify_gen_subreg (GET_MODE (value),
7050 force_reg (GET_MODE (SUBREG_REG (value)),
7051 force_operand (SUBREG_REG (value),
7052 NULL_RTX)),
7053 GET_MODE (SUBREG_REG (value)),
7054 SUBREG_BYTE (value));
7055 code = GET_CODE (value);
7058 /* Check for a PIC address load. */
7059 if ((code == PLUS || code == MINUS)
7060 && XEXP (value, 0) == pic_offset_table_rtx
7061 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7062 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7063 || GET_CODE (XEXP (value, 1)) == CONST))
7065 if (!subtarget)
7066 subtarget = gen_reg_rtx (GET_MODE (value));
7067 emit_move_insn (subtarget, value);
7068 return subtarget;
7071 if (ARITHMETIC_P (value))
7073 op2 = XEXP (value, 1);
7074 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7075 subtarget = 0;
7076 if (code == MINUS && CONST_INT_P (op2))
7078 code = PLUS;
7079 op2 = negate_rtx (GET_MODE (value), op2);
7082 /* Check for an addition with OP2 a constant integer and our first
7083 operand a PLUS of a virtual register and something else. In that
7084 case, we want to emit the sum of the virtual register and the
7085 constant first and then add the other value. This allows virtual
7086 register instantiation to simply modify the constant rather than
7087 creating another one around this addition. */
7088 if (code == PLUS && CONST_INT_P (op2)
7089 && GET_CODE (XEXP (value, 0)) == PLUS
7090 && REG_P (XEXP (XEXP (value, 0), 0))
7091 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7092 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7094 rtx temp = expand_simple_binop (GET_MODE (value), code,
7095 XEXP (XEXP (value, 0), 0), op2,
7096 subtarget, 0, OPTAB_LIB_WIDEN);
7097 return expand_simple_binop (GET_MODE (value), code, temp,
7098 force_operand (XEXP (XEXP (value,
7099 0), 1), 0),
7100 target, 0, OPTAB_LIB_WIDEN);
7103 op1 = force_operand (XEXP (value, 0), subtarget);
7104 op2 = force_operand (op2, NULL_RTX);
7105 switch (code)
7107 case MULT:
7108 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7109 case DIV:
7110 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7111 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7112 target, 1, OPTAB_LIB_WIDEN);
7113 else
7114 return expand_divmod (0,
7115 FLOAT_MODE_P (GET_MODE (value))
7116 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7117 GET_MODE (value), op1, op2, target, 0);
7118 case MOD:
7119 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7120 target, 0);
7121 case UDIV:
7122 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7123 target, 1);
7124 case UMOD:
7125 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7126 target, 1);
7127 case ASHIFTRT:
7128 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7129 target, 0, OPTAB_LIB_WIDEN);
7130 default:
7131 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7132 target, 1, OPTAB_LIB_WIDEN);
7135 if (UNARY_P (value))
7137 if (!target)
7138 target = gen_reg_rtx (GET_MODE (value));
7139 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7140 switch (code)
7142 case ZERO_EXTEND:
7143 case SIGN_EXTEND:
7144 case TRUNCATE:
7145 case FLOAT_EXTEND:
7146 case FLOAT_TRUNCATE:
7147 convert_move (target, op1, code == ZERO_EXTEND);
7148 return target;
7150 case FIX:
7151 case UNSIGNED_FIX:
7152 expand_fix (target, op1, code == UNSIGNED_FIX);
7153 return target;
7155 case FLOAT:
7156 case UNSIGNED_FLOAT:
7157 expand_float (target, op1, code == UNSIGNED_FLOAT);
7158 return target;
7160 default:
7161 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7165 #ifdef INSN_SCHEDULING
7166 /* On machines that have insn scheduling, we want all memory references to be
7167 explicit, so we need to deal with such paradoxical SUBREGs. */
7168 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7169 value
7170 = simplify_gen_subreg (GET_MODE (value),
7171 force_reg (GET_MODE (SUBREG_REG (value)),
7172 force_operand (SUBREG_REG (value),
7173 NULL_RTX)),
7174 GET_MODE (SUBREG_REG (value)),
7175 SUBREG_BYTE (value));
7176 #endif
7178 return value;
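/* Illustrative sketch, not part of the build: a hypothetical use of
   force_operand inside the compiler, on a compound address computation.  The
   multiply and the addition are emitted as real insns and the caller is left
   with a plain register operand.  */
#if 0
rtx addr = gen_rtx_PLUS (SImode,
                         gen_rtx_MULT (SImode, gen_rtx_REG (SImode, 100),
                                       GEN_INT (4)),
                         gen_rtx_REG (SImode, 101));
rtx op = force_operand (addr, NULL_RTX);   /* emits the mult and the add,
                                              returns a pseudo REG holding
                                              the result.  */
#endif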
7181 /* Subroutine of expand_expr: return nonzero iff there is no way that
7182 EXP can reference X, which is being modified. TOP_P is nonzero if this
7183 call is going to be used to determine whether we need a temporary
7184 for EXP, as opposed to a recursive call to this function.
7186 It is always safe for this routine to return zero since it merely
7187 searches for optimization opportunities. */
7190 safe_from_p (const_rtx x, tree exp, int top_p)
7192 rtx exp_rtl = 0;
7193 int i, nops;
7195 if (x == 0
7196 /* If EXP has varying size, we MUST use a target since we currently
7197 have no way of allocating temporaries of variable size
7198 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7199 So we assume here that something at a higher level has prevented a
7200 clash. This is somewhat bogus, but the best we can do. Only
7201 do this when X is BLKmode and when we are at the top level. */
7202 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7203 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7204 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7205 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7206 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7207 != INTEGER_CST)
7208 && GET_MODE (x) == BLKmode)
7209 /* If X is in the outgoing argument area, it is always safe. */
7210 || (MEM_P (x)
7211 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7212 || (GET_CODE (XEXP (x, 0)) == PLUS
7213 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7214 return 1;
7216 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
7217 find the underlying pseudo. */
7218 if (GET_CODE (x) == SUBREG)
7220 x = SUBREG_REG (x);
7221 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7222 return 0;
7225 /* Now look at our tree code and possibly recurse. */
7226 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7228 case tcc_declaration:
7229 exp_rtl = DECL_RTL_IF_SET (exp);
7230 break;
7232 case tcc_constant:
7233 return 1;
7235 case tcc_exceptional:
7236 if (TREE_CODE (exp) == TREE_LIST)
7238 while (1)
7240 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7241 return 0;
7242 exp = TREE_CHAIN (exp);
7243 if (!exp)
7244 return 1;
7245 if (TREE_CODE (exp) != TREE_LIST)
7246 return safe_from_p (x, exp, 0);
7249 else if (TREE_CODE (exp) == CONSTRUCTOR)
7251 constructor_elt *ce;
7252 unsigned HOST_WIDE_INT idx;
7254 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7255 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7256 || !safe_from_p (x, ce->value, 0))
7257 return 0;
7258 return 1;
7260 else if (TREE_CODE (exp) == ERROR_MARK)
7261 return 1; /* An already-visited SAVE_EXPR? */
7262 else
7263 return 0;
7265 case tcc_statement:
7266 /* The only case we look at here is the DECL_INITIAL inside a
7267 DECL_EXPR. */
7268 return (TREE_CODE (exp) != DECL_EXPR
7269 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7270 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7271 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7273 case tcc_binary:
7274 case tcc_comparison:
7275 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7276 return 0;
7277 /* Fall through. */
7279 case tcc_unary:
7280 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7282 case tcc_expression:
7283 case tcc_reference:
7284 case tcc_vl_exp:
7285 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7286 the expression. If it is set, we conflict iff we are that rtx or
7287 both are in memory. Otherwise, we check all operands of the
7288 expression recursively. */
7290 switch (TREE_CODE (exp))
7292 case ADDR_EXPR:
7293 /* If the operand is static or we are static, we can't conflict.
7294 Likewise if we don't conflict with the operand at all. */
7295 if (staticp (TREE_OPERAND (exp, 0))
7296 || TREE_STATIC (exp)
7297 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7298 return 1;
7300 /* Otherwise, the only way this can conflict is if we are taking
7301 the address of a DECL whose address is part of X, which is
7302 very rare. */
7303 exp = TREE_OPERAND (exp, 0);
7304 if (DECL_P (exp))
7306 if (!DECL_RTL_SET_P (exp)
7307 || !MEM_P (DECL_RTL (exp)))
7308 return 0;
7309 else
7310 exp_rtl = XEXP (DECL_RTL (exp), 0);
7312 break;
7314 case MEM_REF:
7315 if (MEM_P (x)
7316 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7317 get_alias_set (exp)))
7318 return 0;
7319 break;
7321 case CALL_EXPR:
7322 /* Assume that the call will clobber all hard registers and
7323 all of memory. */
7324 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7325 || MEM_P (x))
7326 return 0;
7327 break;
7329 case WITH_CLEANUP_EXPR:
7330 case CLEANUP_POINT_EXPR:
7331 /* Lowered by gimplify.c. */
7332 gcc_unreachable ();
7334 case SAVE_EXPR:
7335 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7337 default:
7338 break;
7341 /* If we have an rtx, we do not need to scan our operands. */
7342 if (exp_rtl)
7343 break;
7345 nops = TREE_OPERAND_LENGTH (exp);
7346 for (i = 0; i < nops; i++)
7347 if (TREE_OPERAND (exp, i) != 0
7348 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7349 return 0;
7351 break;
7353 case tcc_type:
7354 /* Should never get a type here. */
7355 gcc_unreachable ();
7358 /* If we have an rtl, find any enclosed object. Then see if we conflict
7359 with it. */
7360 if (exp_rtl)
7362 if (GET_CODE (exp_rtl) == SUBREG)
7364 exp_rtl = SUBREG_REG (exp_rtl);
7365 if (REG_P (exp_rtl)
7366 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7367 return 0;
7370 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7371 are memory and they conflict. */
7372 return ! (rtx_equal_p (x, exp_rtl)
7373 || (MEM_P (x) && MEM_P (exp_rtl)
7374 && true_dependence (exp_rtl, VOIDmode, x)));
7377 /* If we reach here, it is safe. */
7378 return 1;
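/* Illustrative sketch, not part of the build: typical answers from the
   routine above.  If X is a MEM and EXP contains a CALL_EXPR, the call is
   assumed to clobber all of memory, so the result is 0 (a temporary will be
   needed); if EXP is a constant, or a decl whose DECL_RTL is a pseudo
   register different from X, the result is 1 and X can be used while EXP is
   evaluated.  */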
7382 /* Return the highest power of two that EXP is known to be a multiple of.
7383 This is used in updating alignment of MEMs in array references. */
7385 unsigned HOST_WIDE_INT
7386 highest_pow2_factor (const_tree exp)
7388 unsigned HOST_WIDE_INT ret;
7389 int trailing_zeros = tree_ctz (exp);
7390 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7391 return BIGGEST_ALIGNMENT;
7392 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7393 if (ret > BIGGEST_ALIGNMENT)
7394 return BIGGEST_ALIGNMENT;
7395 return ret;
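/* Illustrative sketch, not part of the build: values produced by the routine
   above, which feed MEM alignment for array references.
     i * 4        -> 4
     i * 12 + 8   -> 4    (both terms are multiples of 4)
     i * 16       -> 16
     48           -> 16   (largest power-of-2 divisor of the constant)
   Results larger than BIGGEST_ALIGNMENT are capped at BIGGEST_ALIGNMENT.  */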
7398 /* Similar, except that the alignment requirements of TARGET are
7399 taken into account. Assume it is at least as aligned as its
7400 type, unless it is a COMPONENT_REF in which case the layout of
7401 the structure gives the alignment. */
7403 static unsigned HOST_WIDE_INT
7404 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7406 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7407 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7409 return MAX (factor, talign);
7412 #ifdef HAVE_conditional_move
7413 /* Convert the tree comparison code TCODE to the rtl one where the
7414 signedness is UNSIGNEDP. */
7416 static enum rtx_code
7417 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7419 enum rtx_code code;
7420 switch (tcode)
7422 case EQ_EXPR:
7423 code = EQ;
7424 break;
7425 case NE_EXPR:
7426 code = NE;
7427 break;
7428 case LT_EXPR:
7429 code = unsignedp ? LTU : LT;
7430 break;
7431 case LE_EXPR:
7432 code = unsignedp ? LEU : LE;
7433 break;
7434 case GT_EXPR:
7435 code = unsignedp ? GTU : GT;
7436 break;
7437 case GE_EXPR:
7438 code = unsignedp ? GEU : GE;
7439 break;
7440 case UNORDERED_EXPR:
7441 code = UNORDERED;
7442 break;
7443 case ORDERED_EXPR:
7444 code = ORDERED;
7445 break;
7446 case UNLT_EXPR:
7447 code = UNLT;
7448 break;
7449 case UNLE_EXPR:
7450 code = UNLE;
7451 break;
7452 case UNGT_EXPR:
7453 code = UNGT;
7454 break;
7455 case UNGE_EXPR:
7456 code = UNGE;
7457 break;
7458 case UNEQ_EXPR:
7459 code = UNEQ;
7460 break;
7461 case LTGT_EXPR:
7462 code = LTGT;
7463 break;
7465 default:
7466 gcc_unreachable ();
7468 return code;
7470 #endif
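/* Illustration of the mapping above: only the ordering codes depend on
   UNSIGNEDP, e.g.

       convert_tree_comp_to_rtx (LT_EXPR, 0) -> LT
       convert_tree_comp_to_rtx (LT_EXPR, 1) -> LTU
       convert_tree_comp_to_rtx (EQ_EXPR, 1) -> EQ  */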
7472 /* Subroutine of expand_expr. Expand the two operands of a binary
7473 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7474 The value may be stored in TARGET if TARGET is nonzero. The
7475 MODIFIER argument is as documented by expand_expr. */
7477 static void
7478 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7479 enum expand_modifier modifier)
7481 if (! safe_from_p (target, exp1, 1))
7482 target = 0;
7483 if (operand_equal_p (exp0, exp1, 0))
7485 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7486 *op1 = copy_rtx (*op0);
7488 else
7490 /* If we need to preserve evaluation order, copy exp0 into its own
7491 temporary variable so that it can't be clobbered by exp1. */
7492 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7493 exp0 = save_expr (exp0);
7494 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7495 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
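/* A minimal sketch of the fast path above, assuming both operands are
   the same tree (e.g. when expanding x * x): the operand is expanded
   only once and *OP1 becomes a copy of *OP0.

       rtx op0, op1;
       expand_operands (treeop0, treeop0, subtarget, &op0, &op1,
                        EXPAND_NORMAL);  */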
7500 /* Return a MEM that contains constant EXP. DEFER is as for
7501 output_constant_def and MODIFIER is as for expand_expr. */
7503 static rtx
7504 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7506 rtx mem;
7508 mem = output_constant_def (exp, defer);
7509 if (modifier != EXPAND_INITIALIZER)
7510 mem = use_anchored_address (mem);
7511 return mem;
7514 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7515 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7517 static rtx
7518 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7519 enum expand_modifier modifier, addr_space_t as)
7521 rtx result, subtarget;
7522 tree inner, offset;
7523 HOST_WIDE_INT bitsize, bitpos;
7524 int volatilep, unsignedp;
7525 enum machine_mode mode1;
7527 /* If we are taking the address of a constant and are at the top level,
7528 we have to use output_constant_def since we can't call force_const_mem
7529 at top level. */
7530 /* ??? This should be considered a front-end bug. We should not be
7531 generating ADDR_EXPR of something that isn't an LVALUE. The only
7532 exception here is STRING_CST. */
7533 if (CONSTANT_CLASS_P (exp))
7535 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7536 if (modifier < EXPAND_SUM)
7537 result = force_operand (result, target);
7538 return result;
7541 /* Everything must be something allowed by is_gimple_addressable. */
7542 switch (TREE_CODE (exp))
7544 case INDIRECT_REF:
7545 /* This case will happen via recursion for &a->b. */
7546 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7548 case MEM_REF:
7550 tree tem = TREE_OPERAND (exp, 0);
7551 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7552 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7553 return expand_expr (tem, target, tmode, modifier);
7556 case CONST_DECL:
7557 /* Expand the initializer like constants above. */
7558 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7559 0, modifier), 0);
7560 if (modifier < EXPAND_SUM)
7561 result = force_operand (result, target);
7562 return result;
7564 case REALPART_EXPR:
7565 /* The real part of the complex number is always first, therefore
7566 the address is the same as the address of the parent object. */
7567 offset = 0;
7568 bitpos = 0;
7569 inner = TREE_OPERAND (exp, 0);
7570 break;
7572 case IMAGPART_EXPR:
7573 /* The imaginary part of the complex number is always second.
7574 The expression is therefore always offset by the size of the
7575 scalar type. */
7576 offset = 0;
7577 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7578 inner = TREE_OPERAND (exp, 0);
7579 break;
7581 case COMPOUND_LITERAL_EXPR:
7582 /* Allow COMPOUND_LITERAL_EXPR in initializers: if e.g.
7583 rtl_for_decl_init is called on DECL_INITIAL with
7584 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7585 if (modifier == EXPAND_INITIALIZER
7586 && COMPOUND_LITERAL_EXPR_DECL (exp))
7587 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7588 target, tmode, modifier, as);
7589 /* FALLTHRU */
7590 default:
7591 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7592 expand_expr, as that can have various side effects; LABEL_DECLs for
7593 example, may not have their DECL_RTL set yet. Expand the rtl of
7594 CONSTRUCTORs too, which should yield a memory reference for the
7595 constructor's contents. Assume language specific tree nodes can
7596 be expanded in some interesting way. */
7597 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7598 if (DECL_P (exp)
7599 || TREE_CODE (exp) == CONSTRUCTOR
7600 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7602 result = expand_expr (exp, target, tmode,
7603 modifier == EXPAND_INITIALIZER
7604 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7606 /* If the DECL isn't in memory, then the DECL wasn't properly
7607 marked TREE_ADDRESSABLE, which will be either a front-end
7608 or a tree optimizer bug. */
7610 if (TREE_ADDRESSABLE (exp)
7611 && ! MEM_P (result)
7612 && ! targetm.calls.allocate_stack_slots_for_args ())
7614 error ("local frame unavailable (naked function?)");
7615 return result;
7617 else
7618 gcc_assert (MEM_P (result));
7619 result = XEXP (result, 0);
7621 /* ??? Is this needed anymore? */
7622 if (DECL_P (exp))
7623 TREE_USED (exp) = 1;
7625 if (modifier != EXPAND_INITIALIZER
7626 && modifier != EXPAND_CONST_ADDRESS
7627 && modifier != EXPAND_SUM)
7628 result = force_operand (result, target);
7629 return result;
7632 /* Pass FALSE as the last argument to get_inner_reference although
7633 we are expanding to RTL. The rationale is that we know how to
7634 handle "aligning nodes" here: we can just bypass them because
7635 they won't change the final object whose address will be returned
7636 (they actually exist only for that purpose). */
7637 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7638 &mode1, &unsignedp, &volatilep, false);
7639 break;
7642 /* We must have made progress. */
7643 gcc_assert (inner != exp);
7645 subtarget = offset || bitpos ? NULL_RTX : target;
7646 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7647 inner alignment, force the inner to be sufficiently aligned. */
7648 if (CONSTANT_CLASS_P (inner)
7649 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7651 inner = copy_node (inner);
7652 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7653 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7654 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7656 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7658 if (offset)
7660 rtx tmp;
7662 if (modifier != EXPAND_NORMAL)
7663 result = force_operand (result, NULL);
7664 tmp = expand_expr (offset, NULL_RTX, tmode,
7665 modifier == EXPAND_INITIALIZER
7666 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7668 result = convert_memory_address_addr_space (tmode, result, as);
7669 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7671 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7672 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7673 else
7675 subtarget = bitpos ? NULL_RTX : target;
7676 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7677 1, OPTAB_LIB_WIDEN);
7681 if (bitpos)
7683 /* Someone beforehand should have rejected taking the address
7684 of such an object. */
7685 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7687 result = convert_memory_address_addr_space (tmode, result, as);
7688 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7689 if (modifier < EXPAND_SUM)
7690 result = force_operand (result, target);
7693 return result;
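/* A worked example for the IMAGPART_EXPR case above, assuming a
   _Complex double object C on a target where DFmode is 64 bits wide
   and BITS_PER_UNIT is 8: bitpos is 64, so the address of __imag C is
   the address of C plus 64 / 8 == 8 bytes, added via plus_constant.  */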
7696 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7697 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7699 static rtx
7700 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7701 enum expand_modifier modifier)
7703 addr_space_t as = ADDR_SPACE_GENERIC;
7704 enum machine_mode address_mode = Pmode;
7705 enum machine_mode pointer_mode = ptr_mode;
7706 enum machine_mode rmode;
7707 rtx result;
7709 /* Target mode of VOIDmode says "whatever's natural". */
7710 if (tmode == VOIDmode)
7711 tmode = TYPE_MODE (TREE_TYPE (exp));
7713 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7715 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7716 address_mode = targetm.addr_space.address_mode (as);
7717 pointer_mode = targetm.addr_space.pointer_mode (as);
7720 /* We can get called with some Weird Things if the user does silliness
7721 like "(short) &a". In that case, convert_memory_address won't do
7722 the right thing, so ignore the given target mode. */
7723 if (tmode != address_mode && tmode != pointer_mode)
7724 tmode = address_mode;
7726 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7727 tmode, modifier, as);
7729 /* Despite expand_expr's claim that TMODE is ignored when not
7730 strictly convenient, things break if we don't honor it. Note
7731 that combined with the above, we only do this for pointer modes. */
7732 rmode = GET_MODE (result);
7733 if (rmode == VOIDmode)
7734 rmode = tmode;
7735 if (rmode != tmode)
7736 result = convert_memory_address_addr_space (tmode, result, as);
7738 return result;
7741 /* Generate code for computing CONSTRUCTOR EXP.
7742 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7743 is TRUE, instead of creating a temporary variable in memory
7744 NULL is returned and the caller needs to handle it differently. */
7746 static rtx
7747 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7748 bool avoid_temp_mem)
7750 tree type = TREE_TYPE (exp);
7751 enum machine_mode mode = TYPE_MODE (type);
7753 /* Try to avoid creating a temporary at all. This is possible
7754 if all of the initializer is zero.
7755 FIXME: try to handle all [0..255] initializers we can handle
7756 with memset. */
7757 if (TREE_STATIC (exp)
7758 && !TREE_ADDRESSABLE (exp)
7759 && target != 0 && mode == BLKmode
7760 && all_zeros_p (exp))
7762 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7763 return target;
7766 /* All elts simple constants => refer to a constant in memory. But
7767 if this is a non-BLKmode mode, let it store a field at a time
7768 since that should make a CONST_INT or CONST_DOUBLE when we
7769 fold. Likewise, if we have a target we can use, it is best to
7770 store directly into the target unless the type is large enough
7771 that memcpy will be used. If we are making an initializer and
7772 all operands are constant, put it in memory as well.
7774 FIXME: Avoid trying to fill vector constructors piecemeal.
7775 Output them with output_constant_def below unless we're sure
7776 they're zeros. This should go away when vector initializers
7777 are treated like VECTOR_CST instead of arrays. */
7778 if ((TREE_STATIC (exp)
7779 && ((mode == BLKmode
7780 && ! (target != 0 && safe_from_p (target, exp, 1)))
7781 || TREE_ADDRESSABLE (exp)
7782 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7783 && (! MOVE_BY_PIECES_P
7784 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7785 TYPE_ALIGN (type)))
7786 && ! mostly_zeros_p (exp))))
7787 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7788 && TREE_CONSTANT (exp)))
7790 rtx constructor;
7792 if (avoid_temp_mem)
7793 return NULL_RTX;
7795 constructor = expand_expr_constant (exp, 1, modifier);
7797 if (modifier != EXPAND_CONST_ADDRESS
7798 && modifier != EXPAND_INITIALIZER
7799 && modifier != EXPAND_SUM)
7800 constructor = validize_mem (constructor);
7802 return constructor;
7805 /* Handle calls that pass values in multiple non-contiguous
7806 locations. The Irix 6 ABI has examples of this. */
7807 if (target == 0 || ! safe_from_p (target, exp, 1)
7808 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7810 if (avoid_temp_mem)
7811 return NULL_RTX;
7813 target
7814 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7815 | (TREE_READONLY (exp)
7816 * TYPE_QUAL_CONST))),
7817 TREE_ADDRESSABLE (exp), 1);
7820 store_constructor (exp, target, 0, int_expr_size (exp));
7821 return target;
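/* A hedged sketch of the fast path above: when a BLKmode TARGET is
   available and the constructor is known to be all zeros (all_zeros_p),
   as for

       struct S { int a[16]; } s = { 0 };

   no temporary is created; TARGET is simply cleared with
   clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL).  */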
7825 /* expand_expr: generate code for computing expression EXP.
7826 An rtx for the computed value is returned. The value is never null.
7827 In the case of a void EXP, const0_rtx is returned.
7829 The value may be stored in TARGET if TARGET is nonzero.
7830 TARGET is just a suggestion; callers must assume that
7831 the rtx returned may not be the same as TARGET.
7833 If TARGET is CONST0_RTX, it means that the value will be ignored.
7835 If TMODE is not VOIDmode, it suggests generating the
7836 result in mode TMODE. But this is done only when convenient.
7837 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7838 TMODE is just a suggestion; callers must assume that
7839 the rtx returned may not have mode TMODE.
7841 Note that TARGET may have neither TMODE nor MODE. In that case, it
7842 probably will not be used.
7844 If MODIFIER is EXPAND_SUM then when EXP is an addition
7845 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7846 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7847 products as above, or REG or MEM, or constant.
7848 Ordinarily in such cases we would output mul or add instructions
7849 and then return a pseudo reg containing the sum.
7851 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7852 it also marks a label as absolutely required (it can't be dead).
7853 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7854 This is used for outputting expressions used in initializers.
7856 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7857 with a constant address even if that address is not normally legitimate.
7858 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7860 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7861 a call parameter. Such targets require special care as we haven't yet
7862 marked TARGET so that it's safe from being trashed by libcalls. We
7863 don't want to use TARGET for anything but the final result;
7864 intermediate values must go elsewhere. Additionally, calls to
7865 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7867 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7868 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7869 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7870 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7871 recursively. */
7873 rtx
7874 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7875 enum expand_modifier modifier, rtx *alt_rtl)
7877 rtx ret;
7879 /* Handle ERROR_MARK before anybody tries to access its type. */
7880 if (TREE_CODE (exp) == ERROR_MARK
7881 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7883 ret = CONST0_RTX (tmode);
7884 return ret ? ret : const0_rtx;
7887 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7888 return ret;
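/* A minimal caller-side sketch (hypothetical local names): TARGET and
   TMODE are only hints, so a caller must use the rtx actually returned,
   e.g.

       rtx t = gen_reg_rtx (SImode);
       rtx v = expand_expr (exp, t, SImode, EXPAND_NORMAL);
       if (v != t)
         emit_move_insn (t, v);  */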
7891 /* Try to expand the conditional expression which is represented by
7892 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7893 return the rtl reg which represents the result. Otherwise return
7894 NULL_RTX. */
7896 static rtx
7897 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7898 tree treeop1 ATTRIBUTE_UNUSED,
7899 tree treeop2 ATTRIBUTE_UNUSED)
7901 #ifdef HAVE_conditional_move
7902 rtx insn;
7903 rtx op00, op01, op1, op2;
7904 enum rtx_code comparison_code;
7905 enum machine_mode comparison_mode;
7906 gimple srcstmt;
7907 rtx temp;
7908 tree type = TREE_TYPE (treeop1);
7909 int unsignedp = TYPE_UNSIGNED (type);
7910 enum machine_mode mode = TYPE_MODE (type);
7911 enum machine_mode orig_mode = mode;
7913 /* If we cannot do a conditional move on the mode, try doing it
7914 with the promoted mode. */
7915 if (!can_conditionally_move_p (mode))
7917 mode = promote_mode (type, mode, &unsignedp);
7918 if (!can_conditionally_move_p (mode))
7919 return NULL_RTX;
7920 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7922 else
7923 temp = assign_temp (type, 0, 1);
7925 start_sequence ();
7926 expand_operands (treeop1, treeop2,
7927 temp, &op1, &op2, EXPAND_NORMAL);
7929 if (TREE_CODE (treeop0) == SSA_NAME
7930 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7932 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7933 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7934 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7935 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7936 comparison_mode = TYPE_MODE (type);
7937 unsignedp = TYPE_UNSIGNED (type);
7938 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7940 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7942 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7943 enum tree_code cmpcode = TREE_CODE (treeop0);
7944 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7945 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7946 unsignedp = TYPE_UNSIGNED (type);
7947 comparison_mode = TYPE_MODE (type);
7948 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7950 else
7952 op00 = expand_normal (treeop0);
7953 op01 = const0_rtx;
7954 comparison_code = NE;
7955 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7958 if (GET_MODE (op1) != mode)
7959 op1 = gen_lowpart (mode, op1);
7961 if (GET_MODE (op2) != mode)
7962 op2 = gen_lowpart (mode, op2);
7964 /* Try to emit the conditional move. */
7965 insn = emit_conditional_move (temp, comparison_code,
7966 op00, op01, comparison_mode,
7967 op1, op2, mode,
7968 unsignedp);
7970 /* If we could do the conditional move, emit the sequence,
7971 and return. */
7972 if (insn)
7974 rtx seq = get_insns ();
7975 end_sequence ();
7976 emit_insn (seq);
7977 return convert_modes (orig_mode, mode, temp, 0);
7980 /* Otherwise discard the sequence and fall back to code with
7981 branches. */
7982 end_sequence ();
7983 #endif
7984 return NULL_RTX;
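/* A hedged illustration of what the function above handles: a gimple
   assignment such as

       x = a < b ? c : d;

   where TREEOP0 is the comparison (or an SSA name defined by one),
   TREEOP1 is c and TREEOP2 is d.  If the target supports a conditional
   move in the relevant mode, the value is computed without branches via
   emit_conditional_move; otherwise NULL_RTX is returned and the caller
   falls back to jumps.  */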
7987 rtx
7988 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7989 enum expand_modifier modifier)
7991 rtx op0, op1, op2, temp;
7992 tree type;
7993 int unsignedp;
7994 enum machine_mode mode;
7995 enum tree_code code = ops->code;
7996 optab this_optab;
7997 rtx subtarget, original_target;
7998 int ignore;
7999 bool reduce_bit_field;
8000 location_t loc = ops->location;
8001 tree treeop0, treeop1, treeop2;
8002 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8003 ? reduce_to_bit_field_precision ((expr), \
8004 target, \
8005 type) \
8006 : (expr))
8008 type = ops->type;
8009 mode = TYPE_MODE (type);
8010 unsignedp = TYPE_UNSIGNED (type);
8012 treeop0 = ops->op0;
8013 treeop1 = ops->op1;
8014 treeop2 = ops->op2;
8016 /* We should be called only on simple (binary or unary) expressions,
8017 exactly those that are valid in gimple expressions that aren't
8018 GIMPLE_SINGLE_RHS (or invalid). */
8019 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8020 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8021 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8023 ignore = (target == const0_rtx
8024 || ((CONVERT_EXPR_CODE_P (code)
8025 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8026 && TREE_CODE (type) == VOID_TYPE));
8028 /* We should be called only if we need the result. */
8029 gcc_assert (!ignore);
8031 /* An operation in what may be a bit-field type needs the
8032 result to be reduced to the precision of the bit-field type,
8033 which is narrower than that of the type's mode. */
8034 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8035 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8037 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8038 target = 0;
8040 /* Use subtarget as the target for operand 0 of a binary operation. */
8041 subtarget = get_subtarget (target);
8042 original_target = target;
8044 switch (code)
8046 case NON_LVALUE_EXPR:
8047 case PAREN_EXPR:
8048 CASE_CONVERT:
8049 if (treeop0 == error_mark_node)
8050 return const0_rtx;
8052 if (TREE_CODE (type) == UNION_TYPE)
8054 tree valtype = TREE_TYPE (treeop0);
8056 /* If both input and output are BLKmode, this conversion isn't doing
8057 anything except possibly changing memory attribute. */
8058 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8060 rtx result = expand_expr (treeop0, target, tmode,
8061 modifier);
8063 result = copy_rtx (result);
8064 set_mem_attributes (result, type, 0);
8065 return result;
8068 if (target == 0)
8070 if (TYPE_MODE (type) != BLKmode)
8071 target = gen_reg_rtx (TYPE_MODE (type));
8072 else
8073 target = assign_temp (type, 1, 1);
8076 if (MEM_P (target))
8077 /* Store data into beginning of memory target. */
8078 store_expr (treeop0,
8079 adjust_address (target, TYPE_MODE (valtype), 0),
8080 modifier == EXPAND_STACK_PARM,
8081 false);
8083 else
8085 gcc_assert (REG_P (target));
8087 /* Store this field into a union of the proper type. */
8088 store_field (target,
8089 MIN ((int_size_in_bytes (TREE_TYPE
8090 (treeop0))
8091 * BITS_PER_UNIT),
8092 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8093 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8096 /* Return the entire union. */
8097 return target;
8100 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8102 op0 = expand_expr (treeop0, target, VOIDmode,
8103 modifier);
8105 /* If the signedness of the conversion differs and OP0 is
8106 a promoted SUBREG, clear that indication since we now
8107 have to do the proper extension. */
8108 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8109 && GET_CODE (op0) == SUBREG)
8110 SUBREG_PROMOTED_VAR_P (op0) = 0;
8112 return REDUCE_BIT_FIELD (op0);
8115 op0 = expand_expr (treeop0, NULL_RTX, mode,
8116 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8117 if (GET_MODE (op0) == mode)
8118 ;
8120 /* If OP0 is a constant, just convert it into the proper mode. */
8121 else if (CONSTANT_P (op0))
8123 tree inner_type = TREE_TYPE (treeop0);
8124 enum machine_mode inner_mode = GET_MODE (op0);
8126 if (inner_mode == VOIDmode)
8127 inner_mode = TYPE_MODE (inner_type);
8129 if (modifier == EXPAND_INITIALIZER)
8130 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8131 subreg_lowpart_offset (mode,
8132 inner_mode));
8133 else
8134 op0 = convert_modes (mode, inner_mode, op0,
8135 TYPE_UNSIGNED (inner_type));
8138 else if (modifier == EXPAND_INITIALIZER)
8139 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8141 else if (target == 0)
8142 op0 = convert_to_mode (mode, op0,
8143 TYPE_UNSIGNED (TREE_TYPE
8144 (treeop0)));
8145 else
8147 convert_move (target, op0,
8148 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8149 op0 = target;
8152 return REDUCE_BIT_FIELD (op0);
8154 case ADDR_SPACE_CONVERT_EXPR:
8156 tree treeop0_type = TREE_TYPE (treeop0);
8157 addr_space_t as_to;
8158 addr_space_t as_from;
8160 gcc_assert (POINTER_TYPE_P (type));
8161 gcc_assert (POINTER_TYPE_P (treeop0_type));
8163 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8164 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8166 /* Conversions between pointers to the same address space should
8167 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8168 gcc_assert (as_to != as_from);
8170 /* Ask target code to handle conversion between pointers
8171 to overlapping address spaces. */
8172 if (targetm.addr_space.subset_p (as_to, as_from)
8173 || targetm.addr_space.subset_p (as_from, as_to))
8175 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8176 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8177 gcc_assert (op0);
8178 return op0;
8181 /* For disjoint address spaces, converting anything but
8182 a null pointer invokes undefined behaviour. We simply
8183 always return a null pointer here. */
8184 return CONST0_RTX (mode);
8187 case POINTER_PLUS_EXPR:
8188 /* Even though the sizetype mode and the pointer's mode can be different,
8189 expand is able to handle this correctly and get the correct result out
8190 of the PLUS_EXPR code. */
8191 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8192 if sizetype precision is smaller than pointer precision. */
8193 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8194 treeop1 = fold_convert_loc (loc, type,
8195 fold_convert_loc (loc, ssizetype,
8196 treeop1));
8197 /* If sizetype precision is larger than pointer precision, truncate the
8198 offset to have matching modes. */
8199 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8200 treeop1 = fold_convert_loc (loc, type, treeop1);
8202 case PLUS_EXPR:
8203 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8204 something else, make sure we add the register to the constant and
8205 then to the other thing. This case can occur during strength
8206 reduction and doing it this way will produce better code if the
8207 frame pointer or argument pointer is eliminated.
8209 fold-const.c will ensure that the constant is always in the inner
8210 PLUS_EXPR, so the only case we need to do anything about is if
8211 sp, ap, or fp is our second argument, in which case we must swap
8212 the innermost first argument and our second argument. */
8214 if (TREE_CODE (treeop0) == PLUS_EXPR
8215 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8216 && TREE_CODE (treeop1) == VAR_DECL
8217 && (DECL_RTL (treeop1) == frame_pointer_rtx
8218 || DECL_RTL (treeop1) == stack_pointer_rtx
8219 || DECL_RTL (treeop1) == arg_pointer_rtx))
8221 gcc_unreachable ();
8224 /* If the result is to be ptr_mode and we are adding an integer to
8225 something, we might be forming a constant. So try to use
8226 plus_constant. If it produces a sum and we can't accept it,
8227 use force_operand. This allows P = &ARR[const] to generate
8228 efficient code on machines where a SYMBOL_REF is not a valid
8229 address.
8231 If this is an EXPAND_SUM call, always return the sum. */
8232 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8233 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8235 if (modifier == EXPAND_STACK_PARM)
8236 target = 0;
8237 if (TREE_CODE (treeop0) == INTEGER_CST
8238 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8239 && TREE_CONSTANT (treeop1))
8241 rtx constant_part;
8243 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8244 EXPAND_SUM);
8245 /* Use immed_double_const to ensure that the constant is
8246 truncated according to the mode of OP1, then sign extended
8247 to a HOST_WIDE_INT. Using the constant directly can result
8248 in non-canonical RTL in a 64x32 cross compile. */
8249 constant_part
8250 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8251 (HOST_WIDE_INT) 0,
8252 TYPE_MODE (TREE_TYPE (treeop1)));
8253 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8254 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8255 op1 = force_operand (op1, target);
8256 return REDUCE_BIT_FIELD (op1);
8259 else if (TREE_CODE (treeop1) == INTEGER_CST
8260 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8261 && TREE_CONSTANT (treeop0))
8263 rtx constant_part;
8265 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8266 (modifier == EXPAND_INITIALIZER
8267 ? EXPAND_INITIALIZER : EXPAND_SUM));
8268 if (! CONSTANT_P (op0))
8270 op1 = expand_expr (treeop1, NULL_RTX,
8271 VOIDmode, modifier);
8272 /* Return a PLUS if modifier says it's OK. */
8273 if (modifier == EXPAND_SUM
8274 || modifier == EXPAND_INITIALIZER)
8275 return simplify_gen_binary (PLUS, mode, op0, op1);
8276 goto binop2;
8278 /* Use immed_double_const to ensure that the constant is
8279 truncated according to the mode of OP1, then sign extended
8280 to a HOST_WIDE_INT. Using the constant directly can result
8281 in non-canonical RTL in a 64x32 cross compile. */
8282 constant_part
8283 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8284 (HOST_WIDE_INT) 0,
8285 TYPE_MODE (TREE_TYPE (treeop0)));
8286 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8287 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8288 op0 = force_operand (op0, target);
8289 return REDUCE_BIT_FIELD (op0);
8293 /* Use TER to expand pointer addition of a negated value
8294 as pointer subtraction. */
8295 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8296 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8297 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8298 && TREE_CODE (treeop1) == SSA_NAME
8299 && TYPE_MODE (TREE_TYPE (treeop0))
8300 == TYPE_MODE (TREE_TYPE (treeop1)))
8302 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8303 if (def)
8305 treeop1 = gimple_assign_rhs1 (def);
8306 code = MINUS_EXPR;
8307 goto do_minus;
8311 /* No sense saving up arithmetic to be done
8312 if it's all in the wrong mode to form part of an address.
8313 And force_operand won't know whether to sign-extend or
8314 zero-extend. */
8315 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8316 || mode != ptr_mode)
8318 expand_operands (treeop0, treeop1,
8319 subtarget, &op0, &op1, EXPAND_NORMAL);
8320 if (op0 == const0_rtx)
8321 return op1;
8322 if (op1 == const0_rtx)
8323 return op0;
8324 goto binop2;
8327 expand_operands (treeop0, treeop1,
8328 subtarget, &op0, &op1, modifier);
8329 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8331 case MINUS_EXPR:
8332 do_minus:
8333 /* For initializers, we are allowed to return a MINUS of two
8334 symbolic constants. Here we handle all cases when both operands
8335 are constant. */
8336 /* Handle difference of two symbolic constants,
8337 for the sake of an initializer. */
8338 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8339 && really_constant_p (treeop0)
8340 && really_constant_p (treeop1))
8342 expand_operands (treeop0, treeop1,
8343 NULL_RTX, &op0, &op1, modifier);
8345 /* If the last operand is a CONST_INT, use plus_constant of
8346 the negated constant. Else make the MINUS. */
8347 if (CONST_INT_P (op1))
8348 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8349 -INTVAL (op1)));
8350 else
8351 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8354 /* No sense saving up arithmetic to be done
8355 if it's all in the wrong mode to form part of an address.
8356 And force_operand won't know whether to sign-extend or
8357 zero-extend. */
8358 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8359 || mode != ptr_mode)
8360 goto binop;
8362 expand_operands (treeop0, treeop1,
8363 subtarget, &op0, &op1, modifier);
8365 /* Convert A - const to A + (-const). */
8366 if (CONST_INT_P (op1))
8368 op1 = negate_rtx (mode, op1);
8369 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8372 goto binop2;
8374 case WIDEN_MULT_PLUS_EXPR:
8375 case WIDEN_MULT_MINUS_EXPR:
8376 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8377 op2 = expand_normal (treeop2);
8378 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8379 target, unsignedp);
8380 return target;
8382 case WIDEN_MULT_EXPR:
8383 /* If first operand is constant, swap them.
8384 Thus the following special case checks need only
8385 check the second operand. */
8386 if (TREE_CODE (treeop0) == INTEGER_CST)
8388 tree t1 = treeop0;
8389 treeop0 = treeop1;
8390 treeop1 = t1;
8393 /* First, check if we have a multiplication of one signed and one
8394 unsigned operand. */
8395 if (TREE_CODE (treeop1) != INTEGER_CST
8396 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8397 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8399 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8400 this_optab = usmul_widen_optab;
8401 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8402 != CODE_FOR_nothing)
8404 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8405 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8406 EXPAND_NORMAL);
8407 else
8408 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8409 EXPAND_NORMAL);
8410 /* op0 and op1 might still be constant, despite the above
8411 != INTEGER_CST check. Handle it. */
8412 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8414 op0 = convert_modes (innermode, mode, op0, true);
8415 op1 = convert_modes (innermode, mode, op1, false);
8416 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8417 target, unsignedp));
8419 goto binop3;
8422 /* Check for a multiplication with matching signedness. */
8423 else if ((TREE_CODE (treeop1) == INTEGER_CST
8424 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8425 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8426 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8428 tree op0type = TREE_TYPE (treeop0);
8429 enum machine_mode innermode = TYPE_MODE (op0type);
8430 bool zextend_p = TYPE_UNSIGNED (op0type);
8431 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8432 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8434 if (TREE_CODE (treeop0) != INTEGER_CST)
8436 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8437 != CODE_FOR_nothing)
8439 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8440 EXPAND_NORMAL);
8441 /* op0 and op1 might still be constant, despite the above
8442 != INTEGER_CST check. Handle it. */
8443 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8445 widen_mult_const:
8446 op0 = convert_modes (innermode, mode, op0, zextend_p);
8447 op1
8448 = convert_modes (innermode, mode, op1,
8449 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8450 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8451 target,
8452 unsignedp));
8454 temp = expand_widening_mult (mode, op0, op1, target,
8455 unsignedp, this_optab);
8456 return REDUCE_BIT_FIELD (temp);
8458 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8459 != CODE_FOR_nothing
8460 && innermode == word_mode)
8462 rtx htem, hipart;
8463 op0 = expand_normal (treeop0);
8464 if (TREE_CODE (treeop1) == INTEGER_CST)
8465 op1 = convert_modes (innermode, mode,
8466 expand_normal (treeop1),
8467 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8468 else
8469 op1 = expand_normal (treeop1);
8470 /* op0 and op1 might still be constant, despite the above
8471 != INTEGER_CST check. Handle it. */
8472 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8473 goto widen_mult_const;
8474 temp = expand_binop (mode, other_optab, op0, op1, target,
8475 unsignedp, OPTAB_LIB_WIDEN);
8476 hipart = gen_highpart (innermode, temp);
8477 htem = expand_mult_highpart_adjust (innermode, hipart,
8478 op0, op1, hipart,
8479 zextend_p);
8480 if (htem != hipart)
8481 emit_move_insn (hipart, htem);
8482 return REDUCE_BIT_FIELD (temp);
8486 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8487 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8488 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8489 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8491 case FMA_EXPR:
8493 optab opt = fma_optab;
8494 gimple def0, def2;
8496 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8497 call. */
8498 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8500 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8501 tree call_expr;
8503 gcc_assert (fn != NULL_TREE);
8504 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8505 return expand_builtin (call_expr, target, subtarget, mode, false);
8508 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8509 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8511 op0 = op2 = NULL;
8513 if (def0 && def2
8514 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8516 opt = fnms_optab;
8517 op0 = expand_normal (gimple_assign_rhs1 (def0));
8518 op2 = expand_normal (gimple_assign_rhs1 (def2));
8520 else if (def0
8521 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8523 opt = fnma_optab;
8524 op0 = expand_normal (gimple_assign_rhs1 (def0));
8526 else if (def2
8527 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8529 opt = fms_optab;
8530 op2 = expand_normal (gimple_assign_rhs1 (def2));
8533 if (op0 == NULL)
8534 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8535 if (op2 == NULL)
8536 op2 = expand_normal (treeop2);
8537 op1 = expand_normal (treeop1);
8539 return expand_ternary_op (TYPE_MODE (type), opt,
8540 op0, op1, op2, target, 0);
8543 case MULT_EXPR:
8544 /* If this is a fixed-point operation, then we cannot use the code
8545 below because "expand_mult" doesn't support sat/no-sat fixed-point
8546 multiplications. */
8547 if (ALL_FIXED_POINT_MODE_P (mode))
8548 goto binop;
8550 /* If first operand is constant, swap them.
8551 Thus the following special case checks need only
8552 check the second operand. */
8553 if (TREE_CODE (treeop0) == INTEGER_CST)
8555 tree t1 = treeop0;
8556 treeop0 = treeop1;
8557 treeop1 = t1;
8560 /* Attempt to return something suitable for generating an
8561 indexed address, for machines that support that. */
8563 if (modifier == EXPAND_SUM && mode == ptr_mode
8564 && tree_fits_shwi_p (treeop1))
8566 tree exp1 = treeop1;
8568 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8569 EXPAND_SUM);
8571 if (!REG_P (op0))
8572 op0 = force_operand (op0, NULL_RTX);
8573 if (!REG_P (op0))
8574 op0 = copy_to_mode_reg (mode, op0);
8576 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8577 gen_int_mode (tree_to_shwi (exp1),
8578 TYPE_MODE (TREE_TYPE (exp1)))));
8581 if (modifier == EXPAND_STACK_PARM)
8582 target = 0;
8584 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8585 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8587 case TRUNC_DIV_EXPR:
8588 case FLOOR_DIV_EXPR:
8589 case CEIL_DIV_EXPR:
8590 case ROUND_DIV_EXPR:
8591 case EXACT_DIV_EXPR:
8592 /* If this is a fixed-point operation, then we cannot use the code
8593 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8594 divisions. */
8595 if (ALL_FIXED_POINT_MODE_P (mode))
8596 goto binop;
8598 if (modifier == EXPAND_STACK_PARM)
8599 target = 0;
8600 /* Possible optimization: compute the dividend with EXPAND_SUM
8601 then if the divisor is constant we can optimize the case
8602 where some terms of the dividend have coeffs divisible by it. */
8603 expand_operands (treeop0, treeop1,
8604 subtarget, &op0, &op1, EXPAND_NORMAL);
8605 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8607 case RDIV_EXPR:
8608 goto binop;
8610 case MULT_HIGHPART_EXPR:
8611 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8612 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8613 gcc_assert (temp);
8614 return temp;
8616 case TRUNC_MOD_EXPR:
8617 case FLOOR_MOD_EXPR:
8618 case CEIL_MOD_EXPR:
8619 case ROUND_MOD_EXPR:
8620 if (modifier == EXPAND_STACK_PARM)
8621 target = 0;
8622 expand_operands (treeop0, treeop1,
8623 subtarget, &op0, &op1, EXPAND_NORMAL);
8624 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8626 case FIXED_CONVERT_EXPR:
8627 op0 = expand_normal (treeop0);
8628 if (target == 0 || modifier == EXPAND_STACK_PARM)
8629 target = gen_reg_rtx (mode);
8631 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8632 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8633 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8634 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8635 else
8636 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8637 return target;
8639 case FIX_TRUNC_EXPR:
8640 op0 = expand_normal (treeop0);
8641 if (target == 0 || modifier == EXPAND_STACK_PARM)
8642 target = gen_reg_rtx (mode);
8643 expand_fix (target, op0, unsignedp);
8644 return target;
8646 case FLOAT_EXPR:
8647 op0 = expand_normal (treeop0);
8648 if (target == 0 || modifier == EXPAND_STACK_PARM)
8649 target = gen_reg_rtx (mode);
8650 /* expand_float can't figure out what to do if FROM has VOIDmode.
8651 So give it the correct mode. With -O, cse will optimize this. */
8652 if (GET_MODE (op0) == VOIDmode)
8653 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8654 op0);
8655 expand_float (target, op0,
8656 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8657 return target;
8659 case NEGATE_EXPR:
8660 op0 = expand_expr (treeop0, subtarget,
8661 VOIDmode, EXPAND_NORMAL);
8662 if (modifier == EXPAND_STACK_PARM)
8663 target = 0;
8664 temp = expand_unop (mode,
8665 optab_for_tree_code (NEGATE_EXPR, type,
8666 optab_default),
8667 op0, target, 0);
8668 gcc_assert (temp);
8669 return REDUCE_BIT_FIELD (temp);
8671 case ABS_EXPR:
8672 op0 = expand_expr (treeop0, subtarget,
8673 VOIDmode, EXPAND_NORMAL);
8674 if (modifier == EXPAND_STACK_PARM)
8675 target = 0;
8677 /* ABS_EXPR is not valid for complex arguments. */
8678 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8679 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8681 /* Unsigned abs is simply the operand. Testing here means we don't
8682 risk generating incorrect code below. */
8683 if (TYPE_UNSIGNED (type))
8684 return op0;
8686 return expand_abs (mode, op0, target, unsignedp,
8687 safe_from_p (target, treeop0, 1));
8689 case MAX_EXPR:
8690 case MIN_EXPR:
8691 target = original_target;
8692 if (target == 0
8693 || modifier == EXPAND_STACK_PARM
8694 || (MEM_P (target) && MEM_VOLATILE_P (target))
8695 || GET_MODE (target) != mode
8696 || (REG_P (target)
8697 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8698 target = gen_reg_rtx (mode);
8699 expand_operands (treeop0, treeop1,
8700 target, &op0, &op1, EXPAND_NORMAL);
8702 /* First try to do it with a special MIN or MAX instruction.
8703 If that does not win, use a conditional jump to select the proper
8704 value. */
8705 this_optab = optab_for_tree_code (code, type, optab_default);
8706 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8707 OPTAB_WIDEN);
8708 if (temp != 0)
8709 return temp;
8711 /* At this point, a MEM target is no longer useful; we will get better
8712 code without it. */
8714 if (! REG_P (target))
8715 target = gen_reg_rtx (mode);
8717 /* If op1 was placed in target, swap op0 and op1. */
8718 if (target != op0 && target == op1)
8720 temp = op0;
8721 op0 = op1;
8722 op1 = temp;
8725 /* We generate better code and avoid problems with op1 mentioning
8726 target by forcing op1 into a pseudo if it isn't a constant. */
8727 if (! CONSTANT_P (op1))
8728 op1 = force_reg (mode, op1);
8731 enum rtx_code comparison_code;
8732 rtx cmpop1 = op1;
8734 if (code == MAX_EXPR)
8735 comparison_code = unsignedp ? GEU : GE;
8736 else
8737 comparison_code = unsignedp ? LEU : LE;
8739 /* Canonicalize to comparisons against 0. */
8740 if (op1 == const1_rtx)
8742 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8743 or (a != 0 ? a : 1) for unsigned.
8744 For MIN we are safe converting (a <= 1 ? a : 1)
8745 into (a <= 0 ? a : 1) */
8746 cmpop1 = const0_rtx;
8747 if (code == MAX_EXPR)
8748 comparison_code = unsignedp ? NE : GT;
8750 if (op1 == constm1_rtx && !unsignedp)
8752 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8753 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8754 cmpop1 = const0_rtx;
8755 if (code == MIN_EXPR)
8756 comparison_code = LT;
8758 #ifdef HAVE_conditional_move
8759 /* Use a conditional move if possible. */
8760 if (can_conditionally_move_p (mode))
8762 rtx insn;
8764 /* ??? Same problem as in expmed.c: emit_conditional_move
8765 forces a stack adjustment via compare_from_rtx, and we
8766 lose the stack adjustment if the sequence we are about
8767 to create is discarded. */
8768 do_pending_stack_adjust ();
8770 start_sequence ();
8772 /* Try to emit the conditional move. */
8773 insn = emit_conditional_move (target, comparison_code,
8774 op0, cmpop1, mode,
8775 op0, op1, mode,
8776 unsignedp);
8778 /* If we could do the conditional move, emit the sequence,
8779 and return. */
8780 if (insn)
8782 rtx seq = get_insns ();
8783 end_sequence ();
8784 emit_insn (seq);
8785 return target;
8788 /* Otherwise discard the sequence and fall back to code with
8789 branches. */
8790 end_sequence ();
8792 #endif
8793 if (target != op0)
8794 emit_move_insn (target, op0);
8796 temp = gen_label_rtx ();
8797 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8798 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8799 -1);
8801 emit_move_insn (target, op1);
8802 emit_label (temp);
8803 return target;
8805 case BIT_NOT_EXPR:
8806 op0 = expand_expr (treeop0, subtarget,
8807 VOIDmode, EXPAND_NORMAL);
8808 if (modifier == EXPAND_STACK_PARM)
8809 target = 0;
8810 /* In case we have to reduce the result to bitfield precision
8811 for an unsigned bitfield, expand this as XOR with a proper constant
8812 instead. */
8813 if (reduce_bit_field && TYPE_UNSIGNED (type))
8814 temp = expand_binop (mode, xor_optab, op0,
8815 immed_double_int_const
8816 (double_int::mask (TYPE_PRECISION (type)), mode),
8817 target, 1, OPTAB_LIB_WIDEN);
8818 else
8819 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8820 gcc_assert (temp);
8821 return temp;
8823 /* ??? Can optimize bitwise operations with one arg constant.
8824 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8825 and (a bitwise1 b) bitwise2 b (etc)
8826 but that is probably not worthwhile. */
8828 case BIT_AND_EXPR:
8829 case BIT_IOR_EXPR:
8830 case BIT_XOR_EXPR:
8831 goto binop;
8833 case LROTATE_EXPR:
8834 case RROTATE_EXPR:
8835 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8836 || (GET_MODE_PRECISION (TYPE_MODE (type))
8837 == TYPE_PRECISION (type)));
8838 /* fall through */
8840 case LSHIFT_EXPR:
8841 case RSHIFT_EXPR:
8842 /* If this is a fixed-point operation, then we cannot use the code
8843 below because "expand_shift" doesn't support sat/no-sat fixed-point
8844 shifts. */
8845 if (ALL_FIXED_POINT_MODE_P (mode))
8846 goto binop;
8848 if (! safe_from_p (subtarget, treeop1, 1))
8849 subtarget = 0;
8850 if (modifier == EXPAND_STACK_PARM)
8851 target = 0;
8852 op0 = expand_expr (treeop0, subtarget,
8853 VOIDmode, EXPAND_NORMAL);
8854 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8855 unsignedp);
8856 if (code == LSHIFT_EXPR)
8857 temp = REDUCE_BIT_FIELD (temp);
8858 return temp;
8860 /* Could determine the answer when only additive constants differ. Also,
8861 the addition of one can be handled by changing the condition. */
8862 case LT_EXPR:
8863 case LE_EXPR:
8864 case GT_EXPR:
8865 case GE_EXPR:
8866 case EQ_EXPR:
8867 case NE_EXPR:
8868 case UNORDERED_EXPR:
8869 case ORDERED_EXPR:
8870 case UNLT_EXPR:
8871 case UNLE_EXPR:
8872 case UNGT_EXPR:
8873 case UNGE_EXPR:
8874 case UNEQ_EXPR:
8875 case LTGT_EXPR:
8876 temp = do_store_flag (ops,
8877 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8878 tmode != VOIDmode ? tmode : mode);
8879 if (temp)
8880 return temp;
8882 /* Use a compare and a jump for BLKmode comparisons, or for function
8883 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8885 if ((target == 0
8886 || modifier == EXPAND_STACK_PARM
8887 || ! safe_from_p (target, treeop0, 1)
8888 || ! safe_from_p (target, treeop1, 1)
8889 /* Make sure we don't have a hard reg (such as function's return
8890 value) live across basic blocks, if not optimizing. */
8891 || (!optimize && REG_P (target)
8892 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8893 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8895 emit_move_insn (target, const0_rtx);
8897 op1 = gen_label_rtx ();
8898 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8900 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8901 emit_move_insn (target, constm1_rtx);
8902 else
8903 emit_move_insn (target, const1_rtx);
8905 emit_label (op1);
8906 return target;
8908 case COMPLEX_EXPR:
8909 /* Get the rtx code of the operands. */
8910 op0 = expand_normal (treeop0);
8911 op1 = expand_normal (treeop1);
8913 if (!target)
8914 target = gen_reg_rtx (TYPE_MODE (type));
8915 else
8916 /* If target overlaps with op1, then either we need to force
8917 op1 into a pseudo (if target also overlaps with op0),
8918 or write the complex parts in reverse order. */
8919 switch (GET_CODE (target))
8921 case CONCAT:
8922 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8924 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8926 complex_expr_force_op1:
8927 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8928 emit_move_insn (temp, op1);
8929 op1 = temp;
8930 break;
8932 complex_expr_swap_order:
8933 /* Move the imaginary (op1) and real (op0) parts to their
8934 location. */
8935 write_complex_part (target, op1, true);
8936 write_complex_part (target, op0, false);
8938 return target;
8940 break;
8941 case MEM:
8942 temp = adjust_address_nv (target,
8943 GET_MODE_INNER (GET_MODE (target)), 0);
8944 if (reg_overlap_mentioned_p (temp, op1))
8946 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8947 temp = adjust_address_nv (target, imode,
8948 GET_MODE_SIZE (imode));
8949 if (reg_overlap_mentioned_p (temp, op0))
8950 goto complex_expr_force_op1;
8951 goto complex_expr_swap_order;
8953 break;
8954 default:
8955 if (reg_overlap_mentioned_p (target, op1))
8957 if (reg_overlap_mentioned_p (target, op0))
8958 goto complex_expr_force_op1;
8959 goto complex_expr_swap_order;
8961 break;
8964 /* Move the real (op0) and imaginary (op1) parts to their location. */
8965 write_complex_part (target, op0, false);
8966 write_complex_part (target, op1, true);
8968 return target;
8970 case WIDEN_SUM_EXPR:
8972 tree oprnd0 = treeop0;
8973 tree oprnd1 = treeop1;
8975 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8976 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8977 target, unsignedp);
8978 return target;
8981 case REDUC_MAX_EXPR:
8982 case REDUC_MIN_EXPR:
8983 case REDUC_PLUS_EXPR:
8985 op0 = expand_normal (treeop0);
8986 this_optab = optab_for_tree_code (code, type, optab_default);
8987 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8988 gcc_assert (temp);
8989 return temp;
8992 case VEC_LSHIFT_EXPR:
8993 case VEC_RSHIFT_EXPR:
8995 target = expand_vec_shift_expr (ops, target);
8996 return target;
8999 case VEC_UNPACK_HI_EXPR:
9000 case VEC_UNPACK_LO_EXPR:
9002 op0 = expand_normal (treeop0);
9003 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9004 target, unsignedp);
9005 gcc_assert (temp);
9006 return temp;
9009 case VEC_UNPACK_FLOAT_HI_EXPR:
9010 case VEC_UNPACK_FLOAT_LO_EXPR:
9012 op0 = expand_normal (treeop0);
9013 /* The signedness is determined from the input operand. */
9014 temp = expand_widen_pattern_expr
9015 (ops, op0, NULL_RTX, NULL_RTX,
9016 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9018 gcc_assert (temp);
9019 return temp;
9022 case VEC_WIDEN_MULT_HI_EXPR:
9023 case VEC_WIDEN_MULT_LO_EXPR:
9024 case VEC_WIDEN_MULT_EVEN_EXPR:
9025 case VEC_WIDEN_MULT_ODD_EXPR:
9026 case VEC_WIDEN_LSHIFT_HI_EXPR:
9027 case VEC_WIDEN_LSHIFT_LO_EXPR:
9028 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9029 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9030 target, unsignedp);
9031 gcc_assert (target);
9032 return target;
9034 case VEC_PACK_TRUNC_EXPR:
9035 case VEC_PACK_SAT_EXPR:
9036 case VEC_PACK_FIX_TRUNC_EXPR:
9037 mode = TYPE_MODE (TREE_TYPE (treeop0));
9038 goto binop;
9040 case VEC_PERM_EXPR:
9041 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9042 op2 = expand_normal (treeop2);
9044 /* Careful here: if the target doesn't support integral vector modes,
9045 a constant selection vector could wind up smooshed into a normal
9046 integral constant. */
9047 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9049 tree sel_type = TREE_TYPE (treeop2);
9050 enum machine_mode vmode
9051 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9052 TYPE_VECTOR_SUBPARTS (sel_type));
9053 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9054 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9055 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9057 else
9058 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9060 temp = expand_vec_perm (mode, op0, op1, op2, target);
9061 gcc_assert (temp);
9062 return temp;
9064 case DOT_PROD_EXPR:
9066 tree oprnd0 = treeop0;
9067 tree oprnd1 = treeop1;
9068 tree oprnd2 = treeop2;
9069 rtx op2;
9071 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9072 op2 = expand_normal (oprnd2);
9073 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9074 target, unsignedp);
9075 return target;
9078 case REALIGN_LOAD_EXPR:
9080 tree oprnd0 = treeop0;
9081 tree oprnd1 = treeop1;
9082 tree oprnd2 = treeop2;
9083 rtx op2;
9085 this_optab = optab_for_tree_code (code, type, optab_default);
9086 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9087 op2 = expand_normal (oprnd2);
9088 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9089 target, unsignedp);
9090 gcc_assert (temp);
9091 return temp;
9094 case COND_EXPR:
9095 /* A COND_EXPR with its type being VOID_TYPE represents a
9096 conditional jump and is handled in
9097 expand_gimple_cond_expr. */
9098 gcc_assert (!VOID_TYPE_P (type));
9100 /* Note that COND_EXPRs whose type is a structure or union
9101 are required to be constructed to contain assignments of
9102 a temporary variable, so that we can evaluate them here
9103 for side effect only. If type is void, we must do likewise. */
9105 gcc_assert (!TREE_ADDRESSABLE (type)
9106 && !ignore
9107 && TREE_TYPE (treeop1) != void_type_node
9108 && TREE_TYPE (treeop2) != void_type_node);
9110 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9111 if (temp)
9112 return temp;
9114 /* If we are not to produce a result, we have no target. Otherwise,
9115 if a target was specified use it; it will not be used as an
9116 intermediate target unless it is safe. If no target, use a
9117 temporary. */
9119 if (modifier != EXPAND_STACK_PARM
9120 && original_target
9121 && safe_from_p (original_target, treeop0, 1)
9122 && GET_MODE (original_target) == mode
9123 && !MEM_P (original_target))
9124 temp = original_target;
9125 else
9126 temp = assign_temp (type, 0, 1);
9128 do_pending_stack_adjust ();
9129 NO_DEFER_POP;
9130 op0 = gen_label_rtx ();
9131 op1 = gen_label_rtx ();
9132 jumpifnot (treeop0, op0, -1);
9133 store_expr (treeop1, temp,
9134 modifier == EXPAND_STACK_PARM,
9135 false);
9137 emit_jump_insn (gen_jump (op1));
9138 emit_barrier ();
9139 emit_label (op0);
9140 store_expr (treeop2, temp,
9141 modifier == EXPAND_STACK_PARM,
9142 false);
9144 emit_label (op1);
9145 OK_DEFER_POP;
9146 return temp;
9148 case VEC_COND_EXPR:
9149 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9150 return target;
9152 default:
9153 gcc_unreachable ();
9156 /* Here to do an ordinary binary operator. */
9157 binop:
9158 expand_operands (treeop0, treeop1,
9159 subtarget, &op0, &op1, EXPAND_NORMAL);
9160 binop2:
9161 this_optab = optab_for_tree_code (code, type, optab_default);
9162 binop3:
9163 if (modifier == EXPAND_STACK_PARM)
9164 target = 0;
9165 temp = expand_binop (mode, this_optab, op0, op1, target,
9166 unsignedp, OPTAB_LIB_WIDEN);
9167 gcc_assert (temp);
9168 /* Bitwise operations do not need bitfield reduction as we expect their
9169 operands to be properly truncated. */
9170 if (code == BIT_XOR_EXPR
9171 || code == BIT_AND_EXPR
9172 || code == BIT_IOR_EXPR)
9173 return temp;
9174 return REDUCE_BIT_FIELD (temp);
9176 #undef REDUCE_BIT_FIELD
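/* A worked example of the bit-field reduction used above (illustrative):
   adding two values of a 3-bit unsigned bit-field type carried in
   SImode can set bits beyond TYPE_PRECISION (type) == 3, so the sum is
   masked back to 3 bits by reduce_to_bit_field_precision; bitwise
   AND/IOR/XOR are exempt because their operands are already truncated.  */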
9179 /* Return TRUE if expression STMT is suitable for replacement.
9180 Never consider memory loads as replaceable, because those don't ever lead
9181 to constant expressions. */
9183 static bool
9184 stmt_is_replaceable_p (gimple stmt)
9186 if (ssa_is_replaceable_p (stmt))
9188 /* Don't move around loads. */
9189 if (!gimple_assign_single_p (stmt)
9190 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9191 return true;
9193 return false;
9196 rtx
9197 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9198 enum expand_modifier modifier, rtx *alt_rtl)
9200 rtx op0, op1, temp, decl_rtl;
9201 tree type;
9202 int unsignedp;
9203 enum machine_mode mode;
9204 enum tree_code code = TREE_CODE (exp);
9205 rtx subtarget, original_target;
9206 int ignore;
9207 tree context;
9208 bool reduce_bit_field;
9209 location_t loc = EXPR_LOCATION (exp);
9210 struct separate_ops ops;
9211 tree treeop0, treeop1, treeop2;
9212 tree ssa_name = NULL_TREE;
9213 gimple g;
9215 type = TREE_TYPE (exp);
9216 mode = TYPE_MODE (type);
9217 unsignedp = TYPE_UNSIGNED (type);
9219 treeop0 = treeop1 = treeop2 = NULL_TREE;
9220 if (!VL_EXP_CLASS_P (exp))
9221 switch (TREE_CODE_LENGTH (code))
9223 default:
9224 case 3: treeop2 = TREE_OPERAND (exp, 2);
9225 case 2: treeop1 = TREE_OPERAND (exp, 1);
9226 case 1: treeop0 = TREE_OPERAND (exp, 0);
9227 case 0: break;
9229 ops.code = code;
9230 ops.type = type;
9231 ops.op0 = treeop0;
9232 ops.op1 = treeop1;
9233 ops.op2 = treeop2;
9234 ops.location = loc;
9236 ignore = (target == const0_rtx
9237 || ((CONVERT_EXPR_CODE_P (code)
9238 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9239 && TREE_CODE (type) == VOID_TYPE));
9241 /* An operation in what may be a bit-field type needs the
9242 result to be reduced to the precision of the bit-field type,
9243 which is narrower than that of the type's mode. */
9244 reduce_bit_field = (!ignore
9245 && INTEGRAL_TYPE_P (type)
9246 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9248 /* If we are going to ignore this result, we need only do something
9249 if there is a side-effect somewhere in the expression. If there
9250 is, short-circuit the most common cases here. Note that we must
9251 not call expand_expr with anything but const0_rtx in case this
9252 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9254 if (ignore)
9256 if (! TREE_SIDE_EFFECTS (exp))
9257 return const0_rtx;
9259 /* Ensure we reference a volatile object even if value is ignored, but
9260 don't do this if all we are doing is taking its address. */
9261 if (TREE_THIS_VOLATILE (exp)
9262 && TREE_CODE (exp) != FUNCTION_DECL
9263 && mode != VOIDmode && mode != BLKmode
9264 && modifier != EXPAND_CONST_ADDRESS)
9266 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9267 if (MEM_P (temp))
9268 copy_to_reg (temp);
9269 return const0_rtx;
9272 if (TREE_CODE_CLASS (code) == tcc_unary
9273 || code == BIT_FIELD_REF
9274 || code == COMPONENT_REF
9275 || code == INDIRECT_REF)
9276 return expand_expr (treeop0, const0_rtx, VOIDmode,
9277 modifier);
9279 else if (TREE_CODE_CLASS (code) == tcc_binary
9280 || TREE_CODE_CLASS (code) == tcc_comparison
9281 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9283 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9284 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9285 return const0_rtx;
9288 target = 0;
9291 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9292 target = 0;
9294 /* Use subtarget as the target for operand 0 of a binary operation. */
9295 subtarget = get_subtarget (target);
9296 original_target = target;
9298 switch (code)
9300 case LABEL_DECL:
9302 tree function = decl_function_context (exp);
9304 temp = label_rtx (exp);
9305 temp = gen_rtx_LABEL_REF (Pmode, temp);
9307 if (function != current_function_decl
9308 && function != 0)
9309 LABEL_REF_NONLOCAL_P (temp) = 1;
9311 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9312 return temp;
9315 case SSA_NAME:
9316 /* ??? ivopts calls expander, without any preparation from
9317 out-of-ssa. So fake instructions as if this were an access to the
9318 base variable. This unnecessarily allocates a pseudo; see how we can
9319 reuse it if partition base vars have it set already. */
9320 if (!currently_expanding_to_rtl)
9322 tree var = SSA_NAME_VAR (exp);
9323 if (var && DECL_RTL_SET_P (var))
9324 return DECL_RTL (var);
9325 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9326 LAST_VIRTUAL_REGISTER + 1);
9329 g = get_gimple_for_ssa_name (exp);
9330 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9331 if (g == NULL
9332 && modifier == EXPAND_INITIALIZER
9333 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9334 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9335 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9336 g = SSA_NAME_DEF_STMT (exp);
9337 if (g)
9339 rtx r;
9340 location_t saved_loc = curr_insn_location ();
9342 set_curr_insn_location (gimple_location (g));
9343 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9344 tmode, modifier, NULL);
9345 set_curr_insn_location (saved_loc);
9346 if (REG_P (r) && !REG_EXPR (r))
9347 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9348 return r;
9351 ssa_name = exp;
9352 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9353 exp = SSA_NAME_VAR (ssa_name);
9354 goto expand_decl_rtl;
9356 case PARM_DECL:
9357 case VAR_DECL:
9358 /* If a static var's type was incomplete when the decl was written,
9359 but the type is complete now, lay out the decl now. */
9360 if (DECL_SIZE (exp) == 0
9361 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9362 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9363 layout_decl (exp, 0);
9365 /* ... fall through ... */
9367 case FUNCTION_DECL:
9368 case RESULT_DECL:
9369 decl_rtl = DECL_RTL (exp);
9370 expand_decl_rtl:
9371 gcc_assert (decl_rtl);
9372 decl_rtl = copy_rtx (decl_rtl);
9373 /* Record writes to register variables. */
9374 if (modifier == EXPAND_WRITE
9375 && REG_P (decl_rtl)
9376 && HARD_REGISTER_P (decl_rtl))
9377 add_to_hard_reg_set (&crtl->asm_clobbers,
9378 GET_MODE (decl_rtl), REGNO (decl_rtl));
9380 /* Ensure the variable is marked as used even if it doesn't go through
9381 a parser. If it hasn't been used yet, write out an external
9382 definition. */
9383 TREE_USED (exp) = 1;
9385 /* Show we haven't gotten RTL for this yet. */
9386 temp = 0;
9388 /* Variables inherited from containing functions should have
9389 been lowered by this point. */
9390 context = decl_function_context (exp);
9391 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9392 || context == current_function_decl
9393 || TREE_STATIC (exp)
9394 || DECL_EXTERNAL (exp)
9395 /* ??? C++ creates functions that are not TREE_STATIC. */
9396 || TREE_CODE (exp) == FUNCTION_DECL);
9398 /* This is the case of an array whose size is to be determined
9399 from its initializer, while the initializer is still being parsed.
9400 ??? We aren't parsing while expanding anymore. */
9402 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9403 temp = validize_mem (decl_rtl);
9405 /* If DECL_RTL is memory, we are in the normal case and the
9406 address is not valid, get the address into a register. */
9408 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9410 if (alt_rtl)
9411 *alt_rtl = decl_rtl;
9412 decl_rtl = use_anchored_address (decl_rtl);
9413 if (modifier != EXPAND_CONST_ADDRESS
9414 && modifier != EXPAND_SUM
9415 && !memory_address_addr_space_p (DECL_MODE (exp),
9416 XEXP (decl_rtl, 0),
9417 MEM_ADDR_SPACE (decl_rtl)))
9418 temp = replace_equiv_address (decl_rtl,
9419 copy_rtx (XEXP (decl_rtl, 0)));
9422 /* If we got something, return it. But first, set the alignment
9423 if the address is a register. */
9424 if (temp != 0)
9426 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9427 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9429 return temp;
9432 /* If the mode of DECL_RTL does not match that of the decl,
9433 there are two cases: we are dealing with a BLKmode value
9434 that is returned in a register, or we are dealing with
9435 a promoted value. In the latter case, return a SUBREG
9436 of the wanted mode, but mark it so that we know that it
9437 was already extended. */
9438 if (REG_P (decl_rtl)
9439 && DECL_MODE (exp) != BLKmode
9440 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9442 enum machine_mode pmode;
9444 /* Get the signedness to be used for this variable. Ensure we get
9445 the same mode we got when the variable was declared. */
9446 if (code == SSA_NAME
9447 && (g = SSA_NAME_DEF_STMT (ssa_name))
9448 && gimple_code (g) == GIMPLE_CALL)
9450 gcc_assert (!gimple_call_internal_p (g));
9451 pmode = promote_function_mode (type, mode, &unsignedp,
9452 gimple_call_fntype (g),
9453 2);
9455 else
9456 pmode = promote_decl_mode (exp, &unsignedp);
9457 gcc_assert (GET_MODE (decl_rtl) == pmode);
9459 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9460 SUBREG_PROMOTED_VAR_P (temp) = 1;
9461 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9462 return temp;
9465 return decl_rtl;
9467 case INTEGER_CST:
9468 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9469 TREE_INT_CST_HIGH (exp), mode);
9471 return temp;
9473 case VECTOR_CST:
9475 tree tmp = NULL_TREE;
9476 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9477 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9478 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9479 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9480 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9481 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9482 return const_vector_from_tree (exp);
9483 if (GET_MODE_CLASS (mode) == MODE_INT)
9485 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9486 if (type_for_mode)
9487 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9489 if (!tmp)
9491 vec<constructor_elt, va_gc> *v;
9492 unsigned i;
9493 vec_alloc (v, VECTOR_CST_NELTS (exp));
9494 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9495 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9496 tmp = build_constructor (type, v);
9498 return expand_expr (tmp, ignore ? const0_rtx : target,
9499 tmode, modifier);
9502 case CONST_DECL:
9503 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9505 case REAL_CST:
9506 /* If optimized, generate immediate CONST_DOUBLE
9507 which will be turned into memory by reload if necessary.
9509 We used to force a register so that loop.c could see it. But
9510 this does not allow gen_* patterns to perform optimizations with
9511 the constants. It also produces two insns in cases like "x = 1.0;".
9512 On most machines, floating-point constants are not permitted in
9513 many insns, so we'd end up copying it to a register in any case.
9515 Now, we do the copying in expand_binop, if appropriate. */
9516 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9517 TYPE_MODE (TREE_TYPE (exp)));
9519 case FIXED_CST:
9520 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9521 TYPE_MODE (TREE_TYPE (exp)));
9523 case COMPLEX_CST:
9524 /* Handle evaluating a complex constant in a CONCAT target. */
9525 if (original_target && GET_CODE (original_target) == CONCAT)
9527 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9528 rtx rtarg, itarg;
9530 rtarg = XEXP (original_target, 0);
9531 itarg = XEXP (original_target, 1);
9533 /* Move the real and imaginary parts separately. */
9534 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9535 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9537 if (op0 != rtarg)
9538 emit_move_insn (rtarg, op0);
9539 if (op1 != itarg)
9540 emit_move_insn (itarg, op1);
9542 return original_target;
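/* For example, expanding the constant 1.0 + 2.0i into a CONCAT target of
   the shape (concat:DC (reg:DF 100) (reg:DF 101)) just emits one scalar
   move per part; the register numbers are purely illustrative.  */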
9545 /* ... fall through ... */
9547 case STRING_CST:
9548 temp = expand_expr_constant (exp, 1, modifier);
9550 /* temp contains a constant address.
9551 On RISC machines where a constant address isn't valid,
9552 make some insns to get that address into a register. */
9553 if (modifier != EXPAND_CONST_ADDRESS
9554 && modifier != EXPAND_INITIALIZER
9555 && modifier != EXPAND_SUM
9556 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9557 MEM_ADDR_SPACE (temp)))
9558 return replace_equiv_address (temp,
9559 copy_rtx (XEXP (temp, 0)));
9560 return temp;
9562 case SAVE_EXPR:
9564 tree val = treeop0;
9565 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9567 if (!SAVE_EXPR_RESOLVED_P (exp))
9569 /* We can indeed still hit this case, typically via builtin
9570 expanders calling save_expr immediately before expanding
9571 something. Assume this means that we only have to deal
9572 with non-BLKmode values. */
9573 gcc_assert (GET_MODE (ret) != BLKmode);
9575 val = build_decl (curr_insn_location (),
9576 VAR_DECL, NULL, TREE_TYPE (exp));
9577 DECL_ARTIFICIAL (val) = 1;
9578 DECL_IGNORED_P (val) = 1;
9579 treeop0 = val;
9580 TREE_OPERAND (exp, 0) = treeop0;
9581 SAVE_EXPR_RESOLVED_P (exp) = 1;
9583 if (!CONSTANT_P (ret))
9584 ret = copy_to_reg (ret);
9585 SET_DECL_RTL (val, ret);
9588 return ret;
9592 case CONSTRUCTOR:
9593 /* If we don't need the result, just ensure we evaluate any
9594 subexpressions. */
9595 if (ignore)
9597 unsigned HOST_WIDE_INT idx;
9598 tree value;
9600 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9601 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9603 return const0_rtx;
9606 return expand_constructor (exp, target, modifier, false);
9608 case TARGET_MEM_REF:
9610 addr_space_t as
9611 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9612 enum insn_code icode;
9613 unsigned int align;
9615 op0 = addr_for_mem_ref (exp, as, true);
9616 op0 = memory_address_addr_space (mode, op0, as);
9617 temp = gen_rtx_MEM (mode, op0);
9618 set_mem_attributes (temp, exp, 0);
9619 set_mem_addr_space (temp, as);
9620 align = get_object_alignment (exp);
9621 if (modifier != EXPAND_WRITE
9622 && modifier != EXPAND_MEMORY
9623 && mode != BLKmode
9624 && align < GET_MODE_ALIGNMENT (mode)
9625 /* If the target does not have special handling for unaligned
9626 loads of mode then it can use regular moves for them. */
9627 && ((icode = optab_handler (movmisalign_optab, mode))
9628 != CODE_FOR_nothing))
9630 struct expand_operand ops[2];
9632 /* We've already validated the memory, and we're creating a
9633 new pseudo destination. The predicates really can't fail,
9634 nor can the generator. */
9635 create_output_operand (&ops[0], NULL_RTX, mode);
9636 create_fixed_operand (&ops[1], temp);
9637 expand_insn (icode, 2, ops);
9638 temp = ops[0].value;
9640 return temp;
9643 case MEM_REF:
9645 addr_space_t as
9646 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9647 enum machine_mode address_mode;
9648 tree base = TREE_OPERAND (exp, 0);
9649 gimple def_stmt;
9650 enum insn_code icode;
9651 unsigned align;
9652 /* Handle expansion of non-aliased memory with non-BLKmode. That
9653 might end up in a register. */
9654 if (mem_ref_refers_to_non_mem_p (exp))
9656 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9657 base = TREE_OPERAND (base, 0);
9658 if (offset == 0
9659 && tree_fits_uhwi_p (TYPE_SIZE (type))
9660 && (GET_MODE_BITSIZE (DECL_MODE (base))
9661 == TREE_INT_CST_LOW (TYPE_SIZE (type))))
9662 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9663 target, tmode, modifier);
9664 if (TYPE_MODE (type) == BLKmode)
9666 temp = assign_stack_temp (DECL_MODE (base),
9667 GET_MODE_SIZE (DECL_MODE (base)));
9668 store_expr (base, temp, 0, false);
9669 temp = adjust_address (temp, BLKmode, offset);
9670 set_mem_size (temp, int_size_in_bytes (type));
9671 return temp;
9673 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9674 bitsize_int (offset * BITS_PER_UNIT));
9675 return expand_expr (exp, target, tmode, modifier);
9677 address_mode = targetm.addr_space.address_mode (as);
9678 base = TREE_OPERAND (exp, 0);
9679 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9681 tree mask = gimple_assign_rhs2 (def_stmt);
9682 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9683 gimple_assign_rhs1 (def_stmt), mask);
9684 TREE_OPERAND (exp, 0) = base;
9686 align = get_object_alignment (exp);
9687 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9688 op0 = memory_address_addr_space (mode, op0, as);
9689 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9691 rtx off
9692 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9693 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9694 op0 = memory_address_addr_space (mode, op0, as);
9696 temp = gen_rtx_MEM (mode, op0);
9697 set_mem_attributes (temp, exp, 0);
9698 set_mem_addr_space (temp, as);
9699 if (TREE_THIS_VOLATILE (exp))
9700 MEM_VOLATILE_P (temp) = 1;
9701 if (modifier != EXPAND_WRITE
9702 && modifier != EXPAND_MEMORY
9703 && mode != BLKmode
9704 && align < GET_MODE_ALIGNMENT (mode))
9706 if ((icode = optab_handler (movmisalign_optab, mode))
9707 != CODE_FOR_nothing)
9709 struct expand_operand ops[2];
9711 /* We've already validated the memory, and we're creating a
9712 new pseudo destination. The predicates really can't fail,
9713 nor can the generator. */
9714 create_output_operand (&ops[0], NULL_RTX, mode);
9715 create_fixed_operand (&ops[1], temp);
9716 expand_insn (icode, 2, ops);
9717 temp = ops[0].value;
9719 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9720 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9721 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9722 (modifier == EXPAND_STACK_PARM
9723 ? NULL_RTX : target),
9724 mode, mode);
9726 return temp;
9729 case ARRAY_REF:
9732 tree array = treeop0;
9733 tree index = treeop1;
9734 tree init;
9736 /* Fold an expression like: "foo"[2].
9737 This is not done in fold so it won't happen inside &.
9738 Don't fold if this is for wide characters since it's too
9739 difficult to do correctly and this is a very rare case. */
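/* So with a narrow character type, "foo"[2] is read directly out of the
   STRING_CST by fold_read_from_constant_string and expands to the
   constant 'o'; no memory reference is generated for it.  */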
9741 if (modifier != EXPAND_CONST_ADDRESS
9742 && modifier != EXPAND_INITIALIZER
9743 && modifier != EXPAND_MEMORY)
9745 tree t = fold_read_from_constant_string (exp);
9747 if (t)
9748 return expand_expr (t, target, tmode, modifier);
9751 /* If this is a constant index into a constant array,
9752 just get the value from the array. Handle both the cases when
9753 we have an explicit constructor and when our operand is a variable
9754 that was declared const. */
9756 if (modifier != EXPAND_CONST_ADDRESS
9757 && modifier != EXPAND_INITIALIZER
9758 && modifier != EXPAND_MEMORY
9759 && TREE_CODE (array) == CONSTRUCTOR
9760 && ! TREE_SIDE_EFFECTS (array)
9761 && TREE_CODE (index) == INTEGER_CST)
9763 unsigned HOST_WIDE_INT ix;
9764 tree field, value;
9766 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9767 field, value)
9768 if (tree_int_cst_equal (field, index))
9770 if (!TREE_SIDE_EFFECTS (value))
9771 return expand_expr (fold (value), target, tmode, modifier);
9772 break;
9776 else if (optimize >= 1
9777 && modifier != EXPAND_CONST_ADDRESS
9778 && modifier != EXPAND_INITIALIZER
9779 && modifier != EXPAND_MEMORY
9780 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9781 && TREE_CODE (index) == INTEGER_CST
9782 && (TREE_CODE (array) == VAR_DECL
9783 || TREE_CODE (array) == CONST_DECL)
9784 && (init = ctor_for_folding (array)) != error_mark_node)
9786 if (TREE_CODE (init) == CONSTRUCTOR)
9788 unsigned HOST_WIDE_INT ix;
9789 tree field, value;
9791 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9792 field, value)
9793 if (tree_int_cst_equal (field, index))
9795 if (TREE_SIDE_EFFECTS (value))
9796 break;
9798 if (TREE_CODE (value) == CONSTRUCTOR)
9800 /* If VALUE is a CONSTRUCTOR, this
9801 optimization is only useful if
9802 this doesn't store the CONSTRUCTOR
9803 into memory. If it does, it is more
9804 efficient to just load the data from
9805 the array directly. */
9806 rtx ret = expand_constructor (value, target,
9807 modifier, true);
9808 if (ret == NULL_RTX)
9809 break;
9812 return
9813 expand_expr (fold (value), target, tmode, modifier);
9816 else if (TREE_CODE (init) == STRING_CST)
9818 tree low_bound = array_ref_low_bound (exp);
9819 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9821 /* Optimize the special case of a zero lower bound.
9823 We convert the lower bound to sizetype to avoid problems
9824 with constant folding. E.g. suppose the lower bound is
9825 1 and its mode is QI. Without the conversion
9826 (ARRAY + (INDEX - (unsigned char)1))
9827 becomes
9828 (ARRAY + (-(unsigned char)1) + INDEX)
9829 which becomes
9830 (ARRAY + 255 + INDEX). Oops! */
9831 if (!integer_zerop (low_bound))
9832 index1 = size_diffop_loc (loc, index1,
9833 fold_convert_loc (loc, sizetype,
9834 low_bound));
9836 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9838 tree type = TREE_TYPE (TREE_TYPE (init));
9839 enum machine_mode mode = TYPE_MODE (type);
9841 if (GET_MODE_CLASS (mode) == MODE_INT
9842 && GET_MODE_SIZE (mode) == 1)
9843 return gen_int_mode (TREE_STRING_POINTER (init)
9844 [TREE_INT_CST_LOW (index1)],
9845 mode);
9850 goto normal_inner_ref;
9852 case COMPONENT_REF:
9853 /* If the operand is a CONSTRUCTOR, we can just extract the
9854 appropriate field if it is present. */
9855 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9857 unsigned HOST_WIDE_INT idx;
9858 tree field, value;
9860 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9861 idx, field, value)
9862 if (field == treeop1
9863 /* We can normally use the value of the field in the
9864 CONSTRUCTOR. However, if this is a bitfield in
9865 an integral mode that we can fit in a HOST_WIDE_INT,
9866 we must mask only the number of bits in the bitfield,
9867 since this is done implicitly by the constructor. If
9868 the bitfield does not meet either of those conditions,
9869 we can't do this optimization. */
9870 && (! DECL_BIT_FIELD (field)
9871 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9872 && (GET_MODE_PRECISION (DECL_MODE (field))
9873 <= HOST_BITS_PER_WIDE_INT))))
9875 if (DECL_BIT_FIELD (field)
9876 && modifier == EXPAND_STACK_PARM)
9877 target = 0;
9878 op0 = expand_expr (value, target, tmode, modifier);
9879 if (DECL_BIT_FIELD (field))
9881 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9882 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9884 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9886 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9887 imode);
9888 op0 = expand_and (imode, op0, op1, target);
9890 else
9892 int count = GET_MODE_PRECISION (imode) - bitsize;
9894 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9895 target, 0);
9896 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9897 target, 0);
9901 return op0;
9904 goto normal_inner_ref;
9906 case BIT_FIELD_REF:
9907 case ARRAY_RANGE_REF:
9908 normal_inner_ref:
9910 enum machine_mode mode1, mode2;
9911 HOST_WIDE_INT bitsize, bitpos;
9912 tree offset;
9913 int volatilep = 0, must_force_mem;
9914 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9915 &mode1, &unsignedp, &volatilep, true);
9916 rtx orig_op0, memloc;
9917 bool mem_attrs_from_type = false;
9919 /* If we got back the original object, something is wrong. Perhaps
9920 we are evaluating an expression too early. In any event, don't
9921 infinitely recurse. */
9922 gcc_assert (tem != exp);
9924 /* If TEM's type is a union of variable size, pass TARGET to the inner
9925 computation, since it will need a temporary and TARGET is known
9926 to be usable as one. This occurs in unchecked conversion in Ada. */
9927 orig_op0 = op0
9928 = expand_expr (tem,
9929 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9930 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9931 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9932 != INTEGER_CST)
9933 && modifier != EXPAND_STACK_PARM
9934 ? target : NULL_RTX),
9935 VOIDmode,
9936 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9938 /* If the bitfield is volatile, we want to access it in the
9939 field's mode, not the computed mode.
9940 If a MEM has VOIDmode (external with incomplete type),
9941 use BLKmode for it instead. */
9942 if (MEM_P (op0))
9944 if (volatilep && flag_strict_volatile_bitfields > 0)
9945 op0 = adjust_address (op0, mode1, 0);
9946 else if (GET_MODE (op0) == VOIDmode)
9947 op0 = adjust_address (op0, BLKmode, 0);
9950 mode2
9951 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9953 /* If we have either an offset, a BLKmode result, or a reference
9954 outside the underlying object, we must force it to memory.
9955 Such a case can occur in Ada if we have unchecked conversion
9956 of an expression from a scalar type to an aggregate type or
9957 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9958 passed a partially uninitialized object or a view-conversion
9959 to a larger size. */
9960 must_force_mem = (offset
9961 || mode1 == BLKmode
9962 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9964 /* Handle CONCAT first. */
9965 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9967 if (bitpos == 0
9968 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9969 return op0;
9970 if (bitpos == 0
9971 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9972 && bitsize)
9974 op0 = XEXP (op0, 0);
9975 mode2 = GET_MODE (op0);
9977 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9978 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9979 && bitpos
9980 && bitsize)
9982 op0 = XEXP (op0, 1);
9983 bitpos = 0;
9984 mode2 = GET_MODE (op0);
9986 else
9987 /* Otherwise force into memory. */
9988 must_force_mem = 1;
9991 /* If this is a constant, put it in a register if it is a legitimate
9992 constant and we don't need a memory reference. */
9993 if (CONSTANT_P (op0)
9994 && mode2 != BLKmode
9995 && targetm.legitimate_constant_p (mode2, op0)
9996 && !must_force_mem)
9997 op0 = force_reg (mode2, op0);
9999 /* Otherwise, if this is a constant, try to force it to the constant
10000 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10001 is a legitimate constant. */
10002 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10003 op0 = validize_mem (memloc);
10005 /* Otherwise, if this is a constant or the object is not in memory
10006 and needs to be, put it there. */
10007 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10009 tree nt = build_qualified_type (TREE_TYPE (tem),
10010 (TYPE_QUALS (TREE_TYPE (tem))
10011 | TYPE_QUAL_CONST));
10012 memloc = assign_temp (nt, 1, 1);
10013 emit_move_insn (memloc, op0);
10014 op0 = memloc;
10015 mem_attrs_from_type = true;
10018 if (offset)
10020 enum machine_mode address_mode;
10021 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10022 EXPAND_SUM);
10024 gcc_assert (MEM_P (op0));
10026 address_mode = get_address_mode (op0);
10027 if (GET_MODE (offset_rtx) != address_mode)
10028 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10030 if (GET_MODE (op0) == BLKmode
10031 /* A constant address in OP0 can have VOIDmode, we must
10032 not try to call force_reg in that case. */
10033 && GET_MODE (XEXP (op0, 0)) != VOIDmode
10034 && bitsize != 0
10035 && (bitpos % bitsize) == 0
10036 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10037 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10039 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10040 bitpos = 0;
10043 op0 = offset_address (op0, offset_rtx,
10044 highest_pow2_factor (offset));
10047 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10048 record its alignment as BIGGEST_ALIGNMENT. */
10049 if (MEM_P (op0) && bitpos == 0 && offset != 0
10050 && is_aligning_offset (offset, tem))
10051 set_mem_align (op0, BIGGEST_ALIGNMENT);
10053 /* Don't forget about volatility even if this is a bitfield. */
10054 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10056 if (op0 == orig_op0)
10057 op0 = copy_rtx (op0);
10059 MEM_VOLATILE_P (op0) = 1;
10062 /* In cases where an aligned union has an unaligned object
10063 as a field, we might be extracting a BLKmode value from
10064 an integer-mode (e.g., SImode) object. Handle this case
10065 by doing the extract into an object as wide as the field
10066 (which we know to be the width of a basic mode), then
10067 storing into memory, and changing the mode to BLKmode. */
10068 if (mode1 == VOIDmode
10069 || REG_P (op0) || GET_CODE (op0) == SUBREG
10070 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10071 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10072 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10073 && modifier != EXPAND_CONST_ADDRESS
10074 && modifier != EXPAND_INITIALIZER
10075 && modifier != EXPAND_MEMORY)
10076 /* If the field is volatile, we always want an aligned
10077 access. Do this in the following two situations:
10078 1. the access is not already naturally
10079 aligned, otherwise "normal" (non-bitfield) volatile fields
10080 become non-addressable.
10081 2. the bitsize is narrower than the access size. Need
10082 to extract bitfields from the access. */
10083 || (volatilep && flag_strict_volatile_bitfields > 0
10084 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10085 || (mode1 != BLKmode
10086 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10087 /* If the field isn't aligned enough to fetch as a memref,
10088 fetch it as a bit field. */
10089 || (mode1 != BLKmode
10090 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10091 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10092 || (MEM_P (op0)
10093 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10094 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10095 && modifier != EXPAND_MEMORY
10096 && ((modifier == EXPAND_CONST_ADDRESS
10097 || modifier == EXPAND_INITIALIZER)
10098 ? STRICT_ALIGNMENT
10099 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10100 || (bitpos % BITS_PER_UNIT != 0)))
10101 /* If the type and the field are a constant size and the
10102 size of the type isn't the same size as the bitfield,
10103 we must use bitfield operations. */
10104 || (bitsize >= 0
10105 && TYPE_SIZE (TREE_TYPE (exp))
10106 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10107 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10108 bitsize)))
10110 enum machine_mode ext_mode = mode;
10112 if (ext_mode == BLKmode
10113 && ! (target != 0 && MEM_P (op0)
10114 && MEM_P (target)
10115 && bitpos % BITS_PER_UNIT == 0))
10116 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10118 if (ext_mode == BLKmode)
10120 if (target == 0)
10121 target = assign_temp (type, 1, 1);
10123 if (bitsize == 0)
10124 return target;
10126 /* In this case, BITPOS must start at a byte boundary and
10127 TARGET, if specified, must be a MEM. */
10128 gcc_assert (MEM_P (op0)
10129 && (!target || MEM_P (target))
10130 && !(bitpos % BITS_PER_UNIT));
10132 emit_block_move (target,
10133 adjust_address (op0, VOIDmode,
10134 bitpos / BITS_PER_UNIT),
10135 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10136 / BITS_PER_UNIT),
10137 (modifier == EXPAND_STACK_PARM
10138 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10140 return target;
10143 op0 = validize_mem (op0);
10145 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10146 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10148 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10149 (modifier == EXPAND_STACK_PARM
10150 ? NULL_RTX : target),
10151 ext_mode, ext_mode);
10153 /* If the result is a record type and BITSIZE is narrower than
10154 the mode of OP0, an integral mode, and this is a big endian
10155 machine, we must put the field into the high-order bits. */
10156 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10157 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10158 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10159 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10160 GET_MODE_BITSIZE (GET_MODE (op0))
10161 - bitsize, op0, 1);
10163 /* If the result type is BLKmode, store the data into a temporary
10164 of the appropriate type, but with the mode corresponding to the
10165 mode for the data we have (op0's mode). It's tempting to make
10166 this a constant type, since we know it's only being stored once,
10167 but that can cause problems if we are taking the address of this
10168 COMPONENT_REF because the MEM of any reference via that address
10169 will have flags corresponding to the type, which will not
10170 necessarily be constant. */
10171 if (mode == BLKmode)
10173 rtx new_rtx;
10175 new_rtx = assign_stack_temp_for_type (ext_mode,
10176 GET_MODE_BITSIZE (ext_mode),
10177 type);
10178 emit_move_insn (new_rtx, op0);
10179 op0 = copy_rtx (new_rtx);
10180 PUT_MODE (op0, BLKmode);
10183 return op0;
10186 /* If the result is BLKmode, use that to access the object
10187 now as well. */
10188 if (mode == BLKmode)
10189 mode1 = BLKmode;
10191 /* Get a reference to just this component. */
10192 if (modifier == EXPAND_CONST_ADDRESS
10193 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10194 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10195 else
10196 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10198 if (op0 == orig_op0)
10199 op0 = copy_rtx (op0);
10201 /* If op0 is a temporary because of forcing to memory, pass only the
10202 type to set_mem_attributes so that the original expression is never
10203 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10204 if (mem_attrs_from_type)
10205 set_mem_attributes (op0, type, 0);
10206 else
10207 set_mem_attributes (op0, exp, 0);
10209 if (REG_P (XEXP (op0, 0)))
10210 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10212 MEM_VOLATILE_P (op0) |= volatilep;
10213 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10214 || modifier == EXPAND_CONST_ADDRESS
10215 || modifier == EXPAND_INITIALIZER)
10216 return op0;
10218 if (target == 0)
10219 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10221 convert_move (target, op0, unsignedp);
10222 return target;
10225 case OBJ_TYPE_REF:
10226 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10228 case CALL_EXPR:
10229 /* All valid uses of __builtin_va_arg_pack () are removed during
10230 inlining. */
10231 if (CALL_EXPR_VA_ARG_PACK (exp))
10232 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10234 tree fndecl = get_callee_fndecl (exp), attr;
10236 if (fndecl
10237 && (attr = lookup_attribute ("error",
10238 DECL_ATTRIBUTES (fndecl))) != NULL)
10239 error ("%Kcall to %qs declared with attribute error: %s",
10240 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10241 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10242 if (fndecl
10243 && (attr = lookup_attribute ("warning",
10244 DECL_ATTRIBUTES (fndecl))) != NULL)
10245 warning_at (tree_nonartificial_location (exp),
10246 0, "%Kcall to %qs declared with attribute warning: %s",
10247 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10248 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10250 /* Check for a built-in function. */
10251 if (fndecl && DECL_BUILT_IN (fndecl))
10253 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10254 return expand_builtin (exp, target, subtarget, tmode, ignore);
10257 return expand_call (exp, target, ignore);
10259 case VIEW_CONVERT_EXPR:
10260 op0 = NULL_RTX;
10262 /* If we are converting to BLKmode, try to avoid an intermediate
10263 temporary by fetching an inner memory reference. */
10264 if (mode == BLKmode
10265 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10266 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10267 && handled_component_p (treeop0))
10269 enum machine_mode mode1;
10270 HOST_WIDE_INT bitsize, bitpos;
10271 tree offset;
10272 int unsignedp;
10273 int volatilep = 0;
10274 tree tem
10275 = get_inner_reference (treeop0, &bitsize, &bitpos,
10276 &offset, &mode1, &unsignedp, &volatilep,
10277 true);
10278 rtx orig_op0;
10280 /* ??? We should work harder and deal with non-zero offsets. */
10281 if (!offset
10282 && (bitpos % BITS_PER_UNIT) == 0
10283 && bitsize >= 0
10284 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10286 /* See the normal_inner_ref case for the rationale. */
10287 orig_op0
10288 = expand_expr (tem,
10289 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10290 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10291 != INTEGER_CST)
10292 && modifier != EXPAND_STACK_PARM
10293 ? target : NULL_RTX),
10294 VOIDmode,
10295 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10297 if (MEM_P (orig_op0))
10299 op0 = orig_op0;
10301 /* Get a reference to just this component. */
10302 if (modifier == EXPAND_CONST_ADDRESS
10303 || modifier == EXPAND_SUM
10304 || modifier == EXPAND_INITIALIZER)
10305 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10306 else
10307 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10309 if (op0 == orig_op0)
10310 op0 = copy_rtx (op0);
10312 set_mem_attributes (op0, treeop0, 0);
10313 if (REG_P (XEXP (op0, 0)))
10314 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10316 MEM_VOLATILE_P (op0) |= volatilep;
10321 if (!op0)
10322 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10324 /* If the input and output modes are both the same, we are done. */
10325 if (mode == GET_MODE (op0))
10327 /* If neither mode is BLKmode, and both modes are the same size
10328 then we can use gen_lowpart. */
10329 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10330 && (GET_MODE_PRECISION (mode)
10331 == GET_MODE_PRECISION (GET_MODE (op0)))
10332 && !COMPLEX_MODE_P (GET_MODE (op0)))
10334 if (GET_CODE (op0) == SUBREG)
10335 op0 = force_reg (GET_MODE (op0), op0);
10336 temp = gen_lowpart_common (mode, op0);
10337 if (temp)
10338 op0 = temp;
10339 else
10341 if (!REG_P (op0) && !MEM_P (op0))
10342 op0 = force_reg (GET_MODE (op0), op0);
10343 op0 = gen_lowpart (mode, op0);
10346 /* If both types are integral, convert from one mode to the other. */
10347 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10348 op0 = convert_modes (mode, GET_MODE (op0), op0,
10349 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10350 /* As a last resort, spill op0 to memory, and reload it in a
10351 different mode. */
10352 else if (!MEM_P (op0))
10354 /* If the operand is not a MEM, force it into memory. Since we
10355 are going to be changing the mode of the MEM, don't call
10356 force_const_mem for constants because we don't allow pool
10357 constants to change mode. */
10358 tree inner_type = TREE_TYPE (treeop0);
10360 gcc_assert (!TREE_ADDRESSABLE (exp));
10362 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10363 target
10364 = assign_stack_temp_for_type
10365 (TYPE_MODE (inner_type),
10366 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10368 emit_move_insn (target, op0);
10369 op0 = target;
10372 /* At this point, OP0 is in the correct mode. If the output type is
10373 such that the operand is known to be aligned, indicate that it is.
10374 Otherwise, we need only be concerned about alignment for non-BLKmode
10375 results. */
10376 if (MEM_P (op0))
10378 enum insn_code icode;
10380 if (TYPE_ALIGN_OK (type))
10382 /* ??? Copying the MEM without substantially changing it might
10383 run afoul of the code handling volatile memory references in
10384 store_expr, which assumes that TARGET is returned unmodified
10385 if it has been used. */
10386 op0 = copy_rtx (op0);
10387 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10389 else if (mode != BLKmode
10390 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10391 /* If the target does have special handling for unaligned
10392 loads of mode then use them. */
10393 && ((icode = optab_handler (movmisalign_optab, mode))
10394 != CODE_FOR_nothing))
10396 rtx reg, insn;
10398 op0 = adjust_address (op0, mode, 0);
10399 /* We've already validated the memory, and we're creating a
10400 new pseudo destination. The predicates really can't
10401 fail. */
10402 reg = gen_reg_rtx (mode);
10404 /* Nor can the insn generator. */
10405 insn = GEN_FCN (icode) (reg, op0);
10406 emit_insn (insn);
10407 return reg;
10409 else if (STRICT_ALIGNMENT
10410 && mode != BLKmode
10411 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10413 tree inner_type = TREE_TYPE (treeop0);
10414 HOST_WIDE_INT temp_size
10415 = MAX (int_size_in_bytes (inner_type),
10416 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10417 rtx new_rtx
10418 = assign_stack_temp_for_type (mode, temp_size, type);
10419 rtx new_with_op0_mode
10420 = adjust_address (new_rtx, GET_MODE (op0), 0);
10422 gcc_assert (!TREE_ADDRESSABLE (exp));
10424 if (GET_MODE (op0) == BLKmode)
10425 emit_block_move (new_with_op0_mode, op0,
10426 GEN_INT (GET_MODE_SIZE (mode)),
10427 (modifier == EXPAND_STACK_PARM
10428 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10429 else
10430 emit_move_insn (new_with_op0_mode, op0);
10432 op0 = new_rtx;
10435 op0 = adjust_address (op0, mode, 0);
10438 return op0;
10440 case MODIFY_EXPR:
10442 tree lhs = treeop0;
10443 tree rhs = treeop1;
10444 gcc_assert (ignore);
10446 /* Check for |= or &= of a bitfield of size one into another bitfield
10447 of size 1. In this case, (unless we need the result of the
10448 assignment) we can do this more efficiently with a
10449 test followed by an assignment, if necessary.
10451 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10452 things change so we do, this code should be enhanced to
10453 support it. */
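/* Sketch of the transformation, with made-up one-bit fields: "x.a |= y.b"
   is emitted roughly as

	if (y.b)
	  x.a = 1;

   and "x.a &= y.b" as

	if (!y.b)
	  x.a = 0;

   so only a test plus a store of a known constant is needed, instead of
   computing the full bitwise result.  */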
10454 if (TREE_CODE (lhs) == COMPONENT_REF
10455 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10456 || TREE_CODE (rhs) == BIT_AND_EXPR)
10457 && TREE_OPERAND (rhs, 0) == lhs
10458 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10459 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10460 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10462 rtx label = gen_label_rtx ();
10463 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10464 do_jump (TREE_OPERAND (rhs, 1),
10465 value ? label : 0,
10466 value ? 0 : label, -1);
10467 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10468 false);
10469 do_pending_stack_adjust ();
10470 emit_label (label);
10471 return const0_rtx;
10474 expand_assignment (lhs, rhs, false);
10475 return const0_rtx;
10478 case ADDR_EXPR:
10479 return expand_expr_addr_expr (exp, target, tmode, modifier);
10481 case REALPART_EXPR:
10482 op0 = expand_normal (treeop0);
10483 return read_complex_part (op0, false);
10485 case IMAGPART_EXPR:
10486 op0 = expand_normal (treeop0);
10487 return read_complex_part (op0, true);
10489 case RETURN_EXPR:
10490 case LABEL_EXPR:
10491 case GOTO_EXPR:
10492 case SWITCH_EXPR:
10493 case ASM_EXPR:
10494 /* Expanded in cfgexpand.c. */
10495 gcc_unreachable ();
10497 case TRY_CATCH_EXPR:
10498 case CATCH_EXPR:
10499 case EH_FILTER_EXPR:
10500 case TRY_FINALLY_EXPR:
10501 /* Lowered by tree-eh.c. */
10502 gcc_unreachable ();
10504 case WITH_CLEANUP_EXPR:
10505 case CLEANUP_POINT_EXPR:
10506 case TARGET_EXPR:
10507 case CASE_LABEL_EXPR:
10508 case VA_ARG_EXPR:
10509 case BIND_EXPR:
10510 case INIT_EXPR:
10511 case CONJ_EXPR:
10512 case COMPOUND_EXPR:
10513 case PREINCREMENT_EXPR:
10514 case PREDECREMENT_EXPR:
10515 case POSTINCREMENT_EXPR:
10516 case POSTDECREMENT_EXPR:
10517 case LOOP_EXPR:
10518 case EXIT_EXPR:
10519 case COMPOUND_LITERAL_EXPR:
10520 /* Lowered by gimplify.c. */
10521 gcc_unreachable ();
10523 case FDESC_EXPR:
10524 /* Function descriptors are not valid except as
10525 initialization constants, and should not be expanded. */
10526 gcc_unreachable ();
10528 case WITH_SIZE_EXPR:
10529 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10530 have pulled out the size to use in whatever context it needed. */
10531 return expand_expr_real (treeop0, original_target, tmode,
10532 modifier, alt_rtl);
10534 default:
10535 return expand_expr_real_2 (&ops, target, tmode, modifier);
10539 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10540 signedness of TYPE), possibly returning the result in TARGET. */
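/* A worked example, assuming a 32-bit SImode: for a bit-field type of
   precision 3, an unsigned EXP is reduced by masking with 7, while a
   signed EXP is shifted left by 32 - 3 = 29 bits and arithmetically
   shifted back, so the value 5 reduces to -3.  */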
10541 static rtx
10542 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10544 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10545 if (target && GET_MODE (target) != GET_MODE (exp))
10546 target = 0;
10547 /* For constant values, reduce using build_int_cst_type. */
10548 if (CONST_INT_P (exp))
10550 HOST_WIDE_INT value = INTVAL (exp);
10551 tree t = build_int_cst_type (type, value);
10552 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10554 else if (TYPE_UNSIGNED (type))
10556 rtx mask = immed_double_int_const (double_int::mask (prec),
10557 GET_MODE (exp));
10558 return expand_and (GET_MODE (exp), exp, mask, target);
10560 else
10562 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10563 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10564 exp, count, target, 0);
10565 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10566 exp, count, target, 0);
10570 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10571 when applied to the address of EXP produces an address known to be
10572 aligned more than BIGGEST_ALIGNMENT. */
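/* Informally, the shape recognized below is an offset of the form

	(- (T) &EXP) & (ALIGN - 1)

   where T is some integer type, ALIGN is a power of two and the mask
   ALIGN - 1 is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such
   an offset to &EXP rounds the address up to an ALIGN boundary.  */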
10574 static int
10575 is_aligning_offset (const_tree offset, const_tree exp)
10577 /* Strip off any conversions. */
10578 while (CONVERT_EXPR_P (offset))
10579 offset = TREE_OPERAND (offset, 0);
10581 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10582 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10583 if (TREE_CODE (offset) != BIT_AND_EXPR
10584 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10585 || compare_tree_int (TREE_OPERAND (offset, 1),
10586 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10587 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10588 return 0;
10590 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10591 It must be NEGATE_EXPR. Then strip any more conversions. */
10592 offset = TREE_OPERAND (offset, 0);
10593 while (CONVERT_EXPR_P (offset))
10594 offset = TREE_OPERAND (offset, 0);
10596 if (TREE_CODE (offset) != NEGATE_EXPR)
10597 return 0;
10599 offset = TREE_OPERAND (offset, 0);
10600 while (CONVERT_EXPR_P (offset))
10601 offset = TREE_OPERAND (offset, 0);
10603 /* This must now be the address of EXP. */
10604 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10607 /* Return the tree node if ARG corresponds to a string constant, or zero
10608 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10609 in bytes within the string that ARG is accessing. The type of the
10610 offset will be `sizetype'. */
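/* Illustrative calls (variable names made up): for ARG == &"hello"[2]
   this returns the STRING_CST "hello" and sets *PTR_OFFSET to 2; for
   "const char buf[6]" initialized to "hello" and ARG == buf + i it
   returns the initializer "hello" with *PTR_OFFSET set to i, subject to
   the size checks below.  */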
10612 tree
10613 string_constant (tree arg, tree *ptr_offset)
10615 tree array, offset, lower_bound;
10616 STRIP_NOPS (arg);
10618 if (TREE_CODE (arg) == ADDR_EXPR)
10620 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10622 *ptr_offset = size_zero_node;
10623 return TREE_OPERAND (arg, 0);
10625 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10627 array = TREE_OPERAND (arg, 0);
10628 offset = size_zero_node;
10630 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10632 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10633 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10634 if (TREE_CODE (array) != STRING_CST
10635 && TREE_CODE (array) != VAR_DECL)
10636 return 0;
10638 /* Check if the array has a nonzero lower bound. */
10639 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10640 if (!integer_zerop (lower_bound))
10642 /* If the offset and base aren't both constants, return 0. */
10643 if (TREE_CODE (lower_bound) != INTEGER_CST)
10644 return 0;
10645 if (TREE_CODE (offset) != INTEGER_CST)
10646 return 0;
10647 /* Adjust offset by the lower bound. */
10648 offset = size_diffop (fold_convert (sizetype, offset),
10649 fold_convert (sizetype, lower_bound));
10652 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10654 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10655 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10656 if (TREE_CODE (array) != ADDR_EXPR)
10657 return 0;
10658 array = TREE_OPERAND (array, 0);
10659 if (TREE_CODE (array) != STRING_CST
10660 && TREE_CODE (array) != VAR_DECL)
10661 return 0;
10663 else
10664 return 0;
10666 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10668 tree arg0 = TREE_OPERAND (arg, 0);
10669 tree arg1 = TREE_OPERAND (arg, 1);
10671 STRIP_NOPS (arg0);
10672 STRIP_NOPS (arg1);
10674 if (TREE_CODE (arg0) == ADDR_EXPR
10675 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10676 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10678 array = TREE_OPERAND (arg0, 0);
10679 offset = arg1;
10681 else if (TREE_CODE (arg1) == ADDR_EXPR
10682 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10683 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10685 array = TREE_OPERAND (arg1, 0);
10686 offset = arg0;
10688 else
10689 return 0;
10691 else
10692 return 0;
10694 if (TREE_CODE (array) == STRING_CST)
10696 *ptr_offset = fold_convert (sizetype, offset);
10697 return array;
10699 else if (TREE_CODE (array) == VAR_DECL
10700 || TREE_CODE (array) == CONST_DECL)
10702 int length;
10703 tree init = ctor_for_folding (array);
10705 /* Variables initialized to string literals can be handled too. */
10706 if (init == error_mark_node
10707 || !init
10708 || TREE_CODE (init) != STRING_CST)
10709 return 0;
10711 /* Avoid const char foo[4] = "abcde"; */
10712 if (DECL_SIZE_UNIT (array) == NULL_TREE
10713 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10714 || (length = TREE_STRING_LENGTH (init)) <= 0
10715 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10716 return 0;
10718 /* If the variable is bigger than the string literal, OFFSET must be constant
10719 and within the bounds of the string literal. */
10720 offset = fold_convert (sizetype, offset);
10721 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10722 && (! tree_fits_uhwi_p (offset)
10723 || compare_tree_int (offset, length) >= 0))
10724 return 0;
10726 *ptr_offset = offset;
10727 return init;
10730 return 0;
10733 /* Generate code to calculate the exploded expression OPS
10734 using a store-flag instruction and return an rtx for the result.
10735 OPS reflects a comparison.
10737 If TARGET is nonzero, store the result there if convenient.
10739 Return zero if there is no suitable set-flag instruction
10740 available on this machine.
10742 Once expand_expr has been called on the arguments of the comparison,
10743 we are committed to doing the store flag, since it is not safe to
10744 re-evaluate the expression. We emit the store-flag insn by calling
10745 emit_store_flag, but only expand the arguments if we have a reason
10746 to believe that emit_store_flag will be successful. If we think that
10747 it will, but it isn't, we have to simulate the store-flag with a
10748 set/jump/set sequence. */
10750 static rtx
10751 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10753 enum rtx_code code;
10754 tree arg0, arg1, type;
10755 tree tem;
10756 enum machine_mode operand_mode;
10757 int unsignedp;
10758 rtx op0, op1;
10759 rtx subtarget = target;
10760 location_t loc = ops->location;
10762 arg0 = ops->op0;
10763 arg1 = ops->op1;
10765 /* Don't crash if the comparison was erroneous. */
10766 if (arg0 == error_mark_node || arg1 == error_mark_node)
10767 return const0_rtx;
10769 type = TREE_TYPE (arg0);
10770 operand_mode = TYPE_MODE (type);
10771 unsignedp = TYPE_UNSIGNED (type);
10773 /* We won't bother with BLKmode store-flag operations because it would mean
10774 passing a lot of information to emit_store_flag. */
10775 if (operand_mode == BLKmode)
10776 return 0;
10778 /* We won't bother with store-flag operations involving function pointers
10779 when function pointers must be canonicalized before comparisons. */
10780 #ifdef HAVE_canonicalize_funcptr_for_compare
10781 if (HAVE_canonicalize_funcptr_for_compare
10782 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10783 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10784 == FUNCTION_TYPE))
10785 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10786 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10787 == FUNCTION_TYPE))))
10788 return 0;
10789 #endif
10791 STRIP_NOPS (arg0);
10792 STRIP_NOPS (arg1);
10794 /* For vector typed comparisons emit code to generate the desired
10795 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10796 expander for this. */
10797 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10799 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10800 tree if_true = constant_boolean_node (true, ops->type);
10801 tree if_false = constant_boolean_node (false, ops->type);
10802 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10805 /* Get the rtx comparison code to use. We know that EXP is a comparison
10806 operation of some type. Some comparisons against 1 and -1 can be
10807 converted to comparisons with zero. Do so here so that the tests
10808 below will be aware that we have a comparison with zero. These
10809 tests will not catch constants in the first operand, but constants
10810 are rarely passed as the first operand. */
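/* Concretely, a signed "x < 1" is rewritten below as "x <= 0", a signed
   "x > -1" as "x >= 0", and "x >= 1" as "x > 0" (GTU against zero when
   unsigned), so the tests that follow only need to recognize comparisons
   with zero.  */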
10812 switch (ops->code)
10814 case EQ_EXPR:
10815 code = EQ;
10816 break;
10817 case NE_EXPR:
10818 code = NE;
10819 break;
10820 case LT_EXPR:
10821 if (integer_onep (arg1))
10822 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10823 else
10824 code = unsignedp ? LTU : LT;
10825 break;
10826 case LE_EXPR:
10827 if (! unsignedp && integer_all_onesp (arg1))
10828 arg1 = integer_zero_node, code = LT;
10829 else
10830 code = unsignedp ? LEU : LE;
10831 break;
10832 case GT_EXPR:
10833 if (! unsignedp && integer_all_onesp (arg1))
10834 arg1 = integer_zero_node, code = GE;
10835 else
10836 code = unsignedp ? GTU : GT;
10837 break;
10838 case GE_EXPR:
10839 if (integer_onep (arg1))
10840 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10841 else
10842 code = unsignedp ? GEU : GE;
10843 break;
10845 case UNORDERED_EXPR:
10846 code = UNORDERED;
10847 break;
10848 case ORDERED_EXPR:
10849 code = ORDERED;
10850 break;
10851 case UNLT_EXPR:
10852 code = UNLT;
10853 break;
10854 case UNLE_EXPR:
10855 code = UNLE;
10856 break;
10857 case UNGT_EXPR:
10858 code = UNGT;
10859 break;
10860 case UNGE_EXPR:
10861 code = UNGE;
10862 break;
10863 case UNEQ_EXPR:
10864 code = UNEQ;
10865 break;
10866 case LTGT_EXPR:
10867 code = LTGT;
10868 break;
10870 default:
10871 gcc_unreachable ();
10874 /* Put a constant second. */
10875 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10876 || TREE_CODE (arg0) == FIXED_CST)
10878 tem = arg0; arg0 = arg1; arg1 = tem;
10879 code = swap_condition (code);
10882 /* If this is an equality or inequality test of a single bit, we can
10883 do this by shifting the bit being tested to the low-order bit and
10884 masking the result with the constant 1. If the condition was EQ,
10885 we xor it with 1. This does not require an scc insn and is faster
10886 than an scc insn even if we have it.
10888 The code to make this transformation was moved into fold_single_bit_test,
10889 so we just call into the folder and expand its result. */
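/* For example, "(x & 4) != 0" becomes the shift-and-mask form
   "(x >> 2) & 1", and the EQ flavour "(x & 4) == 0" additionally xors
   that result with 1; fold_single_bit_test builds the folded tree and we
   simply expand it here.  */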
10891 if ((code == NE || code == EQ)
10892 && integer_zerop (arg1)
10893 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10895 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10896 if (srcstmt
10897 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10899 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10900 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10901 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10902 gimple_assign_rhs1 (srcstmt),
10903 gimple_assign_rhs2 (srcstmt));
10904 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10905 if (temp)
10906 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10910 if (! get_subtarget (target)
10911 || GET_MODE (subtarget) != operand_mode)
10912 subtarget = 0;
10914 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10916 if (target == 0)
10917 target = gen_reg_rtx (mode);
10919 /* Try a cstore if possible. */
10920 return emit_store_flag_force (target, code, op0, op1,
10921 operand_mode, unsignedp,
10922 (TYPE_PRECISION (ops->type) == 1
10923 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10927 /* Stubs in case we haven't got a casesi insn. */
10928 #ifndef HAVE_casesi
10929 # define HAVE_casesi 0
10930 # define gen_casesi(a, b, c, d, e) (0)
10931 # define CODE_FOR_casesi CODE_FOR_nothing
10932 #endif
10934 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10935 0 otherwise (i.e. if there is no casesi instruction).
10937 DEFAULT_PROBABILITY is the probability of jumping to the default
10938 label. */
10939 int
10940 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10941 rtx table_label, rtx default_label, rtx fallback_label,
10942 int default_probability)
10944 struct expand_operand ops[5];
10945 enum machine_mode index_mode = SImode;
10946 rtx op1, op2, index;
10948 if (! HAVE_casesi)
10949 return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_normal (range);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_normal (index_expr);
      if (default_label)
        emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                                 omode, 1, default_label,
                                 default_probability);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_type = lang_hooks.types.type_for_mode (index_mode, 0);
          index_expr = fold_convert (index_type, index_expr);
        }

      index = expand_normal (index_expr);
    }
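
  /* Either way, INDEX is now an SImode value: in the wide case it was
     first checked against the range in its original mode (RANGERTX < INDEX
     unsigned is the same test as INDEX > RANGE) and only then truncated.  */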

  do_pending_stack_adjust ();

  op1 = expand_normal (minval);
  op2 = expand_normal (range);
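
  /* Operands 1 and 2 use create_convert_operand_from_type, so the minimum
     value and the range are converted from their tree types to whatever
     modes the target's casesi pattern requires when the operands are
     legitimized by expand_jump_insn.  */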

  create_input_operand (&ops[0], index, index_mode);
  create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
  create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
  create_fixed_operand (&ops[3], table_label);
  create_fixed_operand (&ops[4], (default_label
                                  ? default_label
                                  : fallback_label));
  expand_jump_insn (CODE_FOR_casesi, 5, ops);
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);
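
  /* For instance, for cases 5 through 10 the lowered INDEX runs from 0 to 5.
     An original value of 3 yields an INDEX of -2, which is enormous when
     viewed as an unsigned number, so the single GTU comparison above also
     sends values below the minimum to the default label.  */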

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS
    (Pmode,
     gen_rtx_MULT (Pmode, index,
                   gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
     gen_rtx_LABEL_REF (Pmode, table_label));
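
  /* At this point INDEX is the raw address TABLE_LABEL + INDEX * entry size
     (the entry size being GET_MODE_SIZE (CASE_VECTOR_MODE)), i.e. the
     address of the selected dispatch-table entry, before any PIC
     adjustment below.  */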
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();
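
  /* The range value is converted to the index's mode below so that
     do_tablejump can compare it directly against the already-adjusted
     index.  */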
  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);
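
  /* Each element becomes an RTL constant of the vector's inner mode:
     a CONST_DOUBLE for REAL_CSTs, a CONST_FIXED for FIXED_CSTs, and an
     integer constant (CONST_INT or CONST_DOUBLE) otherwise.  */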

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
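
  /* For example, the C++ front end passes "gxx" as LANG, so with DWARF-2
     unwinding the constructed name is "__gxx_personality_v0" and with
     setjmp/longjmp unwinding it is "__gxx_personality_sj0".  */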

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}

#include "gt-expr.h"