gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "insn-attr.h"
38 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
39 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "reload.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "langhooks.h"
47 #include "intl.h"
48 #include "tm_p.h"
49 #include "tree-iterator.h"
50 #include "basic-block.h"
51 #include "tree-ssa-alias.h"
52 #include "internal-fn.h"
53 #include "gimple-expr.h"
54 #include "is-a.h"
55 #include "gimple.h"
56 #include "gimple-ssa.h"
57 #include "cgraph.h"
58 #include "tree-ssanames.h"
59 #include "target.h"
60 #include "common/common-target.h"
61 #include "timevar.h"
62 #include "df.h"
63 #include "diagnostic.h"
64 #include "tree-ssa-live.h"
65 #include "tree-outof-ssa.h"
66 #include "target-globals.h"
67 #include "params.h"
68 #include "tree-ssa-address.h"
69 #include "cfgexpand.h"
70 #include "builtins.h"
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
86 the same indirect address eventually. */
87 int cse_not_expected;
89 /* This structure is used by move_by_pieces to describe the move to
90 be performed. */
91 struct move_by_pieces_d
93 rtx to;
94 rtx to_addr;
95 int autinc_to;
96 int explicit_inc_to;
97 rtx from;
98 rtx from_addr;
99 int autinc_from;
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
103 int reverse;
106 /* This structure is used by store_by_pieces to describe the clear to
107 be performed. */
109 struct store_by_pieces_d
111 rtx to;
112 rtx to_addr;
113 int autinc_to;
114 int explicit_inc_to;
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118 void *constfundata;
119 int reverse;
122 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
123 struct move_by_pieces_d *);
124 static bool block_move_libcall_safe_for_call_parm (void);
125 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
126 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
127 unsigned HOST_WIDE_INT);
128 static tree emit_block_move_libcall_fn (int);
129 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
130 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
131 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
132 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
133 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
134 struct store_by_pieces_d *);
135 static tree clear_storage_libcall_fn (int);
136 static rtx compress_float_constant (rtx, rtx);
137 static rtx get_subtarget (rtx);
138 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
139 HOST_WIDE_INT, enum machine_mode,
140 tree, int, alias_set_type);
141 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
142 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
143 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
144 enum machine_mode, tree, alias_set_type, bool);
146 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
148 static int is_aligning_offset (const_tree, const_tree);
149 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
150 enum expand_modifier);
151 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
152 static rtx do_store_flag (sepops, rtx, enum machine_mode);
153 #ifdef PUSH_ROUNDING
154 static void emit_single_push_insn (enum machine_mode, rtx, tree);
155 #endif
156 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
157 static rtx const_vector_from_tree (tree);
158 static void write_complex_part (rtx, rtx, bool);
160 /* This macro is used to determine whether move_by_pieces should be called
161 to perform a structure copy. */
162 #ifndef MOVE_BY_PIECES_P
163 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
164 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
165 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
166 #endif
168 /* This macro is used to determine whether clear_by_pieces should be
169 called to clear storage. */
170 #ifndef CLEAR_BY_PIECES_P
171 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
172 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
173 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
174 #endif
176 /* This macro is used to determine whether store_by_pieces should be
177 called to "memset" storage with byte values other than zero. */
178 #ifndef SET_BY_PIECES_P
179 #define SET_BY_PIECES_P(SIZE, ALIGN) \
180 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
181 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
182 #endif
184 /* This macro is used to determine whether store_by_pieces should be
185 called to "memcpy" storage when the source is a constant string. */
186 #ifndef STORE_BY_PIECES_P
187 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
188 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
189 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
190 #endif
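/* Illustrative sketch (not part of expr.c): the *_BY_PIECES_P macros above
   gate inline piecewise expansion by comparing the number of pieces needed
   against a target ratio.  The standalone model below mirrors that greedy
   piece-counting; the 8-byte maximum piece, the power-of-two alignment
   assumption and the move_ratio value are hypothetical, not taken from any
   particular target.  */

#include <stdio.h>
#include <stdbool.h>

/* Count how many power-of-two-sized moves are needed to copy LEN bytes
   when the largest usable piece is MAX_PIECE bytes, mirroring the greedy
   loop in move_by_pieces_ninsns.  */
static unsigned int
count_pieces (unsigned long len, unsigned int max_piece)
{
  unsigned int n = 0;
  unsigned int size;

  for (size = max_piece; size >= 1; size /= 2)
    {
      n += len / size;
      len %= size;
    }
  return n;
}

/* Model of MOVE_BY_PIECES_P: expand inline only if the piece count stays
   below MOVE_RATIO.  ALIGN_BYTES is assumed to be a power of two.  */
static bool
use_move_by_pieces (unsigned long len, unsigned int align_bytes,
                    unsigned int move_ratio)
{
  unsigned int max_piece = align_bytes < 8 ? align_bytes : 8;
  return count_pieces (len, max_piece) < move_ratio;
}

int
main (void)
{
  /* 16 aligned bytes take two 8-byte moves; 15 unaligned bytes take 15.  */
  printf ("%u %u\n", count_pieces (16, 8), count_pieces (15, 1));
  printf ("%d\n", use_move_by_pieces (16, 8, 4));   /* 1: expand inline */
  printf ("%d\n", use_move_by_pieces (64, 1, 4));   /* 0: prefer memcpy */
  return 0;
}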
192 /* This is run to set up which modes can be used
193 directly in memory and to initialize the block move optab. It is run
194 at the beginning of compilation and when the target is reinitialized. */
196 void
197 init_expr_target (void)
199 rtx insn, pat;
200 enum machine_mode mode;
201 int num_clobbers;
202 rtx mem, mem1;
203 rtx reg;
205 /* Try indexing by frame ptr and try by stack ptr.
206 It is known that on the Convex the stack ptr isn't a valid index.
207 With luck, one or the other is valid on any machine. */
208 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
209 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
211 /* A scratch register we can modify in-place below to avoid
212 useless RTL allocations. */
213 reg = gen_rtx_REG (VOIDmode, -1);
215 insn = rtx_alloc (INSN);
216 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
217 PATTERN (insn) = pat;
219 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
220 mode = (enum machine_mode) ((int) mode + 1))
222 int regno;
224 direct_load[(int) mode] = direct_store[(int) mode] = 0;
225 PUT_MODE (mem, mode);
226 PUT_MODE (mem1, mode);
227 PUT_MODE (reg, mode);
229 /* See if there is some register that can be used in this mode and
230 directly loaded or stored from memory. */
232 if (mode != VOIDmode && mode != BLKmode)
233 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
234 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
235 regno++)
237 if (! HARD_REGNO_MODE_OK (regno, mode))
238 continue;
240 SET_REGNO (reg, regno);
242 SET_SRC (pat) = mem;
243 SET_DEST (pat) = reg;
244 if (recog (pat, insn, &num_clobbers) >= 0)
245 direct_load[(int) mode] = 1;
247 SET_SRC (pat) = mem1;
248 SET_DEST (pat) = reg;
249 if (recog (pat, insn, &num_clobbers) >= 0)
250 direct_load[(int) mode] = 1;
252 SET_SRC (pat) = reg;
253 SET_DEST (pat) = mem;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_store[(int) mode] = 1;
257 SET_SRC (pat) = reg;
258 SET_DEST (pat) = mem1;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_store[(int) mode] = 1;
264 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
266 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
267 mode = GET_MODE_WIDER_MODE (mode))
269 enum machine_mode srcmode;
270 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
271 srcmode = GET_MODE_WIDER_MODE (srcmode))
273 enum insn_code ic;
275 ic = can_extend_p (mode, srcmode, 0);
276 if (ic == CODE_FOR_nothing)
277 continue;
279 PUT_MODE (mem, srcmode);
281 if (insn_operand_matches (ic, 1, mem))
282 float_extend_from_mem[mode][srcmode] = true;
287 /* This is run at the start of compiling a function. */
289 void
290 init_expr (void)
292 memset (&crtl->expr, 0, sizeof (crtl->expr));
295 /* Copy data from FROM to TO, where the machine modes are not the same.
296 Both modes may be integer, or both may be floating, or both may be
297 fixed-point.
298 UNSIGNEDP should be nonzero if FROM is an unsigned type.
299 This causes zero-extension instead of sign-extension. */
301 void
302 convert_move (rtx to, rtx from, int unsignedp)
304 enum machine_mode to_mode = GET_MODE (to);
305 enum machine_mode from_mode = GET_MODE (from);
306 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
307 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
308 enum insn_code code;
309 rtx libcall;
311 /* rtx code for making an equivalent value. */
312 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
313 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
316 gcc_assert (to_real == from_real);
317 gcc_assert (to_mode != BLKmode);
318 gcc_assert (from_mode != BLKmode);
320 /* If the source and destination are already the same, then there's
321 nothing to do. */
322 if (to == from)
323 return;
325 /* If FROM is a SUBREG that indicates that we have already done at least
326 the required extension, strip it. We don't handle such SUBREGs as
327 TO here. */
329 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
330 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
331 >= GET_MODE_PRECISION (to_mode))
332 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
333 from = gen_lowpart (to_mode, from), from_mode = to_mode;
335 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
337 if (to_mode == from_mode
338 || (from_mode == VOIDmode && CONSTANT_P (from)))
340 emit_move_insn (to, from);
341 return;
344 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
346 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
348 if (VECTOR_MODE_P (to_mode))
349 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
350 else
351 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
353 emit_move_insn (to, from);
354 return;
357 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
359 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
360 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
361 return;
364 if (to_real)
366 rtx value, insns;
367 convert_optab tab;
369 gcc_assert ((GET_MODE_PRECISION (from_mode)
370 != GET_MODE_PRECISION (to_mode))
371 || (DECIMAL_FLOAT_MODE_P (from_mode)
372 != DECIMAL_FLOAT_MODE_P (to_mode)));
374 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
375 /* Conversion between decimal float and binary float, same size. */
376 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
377 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
378 tab = sext_optab;
379 else
380 tab = trunc_optab;
382 /* Try converting directly if the insn is supported. */
384 code = convert_optab_handler (tab, to_mode, from_mode);
385 if (code != CODE_FOR_nothing)
387 emit_unop_insn (code, to, from,
388 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
389 return;
392 /* Otherwise use a libcall. */
393 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
395 /* Is this conversion implemented yet? */
396 gcc_assert (libcall);
398 start_sequence ();
399 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
400 1, from, from_mode);
401 insns = get_insns ();
402 end_sequence ();
403 emit_libcall_block (insns, to, value,
404 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
405 from)
406 : gen_rtx_FLOAT_EXTEND (to_mode, from));
407 return;
410 /* Handle pointer conversion. */ /* SPEE 900220. */
411 /* Targets are expected to provide conversion insns between PxImode and
412 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
413 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
415 enum machine_mode full_mode
416 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
418 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
419 != CODE_FOR_nothing);
421 if (full_mode != from_mode)
422 from = convert_to_mode (full_mode, from, unsignedp);
423 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
424 to, from, UNKNOWN);
425 return;
427 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
429 rtx new_from;
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
432 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
433 enum insn_code icode;
435 icode = convert_optab_handler (ctab, full_mode, from_mode);
436 gcc_assert (icode != CODE_FOR_nothing);
438 if (to_mode == full_mode)
440 emit_unop_insn (icode, to, from, UNKNOWN);
441 return;
444 new_from = gen_reg_rtx (full_mode);
445 emit_unop_insn (icode, new_from, from, UNKNOWN);
447 /* else proceed to integer conversions below. */
448 from_mode = full_mode;
449 from = new_from;
452 /* Make sure both are fixed-point modes or both are not. */
453 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
454 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
455 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
457 /* If we widen from_mode to to_mode and they are in the same class,
458 we won't saturate the result.
459 Otherwise, always saturate the result to play it safe. */
460 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
461 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
462 expand_fixed_convert (to, from, 0, 0);
463 else
464 expand_fixed_convert (to, from, 0, 1);
465 return;
468 /* Now both modes are integers. */
470 /* Handle expanding beyond a word. */
471 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
472 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
474 rtx insns;
475 rtx lowpart;
476 rtx fill_value;
477 rtx lowfrom;
478 int i;
479 enum machine_mode lowpart_mode;
480 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
482 /* Try converting directly if the insn is supported. */
483 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
484 != CODE_FOR_nothing)
486 /* If FROM is a SUBREG, put it into a register. Do this
487 so that we always generate the same set of insns for
488 better cse'ing; if an intermediate assignment occurred,
489 we won't be doing the operation directly on the SUBREG. */
490 if (optimize > 0 && GET_CODE (from) == SUBREG)
491 from = force_reg (from_mode, from);
492 emit_unop_insn (code, to, from, equiv_code);
493 return;
495 /* Next, try converting via full word. */
496 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
497 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
498 != CODE_FOR_nothing))
500 rtx word_to = gen_reg_rtx (word_mode);
501 if (REG_P (to))
503 if (reg_overlap_mentioned_p (to, from))
504 from = force_reg (from_mode, from);
505 emit_clobber (to);
507 convert_move (word_to, from, unsignedp);
508 emit_unop_insn (code, to, word_to, equiv_code);
509 return;
512 /* No special multiword conversion insn; do it by hand. */
513 start_sequence ();
515 /* Since we will turn this into a no conflict block, we must ensure that
516 the source does not overlap the target, so force it into an isolated
517 register when it might. Likewise for any MEM input, since the
518 conversion sequence might require several references to it and we
519 must ensure we're getting the same value every time. */
521 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
522 from = force_reg (from_mode, from);
524 /* Get a copy of FROM widened to a word, if necessary. */
525 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
526 lowpart_mode = word_mode;
527 else
528 lowpart_mode = from_mode;
530 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
532 lowpart = gen_lowpart (lowpart_mode, to);
533 emit_move_insn (lowpart, lowfrom);
535 /* Compute the value to put in each remaining word. */
536 if (unsignedp)
537 fill_value = const0_rtx;
538 else
539 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
540 LT, lowfrom, const0_rtx,
541 lowpart_mode, 0, -1);
543 /* Fill the remaining words. */
544 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
546 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
547 rtx subword = operand_subword (to, index, 1, to_mode);
549 gcc_assert (subword);
551 if (fill_value != subword)
552 emit_move_insn (subword, fill_value);
555 insns = get_insns ();
556 end_sequence ();
558 emit_insn (insns);
559 return;
562 /* Truncating multi-word to a word or less. */
563 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
564 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
566 if (!((MEM_P (from)
567 && ! MEM_VOLATILE_P (from)
568 && direct_load[(int) to_mode]
569 && ! mode_dependent_address_p (XEXP (from, 0),
570 MEM_ADDR_SPACE (from)))
571 || REG_P (from)
572 || GET_CODE (from) == SUBREG))
573 from = force_reg (from_mode, from);
574 convert_move (to, gen_lowpart (word_mode, from), 0);
575 return;
578 /* Now follow all the conversions between integers
579 no more than a word long. */
581 /* For truncation, usually we can just refer to FROM in a narrower mode. */
582 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
583 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
585 if (!((MEM_P (from)
586 && ! MEM_VOLATILE_P (from)
587 && direct_load[(int) to_mode]
588 && ! mode_dependent_address_p (XEXP (from, 0),
589 MEM_ADDR_SPACE (from)))
590 || REG_P (from)
591 || GET_CODE (from) == SUBREG))
592 from = force_reg (from_mode, from);
593 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
594 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
595 from = copy_to_reg (from);
596 emit_move_insn (to, gen_lowpart (to_mode, from));
597 return;
600 /* Handle extension. */
601 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
603 /* Convert directly if that works. */
604 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
605 != CODE_FOR_nothing)
607 emit_unop_insn (code, to, from, equiv_code);
608 return;
610 else
612 enum machine_mode intermediate;
613 rtx tmp;
614 int shift_amount;
616 /* Search for a mode to convert via. */
617 for (intermediate = from_mode; intermediate != VOIDmode;
618 intermediate = GET_MODE_WIDER_MODE (intermediate))
619 if (((can_extend_p (to_mode, intermediate, unsignedp)
620 != CODE_FOR_nothing)
621 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
622 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
623 && (can_extend_p (intermediate, from_mode, unsignedp)
624 != CODE_FOR_nothing))
626 convert_move (to, convert_to_mode (intermediate, from,
627 unsignedp), unsignedp);
628 return;
631 /* No suitable intermediate mode.
632 Generate what we need with shifts. */
633 shift_amount = (GET_MODE_PRECISION (to_mode)
634 - GET_MODE_PRECISION (from_mode));
635 from = gen_lowpart (to_mode, force_reg (from_mode, from));
636 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
637 to, unsignedp);
638 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
639 to, unsignedp);
640 if (tmp != to)
641 emit_move_insn (to, tmp);
642 return;
646 /* Support special truncate insns for certain modes. */
647 if (convert_optab_handler (trunc_optab, to_mode,
648 from_mode) != CODE_FOR_nothing)
650 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
651 to, from, UNKNOWN);
652 return;
655 /* Handle truncation of volatile memrefs, and so on;
656 the things that couldn't be truncated directly,
657 and for which there was no special instruction.
659 ??? Code above formerly short-circuited this, for most integer
660 mode pairs, with a force_reg in from_mode followed by a recursive
661 call to this routine. Appears always to have been wrong. */
662 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
664 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
665 emit_move_insn (to, temp);
666 return;
669 /* Mode combination is not recognized. */
670 gcc_unreachable ();
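/* Illustrative sketch (not part of expr.c): the UNSIGNEDP argument of
   convert_move selects zero- versus sign-extension when widening.  The
   same choice is shown here on plain host integers for a hypothetical
   QImode-to-SImode widening.  */

#include <stdio.h>
#include <stdint.h>

/* Widen an 8-bit value to 32 bits the way convert_move would:
   zero-extend when unsignedp is nonzero, sign-extend otherwise.  */
static uint32_t
widen_qi_to_si (uint8_t byte, int unsignedp)
{
  if (unsignedp)
    return (uint32_t) byte;                     /* ZERO_EXTEND */
  return (uint32_t) (int32_t) (int8_t) byte;    /* SIGN_EXTEND */
}

int
main (void)
{
  /* 0xf0 is 240 unsigned but -16 signed.  */
  printf ("%#x\n", (unsigned int) widen_qi_to_si (0xf0, 1));  /* 0xf0 */
  printf ("%#x\n", (unsigned int) widen_qi_to_si (0xf0, 0));  /* 0xfffffff0 */
  return 0;
}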
673 /* Return an rtx for a value that would result
674 from converting X to mode MODE.
675 Both X and MODE may be floating, or both integer.
676 UNSIGNEDP is nonzero if X is an unsigned value.
677 This can be done by referring to a part of X in place
678 or by copying to a new temporary with conversion. */
681 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
683 return convert_modes (mode, VOIDmode, x, unsignedp);
686 /* Return an rtx for a value that would result
687 from converting X from mode OLDMODE to mode MODE.
688 Both modes may be floating, or both integer.
689 UNSIGNEDP is nonzero if X is an unsigned value.
691 This can be done by referring to a part of X in place
692 or by copying to a new temporary with conversion.
694 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
697 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
699 rtx temp;
701 /* If FROM is a SUBREG that indicates that we have already done at least
702 the required extension, strip it. */
704 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
705 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
706 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
707 x = gen_lowpart (mode, SUBREG_REG (x));
709 if (GET_MODE (x) != VOIDmode)
710 oldmode = GET_MODE (x);
712 if (mode == oldmode)
713 return x;
715 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
717 /* If the caller did not tell us the old mode, then there is not
718 much to do with respect to canonicalization. We have to
719 assume that all the bits are significant. */
720 if (GET_MODE_CLASS (oldmode) != MODE_INT)
721 oldmode = MAX_MODE_INT;
722 wide_int w = wide_int::from (std::make_pair (x, oldmode),
723 GET_MODE_PRECISION (mode),
724 unsignedp ? UNSIGNED : SIGNED);
725 return immed_wide_int_const (w, mode);
728 /* We can do this with a gen_lowpart if both desired and current modes
729 are integer, and this is either a constant integer, a register, or a
730 non-volatile MEM. */
731 if (GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_CLASS (oldmode) == MODE_INT
733 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
734 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
735 || (REG_P (x)
736 && (!HARD_REGISTER_P (x)
737 || HARD_REGNO_MODE_OK (REGNO (x), mode))
738 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
740 return gen_lowpart (mode, x);
742 /* Converting an integer constant into MODE is always equivalent to a
743 subreg operation. */
744 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
746 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
747 return simplify_gen_subreg (mode, x, oldmode, 0);
750 temp = gen_reg_rtx (mode);
751 convert_move (temp, x, unsignedp);
752 return temp;
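/* Illustrative sketch (not part of expr.c): for an integer constant,
   convert_modes reduces the value to the destination precision and
   re-extends it according to UNSIGNEDP, which is what the wide_int::from
   call above accomplishes.  A host-integer model, assuming the target
   precision is at most 64 bits.  */

#include <stdio.h>
#include <stdint.h>

/* Truncate VALUE to PRECISION bits, then sign- or zero-extend it back to
   64 bits, mirroring the canonicalization convert_modes performs on
   integer constants.  */
static int64_t
canonicalize_const (int64_t value, unsigned int precision, int unsignedp)
{
  if (precision >= 64)
    return value;
  uint64_t mask = (UINT64_C (1) << precision) - 1;
  uint64_t trunc = (uint64_t) value & mask;
  if (!unsignedp && (trunc & (UINT64_C (1) << (precision - 1))))
    trunc |= ~mask;                    /* sign bit set: extend with ones */
  return (int64_t) trunc;
}

int
main (void)
{
  printf ("%lld\n", (long long) canonicalize_const (0x1ff, 8, 1));  /* 255 */
  printf ("%lld\n", (long long) canonicalize_const (0x1ff, 8, 0));  /* -1 */
  return 0;
}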
755 /* Return the largest alignment we can use for doing a move (or store)
756 of MAX_PIECES. ALIGN is the largest alignment we could use. */
758 static unsigned int
759 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
761 enum machine_mode tmode;
763 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
764 if (align >= GET_MODE_ALIGNMENT (tmode))
765 align = GET_MODE_ALIGNMENT (tmode);
766 else
768 enum machine_mode tmode, xmode;
770 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
771 tmode != VOIDmode;
772 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
773 if (GET_MODE_SIZE (tmode) > max_pieces
774 || SLOW_UNALIGNED_ACCESS (tmode, align))
775 break;
777 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
780 return align;
783 /* Return the widest integer mode no wider than SIZE. If no such mode
784 can be found, return VOIDmode. */
786 static enum machine_mode
787 widest_int_mode_for_size (unsigned int size)
789 enum machine_mode tmode, mode = VOIDmode;
791 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
792 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
793 if (GET_MODE_SIZE (tmode) < size)
794 mode = tmode;
796 return mode;
799 /* STORE_MAX_PIECES is the number of bytes at a time that we can
800 store efficiently. Due to internal GCC limitations, this is
801 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
802 for an immediate constant. */
804 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
806 /* Determine whether the LEN bytes can be moved by using several move
807 instructions. Return nonzero if a call to move_by_pieces should
808 succeed. */
811 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
812 unsigned int align ATTRIBUTE_UNUSED)
814 return MOVE_BY_PIECES_P (len, align);
817 /* Generate several move instructions to copy LEN bytes from block FROM to
818 block TO. (These are MEM rtx's with BLKmode).
820 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
821 used to push FROM to the stack.
823 ALIGN is maximum stack alignment we can assume.
825 If ENDP is 0 return TO, if ENDP is 1 return the memory at the end, a la
826 mempcpy, and if ENDP is 2 return the memory at the end minus one byte, a la
827 stpcpy. */
830 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
831 unsigned int align, int endp)
833 struct move_by_pieces_d data;
834 enum machine_mode to_addr_mode;
835 enum machine_mode from_addr_mode = get_address_mode (from);
836 rtx to_addr, from_addr = XEXP (from, 0);
837 unsigned int max_size = MOVE_MAX_PIECES + 1;
838 enum insn_code icode;
840 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
842 data.offset = 0;
843 data.from_addr = from_addr;
844 if (to)
846 to_addr_mode = get_address_mode (to);
847 to_addr = XEXP (to, 0);
848 data.to = to;
849 data.autinc_to
850 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
851 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
852 data.reverse
853 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
855 else
857 to_addr_mode = VOIDmode;
858 to_addr = NULL_RTX;
859 data.to = NULL_RTX;
860 data.autinc_to = 1;
861 #ifdef STACK_GROWS_DOWNWARD
862 data.reverse = 1;
863 #else
864 data.reverse = 0;
865 #endif
867 data.to_addr = to_addr;
868 data.from = from;
869 data.autinc_from
870 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
871 || GET_CODE (from_addr) == POST_INC
872 || GET_CODE (from_addr) == POST_DEC);
874 data.explicit_inc_from = 0;
875 data.explicit_inc_to = 0;
876 if (data.reverse) data.offset = len;
877 data.len = len;
879 /* If copying requires more than two move insns,
880 copy addresses to registers (to make displacements shorter)
881 and use post-increment if available. */
882 if (!(data.autinc_from && data.autinc_to)
883 && move_by_pieces_ninsns (len, align, max_size) > 2)
885 /* Find the mode of the largest move...
886 MODE might not be used depending on the definitions of the
887 USE_* macros below. */
888 enum machine_mode mode ATTRIBUTE_UNUSED
889 = widest_int_mode_for_size (max_size);
891 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
893 data.from_addr = copy_to_mode_reg (from_addr_mode,
894 plus_constant (from_addr_mode,
895 from_addr, len));
896 data.autinc_from = 1;
897 data.explicit_inc_from = -1;
899 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
901 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
902 data.autinc_from = 1;
903 data.explicit_inc_from = 1;
905 if (!data.autinc_from && CONSTANT_P (from_addr))
906 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
907 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
909 data.to_addr = copy_to_mode_reg (to_addr_mode,
910 plus_constant (to_addr_mode,
911 to_addr, len));
912 data.autinc_to = 1;
913 data.explicit_inc_to = -1;
915 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
917 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
918 data.autinc_to = 1;
919 data.explicit_inc_to = 1;
921 if (!data.autinc_to && CONSTANT_P (to_addr))
922 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
925 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
927 /* First move what we can in the largest integer mode, then go to
928 successively smaller modes. */
930 while (max_size > 1 && data.len > 0)
932 enum machine_mode mode = widest_int_mode_for_size (max_size);
934 if (mode == VOIDmode)
935 break;
937 icode = optab_handler (mov_optab, mode);
938 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
939 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
941 max_size = GET_MODE_SIZE (mode);
944 /* The code above should have handled everything. */
945 gcc_assert (!data.len);
947 if (endp)
949 rtx to1;
951 gcc_assert (!data.reverse);
952 if (data.autinc_to)
954 if (endp == 2)
956 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
957 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
958 else
959 data.to_addr = copy_to_mode_reg (to_addr_mode,
960 plus_constant (to_addr_mode,
961 data.to_addr,
962 -1));
964 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
965 data.offset);
967 else
969 if (endp == 2)
970 --data.offset;
971 to1 = adjust_address (data.to, QImode, data.offset);
973 return to1;
975 else
976 return data.to;
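/* Illustrative sketch (not part of expr.c): move_by_pieces copies in the
   widest mode first and then in successively smaller ones.  A standalone
   byte-level model of that schedule, assuming pieces of 8, 4, 2 and 1
   bytes and ignoring auto-increment addressing.  */

#include <stdio.h>
#include <string.h>
#include <stddef.h>

/* Copy LEN bytes from SRC to DST using the largest piece size first,
   then successively smaller ones, as move_by_pieces does.  */
static void
copy_by_pieces (void *dst, const void *src, size_t len)
{
  unsigned char *d = dst;
  const unsigned char *s = src;
  static const size_t sizes[] = { 8, 4, 2, 1 };

  for (size_t i = 0; i < sizeof sizes / sizeof sizes[0]; i++)
    while (len >= sizes[i])
      {
        memcpy (d, s, sizes[i]);   /* one "move insn" of this piece size */
        d += sizes[i];
        s += sizes[i];
        len -= sizes[i];
      }
}

int
main (void)
{
  char out[16] = { 0 };
  copy_by_pieces (out, "piecewise copy!", 15);  /* 8 + 4 + 2 + 1 bytes */
  printf ("%s\n", out);
  return 0;
}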
979 /* Return number of insns required to move L bytes by pieces.
980 ALIGN (in bits) is maximum alignment we can assume. */
982 unsigned HOST_WIDE_INT
983 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
984 unsigned int max_size)
986 unsigned HOST_WIDE_INT n_insns = 0;
988 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
990 while (max_size > 1 && l > 0)
992 enum machine_mode mode;
993 enum insn_code icode;
995 mode = widest_int_mode_for_size (max_size);
997 if (mode == VOIDmode)
998 break;
1000 icode = optab_handler (mov_optab, mode);
1001 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1002 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1004 max_size = GET_MODE_SIZE (mode);
1007 gcc_assert (!l);
1008 return n_insns;
1011 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1012 with move instructions for mode MODE. GENFUN is the gen_... function
1013 to make a move insn for that mode. DATA has all the other info. */
1015 static void
1016 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1017 struct move_by_pieces_d *data)
1019 unsigned int size = GET_MODE_SIZE (mode);
1020 rtx to1 = NULL_RTX, from1;
1022 while (data->len >= size)
1024 if (data->reverse)
1025 data->offset -= size;
1027 if (data->to)
1029 if (data->autinc_to)
1030 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1031 data->offset);
1032 else
1033 to1 = adjust_address (data->to, mode, data->offset);
1036 if (data->autinc_from)
1037 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1038 data->offset);
1039 else
1040 from1 = adjust_address (data->from, mode, data->offset);
1042 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1043 emit_insn (gen_add2_insn (data->to_addr,
1044 gen_int_mode (-(HOST_WIDE_INT) size,
1045 GET_MODE (data->to_addr))));
1046 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1047 emit_insn (gen_add2_insn (data->from_addr,
1048 gen_int_mode (-(HOST_WIDE_INT) size,
1049 GET_MODE (data->from_addr))));
1051 if (data->to)
1052 emit_insn ((*genfun) (to1, from1));
1053 else
1055 #ifdef PUSH_ROUNDING
1056 emit_single_push_insn (mode, from1, NULL);
1057 #else
1058 gcc_unreachable ();
1059 #endif
1062 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1063 emit_insn (gen_add2_insn (data->to_addr,
1064 gen_int_mode (size,
1065 GET_MODE (data->to_addr))));
1066 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1067 emit_insn (gen_add2_insn (data->from_addr,
1068 gen_int_mode (size,
1069 GET_MODE (data->from_addr))));
1071 if (! data->reverse)
1072 data->offset += size;
1074 data->len -= size;
1078 /* Emit code to move a block Y to a block X. This may be done with
1079 string-move instructions, with multiple scalar move instructions,
1080 or with a library call.
1082 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1083 SIZE is an rtx that says how long they are.
1084 ALIGN is the maximum alignment we can assume they have.
1085 METHOD describes what kind of copy this is, and what mechanisms may be used.
1086 MIN_SIZE is the minimal size of the block to move.
1087 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1088 in unsigned HOST_WIDE_INT, then it is the mask of all ones.
1090 Return the address of the new block, if memcpy is called and returns it,
1091 0 otherwise. */
1094 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1095 unsigned int expected_align, HOST_WIDE_INT expected_size,
1096 unsigned HOST_WIDE_INT min_size,
1097 unsigned HOST_WIDE_INT max_size,
1098 unsigned HOST_WIDE_INT probable_max_size)
1100 bool may_use_call;
1101 rtx retval = 0;
1102 unsigned int align;
1104 gcc_assert (size);
1105 if (CONST_INT_P (size)
1106 && INTVAL (size) == 0)
1107 return 0;
1109 switch (method)
1111 case BLOCK_OP_NORMAL:
1112 case BLOCK_OP_TAILCALL:
1113 may_use_call = true;
1114 break;
1116 case BLOCK_OP_CALL_PARM:
1117 may_use_call = block_move_libcall_safe_for_call_parm ();
1119 /* Make inhibit_defer_pop nonzero around the library call
1120 to force it to pop the arguments right away. */
1121 NO_DEFER_POP;
1122 break;
1124 case BLOCK_OP_NO_LIBCALL:
1125 may_use_call = false;
1126 break;
1128 default:
1129 gcc_unreachable ();
1132 gcc_assert (MEM_P (x) && MEM_P (y));
1133 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1134 gcc_assert (align >= BITS_PER_UNIT);
1136 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1137 block copy is more efficient for other large modes, e.g. DCmode. */
1138 x = adjust_address (x, BLKmode, 0);
1139 y = adjust_address (y, BLKmode, 0);
1141 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1142 can be incorrect is coming from __builtin_memcpy. */
1143 if (CONST_INT_P (size))
1145 x = shallow_copy_rtx (x);
1146 y = shallow_copy_rtx (y);
1147 set_mem_size (x, INTVAL (size));
1148 set_mem_size (y, INTVAL (size));
1151 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1152 move_by_pieces (x, y, INTVAL (size), align, 0);
1153 else if (emit_block_move_via_movmem (x, y, size, align,
1154 expected_align, expected_size,
1155 min_size, max_size, probable_max_size))
1157 else if (may_use_call
1158 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1159 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1161 /* Since x and y are passed to a libcall, mark the corresponding
1162 tree EXPR as addressable. */
1163 tree y_expr = MEM_EXPR (y);
1164 tree x_expr = MEM_EXPR (x);
1165 if (y_expr)
1166 mark_addressable (y_expr);
1167 if (x_expr)
1168 mark_addressable (x_expr);
1169 retval = emit_block_move_via_libcall (x, y, size,
1170 method == BLOCK_OP_TAILCALL);
1173 else
1174 emit_block_move_via_loop (x, y, size, align);
1176 if (method == BLOCK_OP_CALL_PARM)
1177 OK_DEFER_POP;
1179 return retval;
1183 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1185 unsigned HOST_WIDE_INT max, min = 0;
1186 if (GET_CODE (size) == CONST_INT)
1187 min = max = UINTVAL (size);
1188 else
1189 max = GET_MODE_MASK (GET_MODE (size));
1190 return emit_block_move_hints (x, y, size, method, 0, -1,
1191 min, max, max);
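/* Illustrative sketch (not part of expr.c): emit_block_move_hints tries the
   cheapest strategies first.  The decision order is modeled below as a
   standalone chooser; the 32-byte threshold and the target_has_movmem flag
   are hypothetical stand-ins for MOVE_BY_PIECES_P and the movmem optab.  */

#include <stdio.h>
#include <stdbool.h>

enum block_move_strategy
{
  BY_PIECES,       /* scalar moves emitted inline */
  MOVMEM_PATTERN,  /* target-specific movmem expander */
  LIBCALL,         /* call memcpy */
  BYTE_LOOP        /* fallback loop, when libcalls are forbidden */
};

/* Mirror the cascade in emit_block_move_hints: constant small sizes go by
   pieces, then the target pattern, then memcpy, then a loop.  */
static enum block_move_strategy
choose_strategy (bool size_is_constant, unsigned long size,
                 bool target_has_movmem, bool may_use_call)
{
  if (size_is_constant && size <= 32)
    return BY_PIECES;
  if (target_has_movmem)
    return MOVMEM_PATTERN;
  if (may_use_call)
    return LIBCALL;
  return BYTE_LOOP;
}

int
main (void)
{
  printf ("%d\n", choose_strategy (true, 16, true, true));   /* BY_PIECES */
  printf ("%d\n", choose_strategy (false, 0, false, false)); /* BYTE_LOOP */
  return 0;
}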
1194 /* A subroutine of emit_block_move. Returns true if calling the
1195 block move libcall will not clobber any parameters which may have
1196 already been placed on the stack. */
1198 static bool
1199 block_move_libcall_safe_for_call_parm (void)
1201 #if defined (REG_PARM_STACK_SPACE)
1202 tree fn;
1203 #endif
1205 /* If arguments are pushed on the stack, then they're safe. */
1206 if (PUSH_ARGS)
1207 return true;
1209 /* If registers go on the stack anyway, any argument is sure to clobber
1210 an outgoing argument. */
1211 #if defined (REG_PARM_STACK_SPACE)
1212 fn = emit_block_move_libcall_fn (false);
1213 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1214 depend on its argument. */
1215 (void) fn;
1216 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1217 && REG_PARM_STACK_SPACE (fn) != 0)
1218 return false;
1219 #endif
1221 /* If any argument goes in memory, then it might clobber an outgoing
1222 argument. */
1224 CUMULATIVE_ARGS args_so_far_v;
1225 cumulative_args_t args_so_far;
1226 tree fn, arg;
1228 fn = emit_block_move_libcall_fn (false);
1229 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1230 args_so_far = pack_cumulative_args (&args_so_far_v);
1232 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1233 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1235 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1236 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1237 NULL_TREE, true);
1238 if (!tmp || !REG_P (tmp))
1239 return false;
1240 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1241 return false;
1242 targetm.calls.function_arg_advance (args_so_far, mode,
1243 NULL_TREE, true);
1246 return true;
1249 /* A subroutine of emit_block_move. Expand a movmem pattern;
1250 return true if successful. */
1252 static bool
1253 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1254 unsigned int expected_align, HOST_WIDE_INT expected_size,
1255 unsigned HOST_WIDE_INT min_size,
1256 unsigned HOST_WIDE_INT max_size,
1257 unsigned HOST_WIDE_INT probable_max_size)
1259 int save_volatile_ok = volatile_ok;
1260 enum machine_mode mode;
1262 if (expected_align < align)
1263 expected_align = align;
1264 if (expected_size != -1)
1266 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1267 expected_size = probable_max_size;
1268 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1269 expected_size = min_size;
1272 /* Since this is a move insn, we don't care about volatility. */
1273 volatile_ok = 1;
1275 /* Try the most limited insn first, because there's no point
1276 including more than one in the machine description unless
1277 the more limited one has some advantage. */
1279 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1280 mode = GET_MODE_WIDER_MODE (mode))
1282 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1284 if (code != CODE_FOR_nothing
1285 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1286 here because if SIZE is less than the mode mask, as it is
1287 returned by the macro, it will definitely be less than the
1288 actual mode mask. Since SIZE is within the Pmode address
1289 space, we limit MODE to Pmode. */
1290 && ((CONST_INT_P (size)
1291 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1292 <= (GET_MODE_MASK (mode) >> 1)))
1293 || max_size <= (GET_MODE_MASK (mode) >> 1)
1294 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1296 struct expand_operand ops[9];
1297 unsigned int nops;
1299 /* ??? When called via emit_block_move_for_call, it'd be
1300 nice if there were some way to inform the backend, so
1301 that it doesn't fail the expansion because it thinks
1302 emitting the libcall would be more efficient. */
1303 nops = insn_data[(int) code].n_generator_args;
1304 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1306 create_fixed_operand (&ops[0], x);
1307 create_fixed_operand (&ops[1], y);
1308 /* The check above guarantees that this size conversion is valid. */
1309 create_convert_operand_to (&ops[2], size, mode, true);
1310 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1311 if (nops >= 6)
1313 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1314 create_integer_operand (&ops[5], expected_size);
1316 if (nops >= 8)
1318 create_integer_operand (&ops[6], min_size);
1319 /* If we cannot represent the maximal size,
1320 pass NULL for the parameter. */
1321 if ((HOST_WIDE_INT) max_size != -1)
1322 create_integer_operand (&ops[7], max_size);
1323 else
1324 create_fixed_operand (&ops[7], NULL);
1326 if (nops == 9)
1328 /* If we cannot represent the maximal size,
1329 pass NULL for the parameter. */
1330 if ((HOST_WIDE_INT) probable_max_size != -1)
1331 create_integer_operand (&ops[8], probable_max_size);
1332 else
1333 create_fixed_operand (&ops[8], NULL);
1335 if (maybe_expand_insn (code, nops, ops))
1337 volatile_ok = save_volatile_ok;
1338 return true;
1343 volatile_ok = save_volatile_ok;
1344 return false;
1347 /* A subroutine of emit_block_move. Expand a call to memcpy.
1348 Return the return value from memcpy, 0 otherwise. */
1351 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1353 rtx dst_addr, src_addr;
1354 tree call_expr, fn, src_tree, dst_tree, size_tree;
1355 enum machine_mode size_mode;
1356 rtx retval;
1358 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1359 pseudos. We can then place those new pseudos into a VAR_DECL and
1360 use them later. */
1362 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1363 src_addr = copy_addr_to_reg (XEXP (src, 0));
1365 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1366 src_addr = convert_memory_address (ptr_mode, src_addr);
1368 dst_tree = make_tree (ptr_type_node, dst_addr);
1369 src_tree = make_tree (ptr_type_node, src_addr);
1371 size_mode = TYPE_MODE (sizetype);
1373 size = convert_to_mode (size_mode, size, 1);
1374 size = copy_to_mode_reg (size_mode, size);
1376 /* It is incorrect to use the libcall calling conventions to call
1377 memcpy in this context. This could be a user call to memcpy and
1378 the user may wish to examine the return value from memcpy. For
1379 targets where libcalls and normal calls have different conventions
1380 for returning pointers, we could end up generating incorrect code. */
1382 size_tree = make_tree (sizetype, size);
1384 fn = emit_block_move_libcall_fn (true);
1385 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1386 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1388 retval = expand_normal (call_expr);
1390 return retval;
1393 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1394 for the function we use for block copies. */
1396 static GTY(()) tree block_move_fn;
1398 void
1399 init_block_move_fn (const char *asmspec)
1401 if (!block_move_fn)
1403 tree args, fn, attrs, attr_args;
1405 fn = get_identifier ("memcpy");
1406 args = build_function_type_list (ptr_type_node, ptr_type_node,
1407 const_ptr_type_node, sizetype,
1408 NULL_TREE);
1410 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1411 DECL_EXTERNAL (fn) = 1;
1412 TREE_PUBLIC (fn) = 1;
1413 DECL_ARTIFICIAL (fn) = 1;
1414 TREE_NOTHROW (fn) = 1;
1415 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1416 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1418 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1419 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1421 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1423 block_move_fn = fn;
1426 if (asmspec)
1427 set_user_assembler_name (block_move_fn, asmspec);
1430 static tree
1431 emit_block_move_libcall_fn (int for_call)
1433 static bool emitted_extern;
1435 if (!block_move_fn)
1436 init_block_move_fn (NULL);
1438 if (for_call && !emitted_extern)
1440 emitted_extern = true;
1441 make_decl_rtl (block_move_fn);
1444 return block_move_fn;
1447 /* A subroutine of emit_block_move. Copy the data via an explicit
1448 loop. This is used only when libcalls are forbidden. */
1449 /* ??? It'd be nice to copy in hunks larger than QImode. */
1451 static void
1452 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1453 unsigned int align ATTRIBUTE_UNUSED)
1455 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1456 enum machine_mode x_addr_mode = get_address_mode (x);
1457 enum machine_mode y_addr_mode = get_address_mode (y);
1458 enum machine_mode iter_mode;
1460 iter_mode = GET_MODE (size);
1461 if (iter_mode == VOIDmode)
1462 iter_mode = word_mode;
1464 top_label = gen_label_rtx ();
1465 cmp_label = gen_label_rtx ();
1466 iter = gen_reg_rtx (iter_mode);
1468 emit_move_insn (iter, const0_rtx);
1470 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1471 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1472 do_pending_stack_adjust ();
1474 emit_jump (cmp_label);
1475 emit_label (top_label);
1477 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1478 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1480 if (x_addr_mode != y_addr_mode)
1481 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1482 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1484 x = change_address (x, QImode, x_addr);
1485 y = change_address (y, QImode, y_addr);
1487 emit_move_insn (x, y);
1489 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1490 true, OPTAB_LIB_WIDEN);
1491 if (tmp != iter)
1492 emit_move_insn (iter, tmp);
1494 emit_label (cmp_label);
1496 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1497 true, top_label, REG_BR_PROB_BASE * 90 / 100);
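/* Illustrative sketch (not part of expr.c): the RTL emitted by
   emit_block_move_via_loop amounts to the following byte-copy loop, with
   the comparison placed at the bottom and entered by an initial jump.  */

#include <stdio.h>

/* C rendering of the generated loop: initialize the iterator, jump to the
   comparison, and copy one byte per iteration while iter < size.  */
static void
block_move_loop (unsigned char *x, const unsigned char *y, unsigned long size)
{
  unsigned long iter = 0;

  goto cmp_label;
 top_label:
  x[iter] = y[iter];
  iter++;
 cmp_label:
  if (iter < size)
    goto top_label;
}

int
main (void)
{
  char dst[8] = { 0 };
  block_move_loop ((unsigned char *) dst, (const unsigned char *) "loop", 5);
  printf ("%s\n", dst);
  return 0;
}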
1500 /* Copy all or part of a value X into registers starting at REGNO.
1501 The number of registers to be filled is NREGS. */
1503 void
1504 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1506 int i;
1507 #ifdef HAVE_load_multiple
1508 rtx pat;
1509 rtx last;
1510 #endif
1512 if (nregs == 0)
1513 return;
1515 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1516 x = validize_mem (force_const_mem (mode, x));
1518 /* See if the machine can do this with a load multiple insn. */
1519 #ifdef HAVE_load_multiple
1520 if (HAVE_load_multiple)
1522 last = get_last_insn ();
1523 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1524 GEN_INT (nregs));
1525 if (pat)
1527 emit_insn (pat);
1528 return;
1530 else
1531 delete_insns_since (last);
1533 #endif
1535 for (i = 0; i < nregs; i++)
1536 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1537 operand_subword_force (x, i, mode));
1540 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1541 The number of registers to be filled is NREGS. */
1543 void
1544 move_block_from_reg (int regno, rtx x, int nregs)
1546 int i;
1548 if (nregs == 0)
1549 return;
1551 /* See if the machine can do this with a store multiple insn. */
1552 #ifdef HAVE_store_multiple
1553 if (HAVE_store_multiple)
1555 rtx last = get_last_insn ();
1556 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1557 GEN_INT (nregs));
1558 if (pat)
1560 emit_insn (pat);
1561 return;
1563 else
1564 delete_insns_since (last);
1566 #endif
1568 for (i = 0; i < nregs; i++)
1570 rtx tem = operand_subword (x, i, 1, BLKmode);
1572 gcc_assert (tem);
1574 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1578 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1579 ORIG, where ORIG is a non-consecutive group of registers represented by
1580 a PARALLEL. The clone is identical to the original except in that the
1581 original set of registers is replaced by a new set of pseudo registers.
1582 The new set has the same modes as the original set. */
1585 gen_group_rtx (rtx orig)
1587 int i, length;
1588 rtx *tmps;
1590 gcc_assert (GET_CODE (orig) == PARALLEL);
1592 length = XVECLEN (orig, 0);
1593 tmps = XALLOCAVEC (rtx, length);
1595 /* Skip a NULL entry in first slot. */
1596 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1598 if (i)
1599 tmps[0] = 0;
1601 for (; i < length; i++)
1603 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1604 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1606 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1609 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1612 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1613 except that values are placed in TMPS[i], and must later be moved
1614 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1616 static void
1617 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1619 rtx src;
1620 int start, i;
1621 enum machine_mode m = GET_MODE (orig_src);
1623 gcc_assert (GET_CODE (dst) == PARALLEL);
1625 if (m != VOIDmode
1626 && !SCALAR_INT_MODE_P (m)
1627 && !MEM_P (orig_src)
1628 && GET_CODE (orig_src) != CONCAT)
1630 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1631 if (imode == BLKmode)
1632 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1633 else
1634 src = gen_reg_rtx (imode);
1635 if (imode != BLKmode)
1636 src = gen_lowpart (GET_MODE (orig_src), src);
1637 emit_move_insn (src, orig_src);
1638 /* ...and back again. */
1639 if (imode != BLKmode)
1640 src = gen_lowpart (imode, src);
1641 emit_group_load_1 (tmps, dst, src, type, ssize);
1642 return;
1645 /* Check for a NULL entry, used to indicate that the parameter goes
1646 both on the stack and in registers. */
1647 if (XEXP (XVECEXP (dst, 0, 0), 0))
1648 start = 0;
1649 else
1650 start = 1;
1652 /* Process the pieces. */
1653 for (i = start; i < XVECLEN (dst, 0); i++)
1655 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1656 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1657 unsigned int bytelen = GET_MODE_SIZE (mode);
1658 int shift = 0;
1660 /* Handle trailing fragments that run over the size of the struct. */
1661 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1663 /* Arrange to shift the fragment to where it belongs.
1664 extract_bit_field loads to the lsb of the reg. */
1665 if (
1666 #ifdef BLOCK_REG_PADDING
1667 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1668 == (BYTES_BIG_ENDIAN ? upward : downward)
1669 #else
1670 BYTES_BIG_ENDIAN
1671 #endif
1673 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1674 bytelen = ssize - bytepos;
1675 gcc_assert (bytelen > 0);
1678 /* If we won't be loading directly from memory, protect the real source
1679 from strange tricks we might play; but make sure that the source can
1680 be loaded directly into the destination. */
1681 src = orig_src;
1682 if (!MEM_P (orig_src)
1683 && (!CONSTANT_P (orig_src)
1684 || (GET_MODE (orig_src) != mode
1685 && GET_MODE (orig_src) != VOIDmode)))
1687 if (GET_MODE (orig_src) == VOIDmode)
1688 src = gen_reg_rtx (mode);
1689 else
1690 src = gen_reg_rtx (GET_MODE (orig_src));
1692 emit_move_insn (src, orig_src);
1695 /* Optimize the access just a bit. */
1696 if (MEM_P (src)
1697 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1698 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1699 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1700 && bytelen == GET_MODE_SIZE (mode))
1702 tmps[i] = gen_reg_rtx (mode);
1703 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1705 else if (COMPLEX_MODE_P (mode)
1706 && GET_MODE (src) == mode
1707 && bytelen == GET_MODE_SIZE (mode))
1708 /* Let emit_move_complex do the bulk of the work. */
1709 tmps[i] = src;
1710 else if (GET_CODE (src) == CONCAT)
1712 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1713 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1715 if ((bytepos == 0 && bytelen == slen0)
1716 || (bytepos != 0 && bytepos + bytelen <= slen))
1718 /* The following assumes that the concatenated objects all
1719 have the same size. In this case, a simple calculation
1720 can be used to determine the object and the bit field
1721 to be extracted. */
1722 tmps[i] = XEXP (src, bytepos / slen0);
1723 if (! CONSTANT_P (tmps[i])
1724 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1725 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1726 (bytepos % slen0) * BITS_PER_UNIT,
1727 1, NULL_RTX, mode, mode);
1729 else
1731 rtx mem;
1733 gcc_assert (!bytepos);
1734 mem = assign_stack_temp (GET_MODE (src), slen);
1735 emit_move_insn (mem, src);
1736 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1737 0, 1, NULL_RTX, mode, mode);
1740 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1741 SIMD register, which is currently broken. While we get GCC
1742 to emit proper RTL for these cases, let's dump to memory. */
1743 else if (VECTOR_MODE_P (GET_MODE (dst))
1744 && REG_P (src))
1746 int slen = GET_MODE_SIZE (GET_MODE (src));
1747 rtx mem;
1749 mem = assign_stack_temp (GET_MODE (src), slen);
1750 emit_move_insn (mem, src);
1751 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1753 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1754 && XVECLEN (dst, 0) > 1)
1755 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1756 else if (CONSTANT_P (src))
1758 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1760 if (len == ssize)
1761 tmps[i] = src;
1762 else
1764 rtx first, second;
1766 /* TODO: const_wide_int can have sizes other than this... */
1767 gcc_assert (2 * len == ssize);
1768 split_double (src, &first, &second);
1769 if (i)
1770 tmps[i] = second;
1771 else
1772 tmps[i] = first;
1775 else if (REG_P (src) && GET_MODE (src) == mode)
1776 tmps[i] = src;
1777 else
1778 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1779 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1780 mode, mode);
1782 if (shift)
1783 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1784 shift, tmps[i], 0);
1788 /* Emit code to move a block SRC of type TYPE to a block DST,
1789 where DST is non-consecutive registers represented by a PARALLEL.
1790 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1791 if not known. */
1793 void
1794 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1796 rtx *tmps;
1797 int i;
1799 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1800 emit_group_load_1 (tmps, dst, src, type, ssize);
1802 /* Copy the extracted pieces into the proper (probable) hard regs. */
1803 for (i = 0; i < XVECLEN (dst, 0); i++)
1805 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1806 if (d == NULL)
1807 continue;
1808 emit_move_insn (d, tmps[i]);
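/* Illustrative sketch (not part of expr.c): emit_group_load splits a block
   value into register-sized pieces at the byte positions recorded in the
   PARALLEL.  A host-side model, assuming 8-byte pieces and a little-endian
   layout; the buffer contents and positions are made up for the demo.  */

#include <stdio.h>
#include <string.h>
#include <stdint.h>

/* Extract NPIECES 8-byte "registers" from a BLKmode-like buffer at the
   given byte positions, as emit_group_load_1 does with adjust_address or
   extract_bit_field for each element of the PARALLEL.  */
static void
group_load (uint64_t *pieces, const unsigned char *src,
            const size_t *bytepos, size_t npieces)
{
  for (size_t i = 0; i < npieces; i++)
    memcpy (&pieces[i], src + bytepos[i], sizeof (uint64_t));
}

int
main (void)
{
  unsigned char block[16];
  for (int i = 0; i < 16; i++)
    block[i] = (unsigned char) i;

  uint64_t regs[2];
  size_t pos[2] = { 0, 8 };
  group_load (regs, block, pos, 2);
  printf ("%#llx %#llx\n", (unsigned long long) regs[0],
          (unsigned long long) regs[1]);
  return 0;
}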
1812 /* Similar, but load SRC into new pseudos in a format that looks like
1813 PARALLEL. This can later be fed to emit_group_move to get things
1814 in the right place. */
1817 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1819 rtvec vec;
1820 int i;
1822 vec = rtvec_alloc (XVECLEN (parallel, 0));
1823 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1825 /* Convert the vector to look just like the original PARALLEL, except
1826 with the computed values. */
1827 for (i = 0; i < XVECLEN (parallel, 0); i++)
1829 rtx e = XVECEXP (parallel, 0, i);
1830 rtx d = XEXP (e, 0);
1832 if (d)
1834 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1835 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1837 RTVEC_ELT (vec, i) = e;
1840 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1843 /* Emit code to move a block SRC to block DST, where SRC and DST are
1844 non-consecutive groups of registers, each represented by a PARALLEL. */
1846 void
1847 emit_group_move (rtx dst, rtx src)
1849 int i;
1851 gcc_assert (GET_CODE (src) == PARALLEL
1852 && GET_CODE (dst) == PARALLEL
1853 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1855 /* Skip first entry if NULL. */
1856 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1857 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1858 XEXP (XVECEXP (src, 0, i), 0));
1861 /* Move a group of registers represented by a PARALLEL into pseudos. */
1864 emit_group_move_into_temps (rtx src)
1866 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1867 int i;
1869 for (i = 0; i < XVECLEN (src, 0); i++)
1871 rtx e = XVECEXP (src, 0, i);
1872 rtx d = XEXP (e, 0);
1874 if (d)
1875 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1876 RTVEC_ELT (vec, i) = e;
1879 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1882 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1883 where SRC is non-consecutive registers represented by a PARALLEL.
1884 SSIZE represents the total size of block ORIG_DST, or -1 if not
1885 known. */
1887 void
1888 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1890 rtx *tmps, dst;
1891 int start, finish, i;
1892 enum machine_mode m = GET_MODE (orig_dst);
1894 gcc_assert (GET_CODE (src) == PARALLEL);
1896 if (!SCALAR_INT_MODE_P (m)
1897 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1899 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1900 if (imode == BLKmode)
1901 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1902 else
1903 dst = gen_reg_rtx (imode);
1904 emit_group_store (dst, src, type, ssize);
1905 if (imode != BLKmode)
1906 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1907 emit_move_insn (orig_dst, dst);
1908 return;
1911 /* Check for a NULL entry, used to indicate that the parameter goes
1912 both on the stack and in registers. */
1913 if (XEXP (XVECEXP (src, 0, 0), 0))
1914 start = 0;
1915 else
1916 start = 1;
1917 finish = XVECLEN (src, 0);
1919 tmps = XALLOCAVEC (rtx, finish);
1921 /* Copy the (probable) hard regs into pseudos. */
1922 for (i = start; i < finish; i++)
1924 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1925 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1927 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1928 emit_move_insn (tmps[i], reg);
1930 else
1931 tmps[i] = reg;
1934 /* If we won't be storing directly into memory, protect the real destination
1935 from strange tricks we might play. */
1936 dst = orig_dst;
1937 if (GET_CODE (dst) == PARALLEL)
1939 rtx temp;
1941 /* We can get a PARALLEL dst if there is a conditional expression in
1942 a return statement. In that case, the dst and src are the same,
1943 so no action is necessary. */
1944 if (rtx_equal_p (dst, src))
1945 return;
1947 /* It is unclear if we can ever reach here, but we may as well handle
1948 it. Allocate a temporary, and split this into a store/load to/from
1949 the temporary. */
1950 temp = assign_stack_temp (GET_MODE (dst), ssize);
1951 emit_group_store (temp, src, type, ssize);
1952 emit_group_load (dst, temp, type, ssize);
1953 return;
1955 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1957 enum machine_mode outer = GET_MODE (dst);
1958 enum machine_mode inner;
1959 HOST_WIDE_INT bytepos;
1960 bool done = false;
1961 rtx temp;
1963 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1964 dst = gen_reg_rtx (outer);
1966 /* Make life a bit easier for combine. */
1967 /* If the first element of the vector is the low part
1968 of the destination mode, use a paradoxical subreg to
1969 initialize the destination. */
1970 if (start < finish)
1972 inner = GET_MODE (tmps[start]);
1973 bytepos = subreg_lowpart_offset (inner, outer);
1974 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1976 temp = simplify_gen_subreg (outer, tmps[start],
1977 inner, 0);
1978 if (temp)
1980 emit_move_insn (dst, temp);
1981 done = true;
1982 start++;
1987 /* If the first element wasn't the low part, try the last. */
1988 if (!done
1989 && start < finish - 1)
1991 inner = GET_MODE (tmps[finish - 1]);
1992 bytepos = subreg_lowpart_offset (inner, outer);
1993 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1995 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1996 inner, 0);
1997 if (temp)
1999 emit_move_insn (dst, temp);
2000 done = true;
2001 finish--;
2006 /* Otherwise, simply initialize the result to zero. */
2007 if (!done)
2008 emit_move_insn (dst, CONST0_RTX (outer));
2011 /* Process the pieces. */
2012 for (i = start; i < finish; i++)
2014 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2015 enum machine_mode mode = GET_MODE (tmps[i]);
2016 unsigned int bytelen = GET_MODE_SIZE (mode);
2017 unsigned int adj_bytelen;
2018 rtx dest = dst;
2020 /* Handle trailing fragments that run over the size of the struct. */
2021 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2022 adj_bytelen = ssize - bytepos;
2023 else
2024 adj_bytelen = bytelen;
2026 if (GET_CODE (dst) == CONCAT)
2028 if (bytepos + adj_bytelen
2029 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2030 dest = XEXP (dst, 0);
2031 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2033 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2034 dest = XEXP (dst, 1);
2036 else
2038 enum machine_mode dest_mode = GET_MODE (dest);
2039 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2041 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2043 if (GET_MODE_ALIGNMENT (dest_mode)
2044 >= GET_MODE_ALIGNMENT (tmp_mode))
2046 dest = assign_stack_temp (dest_mode,
2047 GET_MODE_SIZE (dest_mode));
2048 emit_move_insn (adjust_address (dest,
2049 tmp_mode,
2050 bytepos),
2051 tmps[i]);
2052 dst = dest;
2054 else
2056 dest = assign_stack_temp (tmp_mode,
2057 GET_MODE_SIZE (tmp_mode));
2058 emit_move_insn (dest, tmps[i]);
2059 dst = adjust_address (dest, dest_mode, bytepos);
2061 break;
2065 /* Handle trailing fragments that run over the size of the struct. */
2066 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2068 /* store_bit_field always takes its value from the lsb.
2069 Move the fragment to the lsb if it's not already there. */
2070 if (
2071 #ifdef BLOCK_REG_PADDING
2072 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2073 == (BYTES_BIG_ENDIAN ? upward : downward)
2074 #else
2075 BYTES_BIG_ENDIAN
2076 #endif
2079 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2080 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2081 shift, tmps[i], 0);
2084 /* Make sure not to write past the end of the struct. */
2085 store_bit_field (dest,
2086 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2087 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2088 VOIDmode, tmps[i]);
2091 /* Optimize the access just a bit. */
2092 else if (MEM_P (dest)
2093 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2094 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2095 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2096 && bytelen == GET_MODE_SIZE (mode))
2097 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2099 else
2100 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2101 0, 0, mode, tmps[i]);
2104 /* Copy from the pseudo into the (probable) hard reg. */
2105 if (orig_dst != dst)
2106 emit_move_insn (orig_dst, dst);
2109 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2110 of the value stored in X. */
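/* Usage sketch (hypothetical caller, for illustration only):

     x = maybe_emit_group_store (x, type);

   After this, X is a REG or MEM that can be used directly; if X was
   not a PARALLEL it is returned unchanged.  */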
2113 maybe_emit_group_store (rtx x, tree type)
2115 enum machine_mode mode = TYPE_MODE (type);
2116 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2117 if (GET_CODE (x) == PARALLEL)
2119 rtx result = gen_reg_rtx (mode);
2120 emit_group_store (result, x, type, int_size_in_bytes (type));
2121 return result;
2123 return x;
2126 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2128 This is used on targets that return BLKmode values in registers. */
2130 void
2131 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2137 enum machine_mode mode = GET_MODE (srcreg);
2138 enum machine_mode tmode = GET_MODE (target);
2139 enum machine_mode copy_mode;
2141 /* BLKmode registers created in the back-end shouldn't have survived. */
2142 gcc_assert (mode != BLKmode);
2144 /* If the structure doesn't take up a whole number of words, see whether
2145 SRCREG is padded on the left or on the right. If it's on the left,
2146 set PADDING_CORRECTION to the number of bits to skip.
2148 In most ABIs, the structure will be returned at the least significant end of
2149 the register, which translates to right padding on little-endian
2150 targets and left padding on big-endian targets. The opposite
2151 holds if the structure is returned at the most significant
2152 end of the register. */
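  /* Worked example with illustrative numbers only: with UNITS_PER_WORD
     == 4 (so BITS_PER_WORD == 32) and a 6-byte structure returned at
     the least significant end of the registers on a big-endian target,
     PADDING_CORRECTION below works out to 32 - (6 % 4) * 8 = 16, i.e.
     the copy loop skips 16 bits of padding before the data.  */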
2153 if (bytes % UNITS_PER_WORD != 0
2154 && (targetm.calls.return_in_msb (type)
2155 ? !BYTES_BIG_ENDIAN
2156 : BYTES_BIG_ENDIAN))
2157 padding_correction
2158 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2160 /* We can use a single move if we have an exact mode for the size. */
2161 else if (MEM_P (target)
2162 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2163 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2164 && bytes == GET_MODE_SIZE (mode))
2166 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2167 return;
2170 /* And if we additionally have the same mode for a register. */
2171 else if (REG_P (target)
2172 && GET_MODE (target) == mode
2173 && bytes == GET_MODE_SIZE (mode))
2175 emit_move_insn (target, srcreg);
2176 return;
2179 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2180 into a new pseudo which is a full word. */
2181 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2183 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2184 mode = word_mode;
2187 /* Copy the structure BITSIZE bits at a time. If the target lives in
2188 memory, take care of not reading/writing past its end by selecting
2189 a copy mode suited to BITSIZE. This should always be possible given
2190 how it is computed.
2192 If the target lives in register, make sure not to select a copy mode
2193 larger than the mode of the register.
2195 We could probably emit more efficient code for machines which do not use
2196 strict alignment, but it doesn't seem worth the effort at the current
2197 time. */
2199 copy_mode = word_mode;
2200 if (MEM_P (target))
2202 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2203 if (mem_mode != BLKmode)
2204 copy_mode = mem_mode;
2206 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2207 copy_mode = tmode;
2209 for (bitpos = 0, xbitpos = padding_correction;
2210 bitpos < bytes * BITS_PER_UNIT;
2211 bitpos += bitsize, xbitpos += bitsize)
2213 /* We need a new source operand each time xbitpos is on a
2214 word boundary and when xbitpos == padding_correction
2215 (the first time through). */
2216 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2217 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2219 /* We need a new destination operand each time bitpos is on
2220 a word boundary. */
2221 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2222 dst = target;
2223 else if (bitpos % BITS_PER_WORD == 0)
2224 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2226 /* Use xbitpos for the source extraction (right justified) and
2227 bitpos for the destination store (left justified). */
2228 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2229 extract_bit_field (src, bitsize,
2230 xbitpos % BITS_PER_WORD, 1,
2231 NULL_RTX, copy_mode, copy_mode));
2235 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2236 register if it contains any data, otherwise return null.
2238 This is used on targets that return BLKmode values in registers. */
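/* Reading aid (not part of the original sources): this is the converse
   of copy_blkmode_from_reg above -- it packs the BLKmode value SRC into
   word_mode pseudos and then assembles them into a single register of
   MODE, e.g. so a BLKmode value can be returned in registers.  */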
2241 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2243 int i, n_regs;
2244 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2245 unsigned int bitsize;
2246 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2247 enum machine_mode dst_mode;
2249 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2251 x = expand_normal (src);
2253 bytes = int_size_in_bytes (TREE_TYPE (src));
2254 if (bytes == 0)
2255 return NULL_RTX;
2257 /* If the structure doesn't take up a whole number of words, see
2258 whether the register value should be padded on the left or on
2259 the right. Set PADDING_CORRECTION to the number of padding
2260 bits needed on the left side.
2262 In most ABIs, the structure will be returned at the least significant end of
2263 the register, which translates to right padding on little-endian
2264 targets and left padding on big-endian targets. The opposite
2265 holds if the structure is returned at the most significant
2266 end of the register. */
2267 if (bytes % UNITS_PER_WORD != 0
2268 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2269 ? !BYTES_BIG_ENDIAN
2270 : BYTES_BIG_ENDIAN))
2271 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2272 * BITS_PER_UNIT));
2274 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2275 dst_words = XALLOCAVEC (rtx, n_regs);
2276 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2278 /* Copy the structure BITSIZE bits at a time. */
2279 for (bitpos = 0, xbitpos = padding_correction;
2280 bitpos < bytes * BITS_PER_UNIT;
2281 bitpos += bitsize, xbitpos += bitsize)
2283 /* We need a new destination pseudo each time xbitpos is
2284 on a word boundary and when xbitpos == padding_correction
2285 (the first time through). */
2286 if (xbitpos % BITS_PER_WORD == 0
2287 || xbitpos == padding_correction)
2289 /* Generate an appropriate register. */
2290 dst_word = gen_reg_rtx (word_mode);
2291 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2293 /* Clear the destination before we move anything into it. */
2294 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2297 /* We need a new source operand each time bitpos is on a word
2298 boundary. */
2299 if (bitpos % BITS_PER_WORD == 0)
2300 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2302 /* Use bitpos for the source extraction (left justified) and
2303 xbitpos for the destination store (right justified). */
2304 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2305 0, 0, word_mode,
2306 extract_bit_field (src_word, bitsize,
2307 bitpos % BITS_PER_WORD, 1,
2308 NULL_RTX, word_mode, word_mode));
2311 if (mode == BLKmode)
2313 /* Find the smallest integer mode large enough to hold the
2314 entire structure. */
2315 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2316 mode != VOIDmode;
2317 mode = GET_MODE_WIDER_MODE (mode))
2318 /* Have we found a large enough mode? */
2319 if (GET_MODE_SIZE (mode) >= bytes)
2320 break;
2322 /* A suitable mode should have been found. */
2323 gcc_assert (mode != VOIDmode);
2326 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2327 dst_mode = word_mode;
2328 else
2329 dst_mode = mode;
2330 dst = gen_reg_rtx (dst_mode);
2332 for (i = 0; i < n_regs; i++)
2333 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2335 if (mode != dst_mode)
2336 dst = gen_lowpart (mode, dst);
2338 return dst;
2341 /* Add a USE expression for REG to the (possibly empty) list pointed
2342 to by CALL_FUSAGE. REG must denote a hard register. */
2344 void
2345 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2347 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2349 *call_fusage
2350 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2353 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2354 to by CALL_FUSAGE. REG must denote a hard register. */
2356 void
2357 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2359 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2361 *call_fusage
2362 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2365 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2366 starting at REGNO. All of these registers must be hard registers. */
2368 void
2369 use_regs (rtx *call_fusage, int regno, int nregs)
2371 int i;
2373 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2375 for (i = 0; i < nregs; i++)
2376 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2379 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2380 PARALLEL REGS. This is for calls that pass values in multiple
2381 non-contiguous locations. The Irix 6 ABI has examples of this. */
2383 void
2384 use_group_regs (rtx *call_fusage, rtx regs)
2386 int i;
2388 for (i = 0; i < XVECLEN (regs, 0); i++)
2390 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2392 /* A NULL entry means the parameter goes both on the stack and in
2393 registers. This can also be a MEM for targets that pass values
2394 partially on the stack and partially in registers. */
2395 if (reg != 0 && REG_P (reg))
2396 use_reg (call_fusage, reg);
2400 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2401 assignment and the code of the expression on the RHS is CODE. Return
2402 NULL otherwise. */
2404 static gimple
2405 get_def_for_expr (tree name, enum tree_code code)
2407 gimple def_stmt;
2409 if (TREE_CODE (name) != SSA_NAME)
2410 return NULL;
2412 def_stmt = get_gimple_for_ssa_name (name);
2413 if (!def_stmt
2414 || gimple_assign_rhs_code (def_stmt) != code)
2415 return NULL;
2417 return def_stmt;
2420 #ifdef HAVE_conditional_move
2421 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2422 assignment and the class of the expression on the RHS is CLASS. Return
2423 NULL otherwise. */
2425 static gimple
2426 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2428 gimple def_stmt;
2430 if (TREE_CODE (name) != SSA_NAME)
2431 return NULL;
2433 def_stmt = get_gimple_for_ssa_name (name);
2434 if (!def_stmt
2435 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2436 return NULL;
2438 return def_stmt;
2440 #endif
2443 /* Determine whether the LEN bytes generated by CONSTFUN can be
2444 stored to memory using several move instructions. CONSTFUNDATA is
2445 a pointer that will be passed as an argument in every CONSTFUN call.
2446 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2447 a memset operation and false if it's a copy of a constant string.
2448 Return nonzero if a call to store_by_pieces should succeed. */
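/* Hedged usage sketch; ZERO_FN is a hypothetical callback (compare
   clear_by_pieces_1 further below, which plays the same role):

     static rtx
     zero_fn (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }
     ...
     if (can_store_by_pieces (len, zero_fn, NULL, align, true))
       store_by_pieces (to, len, zero_fn, NULL, align, true, 0);  */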
2451 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2452 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2453 void *constfundata, unsigned int align, bool memsetp)
2455 unsigned HOST_WIDE_INT l;
2456 unsigned int max_size;
2457 HOST_WIDE_INT offset = 0;
2458 enum machine_mode mode;
2459 enum insn_code icode;
2460 int reverse;
2461 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2462 rtx cst ATTRIBUTE_UNUSED;
2464 if (len == 0)
2465 return 1;
2467 if (! (memsetp
2468 ? SET_BY_PIECES_P (len, align)
2469 : STORE_BY_PIECES_P (len, align)))
2470 return 0;
2472 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2474 /* We would first store what we can in the largest integer mode, then go to
2475 successively smaller modes. */
2477 for (reverse = 0;
2478 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2479 reverse++)
2481 l = len;
2482 max_size = STORE_MAX_PIECES + 1;
2483 while (max_size > 1 && l > 0)
2485 mode = widest_int_mode_for_size (max_size);
2487 if (mode == VOIDmode)
2488 break;
2490 icode = optab_handler (mov_optab, mode);
2491 if (icode != CODE_FOR_nothing
2492 && align >= GET_MODE_ALIGNMENT (mode))
2494 unsigned int size = GET_MODE_SIZE (mode);
2496 while (l >= size)
2498 if (reverse)
2499 offset -= size;
2501 cst = (*constfun) (constfundata, offset, mode);
2502 if (!targetm.legitimate_constant_p (mode, cst))
2503 return 0;
2505 if (!reverse)
2506 offset += size;
2508 l -= size;
2512 max_size = GET_MODE_SIZE (mode);
2515 /* The code above should have handled everything. */
2516 gcc_assert (!l);
2519 return 1;
2522 /* Generate several move instructions to store LEN bytes generated by
2523 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2524 pointer that will be passed as an argument in every CONSTFUN call.
2525 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2526 a memset operation and false if it's a copy of a constant string.
2527 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2528 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2529 stpcpy. */
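/* Restating the ENDP convention above as a reading aid: ENDP == 0
   returns TO unchanged, ENDP == 1 returns a QImode MEM just past the
   last byte written (mempcpy style), and ENDP == 2 returns a QImode
   MEM at the last byte written (stpcpy style).  */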
2532 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2533 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2534 void *constfundata, unsigned int align, bool memsetp, int endp)
2536 enum machine_mode to_addr_mode = get_address_mode (to);
2537 struct store_by_pieces_d data;
2539 if (len == 0)
2541 gcc_assert (endp != 2);
2542 return to;
2545 gcc_assert (memsetp
2546 ? SET_BY_PIECES_P (len, align)
2547 : STORE_BY_PIECES_P (len, align));
2548 data.constfun = constfun;
2549 data.constfundata = constfundata;
2550 data.len = len;
2551 data.to = to;
2552 store_by_pieces_1 (&data, align);
2553 if (endp)
2555 rtx to1;
2557 gcc_assert (!data.reverse);
2558 if (data.autinc_to)
2560 if (endp == 2)
2562 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2563 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2564 else
2565 data.to_addr = copy_to_mode_reg (to_addr_mode,
2566 plus_constant (to_addr_mode,
2567 data.to_addr,
2568 -1));
2570 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2571 data.offset);
2573 else
2575 if (endp == 2)
2576 --data.offset;
2577 to1 = adjust_address (data.to, QImode, data.offset);
2579 return to1;
2581 else
2582 return data.to;
2585 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2586 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2588 static void
2589 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2591 struct store_by_pieces_d data;
2593 if (len == 0)
2594 return;
2596 data.constfun = clear_by_pieces_1;
2597 data.constfundata = NULL;
2598 data.len = len;
2599 data.to = to;
2600 store_by_pieces_1 (&data, align);
2603 /* Callback routine for clear_by_pieces.
2604 Return const0_rtx unconditionally. */
2606 static rtx
2607 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2608 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2609 enum machine_mode mode ATTRIBUTE_UNUSED)
2611 return const0_rtx;
2614 /* Subroutine of clear_by_pieces and store_by_pieces.
2615 Generate several move instructions to store LEN bytes of block TO. (A MEM
2616 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2618 static void
2619 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2620 unsigned int align ATTRIBUTE_UNUSED)
2622 enum machine_mode to_addr_mode = get_address_mode (data->to);
2623 rtx to_addr = XEXP (data->to, 0);
2624 unsigned int max_size = STORE_MAX_PIECES + 1;
2625 enum insn_code icode;
2627 data->offset = 0;
2628 data->to_addr = to_addr;
2629 data->autinc_to
2630 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2631 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2633 data->explicit_inc_to = 0;
2634 data->reverse
2635 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2636 if (data->reverse)
2637 data->offset = data->len;
2639 /* If storing requires more than two move insns,
2640 copy addresses to registers (to make displacements shorter)
2641 and use post-increment if available. */
2642 if (!data->autinc_to
2643 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2645 /* Determine the main mode we'll be using.
2646 MODE might not be used depending on the definitions of the
2647 USE_* macros below. */
2648 enum machine_mode mode ATTRIBUTE_UNUSED
2649 = widest_int_mode_for_size (max_size);
2651 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2653 data->to_addr = copy_to_mode_reg (to_addr_mode,
2654 plus_constant (to_addr_mode,
2655 to_addr,
2656 data->len));
2657 data->autinc_to = 1;
2658 data->explicit_inc_to = -1;
2661 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2662 && ! data->autinc_to)
2664 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2665 data->autinc_to = 1;
2666 data->explicit_inc_to = 1;
2669 if ( !data->autinc_to && CONSTANT_P (to_addr))
2670 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2673 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2675 /* First store what we can in the largest integer mode, then go to
2676 successively smaller modes. */
2678 while (max_size > 1 && data->len > 0)
2680 enum machine_mode mode = widest_int_mode_for_size (max_size);
2682 if (mode == VOIDmode)
2683 break;
2685 icode = optab_handler (mov_optab, mode);
2686 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2687 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2689 max_size = GET_MODE_SIZE (mode);
2692 /* The code above should have handled everything. */
2693 gcc_assert (!data->len);
2696 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2697 with move instructions for mode MODE. GENFUN is the gen_... function
2698 to make a move insn for that mode. DATA has all the other info. */
2700 static void
2701 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2702 struct store_by_pieces_d *data)
2704 unsigned int size = GET_MODE_SIZE (mode);
2705 rtx to1, cst;
2707 while (data->len >= size)
2709 if (data->reverse)
2710 data->offset -= size;
2712 if (data->autinc_to)
2713 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2714 data->offset);
2715 else
2716 to1 = adjust_address (data->to, mode, data->offset);
2718 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2719 emit_insn (gen_add2_insn (data->to_addr,
2720 gen_int_mode (-(HOST_WIDE_INT) size,
2721 GET_MODE (data->to_addr))));
2723 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2724 emit_insn ((*genfun) (to1, cst));
2726 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2727 emit_insn (gen_add2_insn (data->to_addr,
2728 gen_int_mode (size,
2729 GET_MODE (data->to_addr))));
2731 if (! data->reverse)
2732 data->offset += size;
2734 data->len -= size;
2738 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2739 its length in bytes. */
2742 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2743 unsigned int expected_align, HOST_WIDE_INT expected_size,
2744 unsigned HOST_WIDE_INT min_size,
2745 unsigned HOST_WIDE_INT max_size,
2746 unsigned HOST_WIDE_INT probable_max_size)
2748 enum machine_mode mode = GET_MODE (object);
2749 unsigned int align;
2751 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2753 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2754 just move a zero. Otherwise, do this a piece at a time. */
2755 if (mode != BLKmode
2756 && CONST_INT_P (size)
2757 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2759 rtx zero = CONST0_RTX (mode);
2760 if (zero != NULL)
2762 emit_move_insn (object, zero);
2763 return NULL;
2766 if (COMPLEX_MODE_P (mode))
2768 zero = CONST0_RTX (GET_MODE_INNER (mode));
2769 if (zero != NULL)
2771 write_complex_part (object, zero, 0);
2772 write_complex_part (object, zero, 1);
2773 return NULL;
2778 if (size == const0_rtx)
2779 return NULL;
2781 align = MEM_ALIGN (object);
2783 if (CONST_INT_P (size)
2784 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2785 clear_by_pieces (object, INTVAL (size), align);
2786 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2787 expected_align, expected_size,
2788 min_size, max_size, probable_max_size))
2790 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2791 return set_storage_via_libcall (object, size, const0_rtx,
2792 method == BLOCK_OP_TAILCALL);
2793 else
2794 gcc_unreachable ();
2796 return NULL;
2800 clear_storage (rtx object, rtx size, enum block_op_methods method)
2802 unsigned HOST_WIDE_INT max, min = 0;
2803 if (GET_CODE (size) == CONST_INT)
2804 min = max = UINTVAL (size);
2805 else
2806 max = GET_MODE_MASK (GET_MODE (size));
2807 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2811 /* A subroutine of clear_storage. Expand a call to memset.
2812 Return the return value of memset, 0 otherwise. */
2815 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2817 tree call_expr, fn, object_tree, size_tree, val_tree;
2818 enum machine_mode size_mode;
2819 rtx retval;
2821 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2822 place those new pseudos into a VAR_DECL and use them later. */
2824 object = copy_addr_to_reg (XEXP (object, 0));
2826 size_mode = TYPE_MODE (sizetype);
2827 size = convert_to_mode (size_mode, size, 1);
2828 size = copy_to_mode_reg (size_mode, size);
2830 /* It is incorrect to use the libcall calling conventions to call
2831 memset in this context. This could be a user call to memset and
2832 the user may wish to examine the return value from memset. For
2833 targets where libcalls and normal calls have different conventions
2834 for returning pointers, we could end up generating incorrect code. */
2836 object_tree = make_tree (ptr_type_node, object);
2837 if (!CONST_INT_P (val))
2838 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2839 size_tree = make_tree (sizetype, size);
2840 val_tree = make_tree (integer_type_node, val);
2842 fn = clear_storage_libcall_fn (true);
2843 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2844 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2846 retval = expand_normal (call_expr);
2848 return retval;
2851 /* A subroutine of set_storage_via_libcall. Create the tree node
2852 for the function we use for block clears. */
2854 tree block_clear_fn;
2856 void
2857 init_block_clear_fn (const char *asmspec)
2859 if (!block_clear_fn)
2861 tree fn, args;
2863 fn = get_identifier ("memset");
2864 args = build_function_type_list (ptr_type_node, ptr_type_node,
2865 integer_type_node, sizetype,
2866 NULL_TREE);
2868 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2869 DECL_EXTERNAL (fn) = 1;
2870 TREE_PUBLIC (fn) = 1;
2871 DECL_ARTIFICIAL (fn) = 1;
2872 TREE_NOTHROW (fn) = 1;
2873 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2874 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2876 block_clear_fn = fn;
2879 if (asmspec)
2880 set_user_assembler_name (block_clear_fn, asmspec);
2883 static tree
2884 clear_storage_libcall_fn (int for_call)
2886 static bool emitted_extern;
2888 if (!block_clear_fn)
2889 init_block_clear_fn (NULL);
2891 if (for_call && !emitted_extern)
2893 emitted_extern = true;
2894 make_decl_rtl (block_clear_fn);
2897 return block_clear_fn;
2900 /* Expand a setmem pattern; return true if successful. */
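/* Reading aid derived from the code below: ops[0] is the destination
   MEM, ops[1] the byte count, ops[2] the fill value, ops[3] the
   alignment in bytes; ops[4]/ops[5] are the expected alignment and
   size hints, ops[6] the minimum size, ops[7] the maximum size (NULL
   if unrepresentable) and ops[8] the probable maximum size (NULL
   likewise).  */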
2902 bool
2903 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2904 unsigned int expected_align, HOST_WIDE_INT expected_size,
2905 unsigned HOST_WIDE_INT min_size,
2906 unsigned HOST_WIDE_INT max_size,
2907 unsigned HOST_WIDE_INT probable_max_size)
2909 /* Try the most limited insn first, because there's no point
2910 including more than one in the machine description unless
2911 the more limited one has some advantage. */
2913 enum machine_mode mode;
2915 if (expected_align < align)
2916 expected_align = align;
2917 if (expected_size != -1)
2919 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2920 expected_size = max_size;
2921 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2922 expected_size = min_size;
2925 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2926 mode = GET_MODE_WIDER_MODE (mode))
2928 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2930 if (code != CODE_FOR_nothing
2931 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2932 here because if SIZE is less than the mode mask, as it is
2933 returned by the macro, it will definitely be less than the
2934 actual mode mask. Since SIZE is within the Pmode address
2935 space, we limit MODE to Pmode. */
2936 && ((CONST_INT_P (size)
2937 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2938 <= (GET_MODE_MASK (mode) >> 1)))
2939 || max_size <= (GET_MODE_MASK (mode) >> 1)
2940 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2942 struct expand_operand ops[9];
2943 unsigned int nops;
2945 nops = insn_data[(int) code].n_generator_args;
2946 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2948 create_fixed_operand (&ops[0], object);
2949 /* The check above guarantees that this size conversion is valid. */
2950 create_convert_operand_to (&ops[1], size, mode, true);
2951 create_convert_operand_from (&ops[2], val, byte_mode, true);
2952 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2953 if (nops >= 6)
2955 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2956 create_integer_operand (&ops[5], expected_size);
2958 if (nops >= 8)
2960 create_integer_operand (&ops[6], min_size);
2961 /* If we cannot represent the maximal size,
2962 use a NULL operand instead. */
2963 if ((HOST_WIDE_INT) max_size != -1)
2964 create_integer_operand (&ops[7], max_size);
2965 else
2966 create_fixed_operand (&ops[7], NULL);
2968 if (nops == 9)
2970 /* If we cannot represent the maximal size,
2971 use a NULL operand instead. */
2972 if ((HOST_WIDE_INT) probable_max_size != -1)
2973 create_integer_operand (&ops[8], probable_max_size);
2974 else
2975 create_fixed_operand (&ops[8], NULL);
2977 if (maybe_expand_insn (code, nops, ops))
2978 return true;
2982 return false;
2986 /* Write to one of the components of the complex value CPLX. Write VAL to
2987 the real part if IMAG_P is false, and the imaginary part if it's true. */
2989 static void
2990 write_complex_part (rtx cplx, rtx val, bool imag_p)
2992 enum machine_mode cmode;
2993 enum machine_mode imode;
2994 unsigned ibitsize;
2996 if (GET_CODE (cplx) == CONCAT)
2998 emit_move_insn (XEXP (cplx, imag_p), val);
2999 return;
3002 cmode = GET_MODE (cplx);
3003 imode = GET_MODE_INNER (cmode);
3004 ibitsize = GET_MODE_BITSIZE (imode);
3006 /* For MEMs simplify_gen_subreg may generate an invalid new address
3007 because, e.g., the original address is considered mode-dependent
3008 by the target, which restricts simplify_subreg from invoking
3009 adjust_address_nv. Instead of preparing fallback support for an
3010 invalid address, we call adjust_address_nv directly. */
3011 if (MEM_P (cplx))
3013 emit_move_insn (adjust_address_nv (cplx, imode,
3014 imag_p ? GET_MODE_SIZE (imode) : 0),
3015 val);
3016 return;
3019 /* If the sub-object is at least word sized, then we know that subregging
3020 will work. This special case is important, since store_bit_field
3021 wants to operate on integer modes, and there's rarely an OImode to
3022 correspond to TCmode. */
3023 if (ibitsize >= BITS_PER_WORD
3024 /* For hard regs we have exact predicates. Assume we can split
3025 the original object if it spans an even number of hard regs.
3026 This special case is important for SCmode on 64-bit platforms
3027 where the natural size of floating-point regs is 32-bit. */
3028 || (REG_P (cplx)
3029 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3030 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3032 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3033 imag_p ? GET_MODE_SIZE (imode) : 0);
3034 if (part)
3036 emit_move_insn (part, val);
3037 return;
3039 else
3040 /* simplify_gen_subreg may fail for sub-word MEMs. */
3041 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3044 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3047 /* Extract one of the components of the complex value CPLX. Extract the
3048 real part if IMAG_P is false, and the imaginary part if it's true. */
3050 static rtx
3051 read_complex_part (rtx cplx, bool imag_p)
3053 enum machine_mode cmode, imode;
3054 unsigned ibitsize;
3056 if (GET_CODE (cplx) == CONCAT)
3057 return XEXP (cplx, imag_p);
3059 cmode = GET_MODE (cplx);
3060 imode = GET_MODE_INNER (cmode);
3061 ibitsize = GET_MODE_BITSIZE (imode);
3063 /* Special case reads from complex constants that got spilled to memory. */
3064 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3066 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3067 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3069 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3070 if (CONSTANT_CLASS_P (part))
3071 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3075 /* For MEMs simplify_gen_subreg may generate an invalid new address
3076 because, e.g., the original address is considered mode-dependent
3077 by the target, which restricts simplify_subreg from invoking
3078 adjust_address_nv. Instead of preparing fallback support for an
3079 invalid address, we call adjust_address_nv directly. */
3080 if (MEM_P (cplx))
3081 return adjust_address_nv (cplx, imode,
3082 imag_p ? GET_MODE_SIZE (imode) : 0);
3084 /* If the sub-object is at least word sized, then we know that subregging
3085 will work. This special case is important, since extract_bit_field
3086 wants to operate on integer modes, and there's rarely an OImode to
3087 correspond to TCmode. */
3088 if (ibitsize >= BITS_PER_WORD
3089 /* For hard regs we have exact predicates. Assume we can split
3090 the original object if it spans an even number of hard regs.
3091 This special case is important for SCmode on 64-bit platforms
3092 where the natural size of floating-point regs is 32-bit. */
3093 || (REG_P (cplx)
3094 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3095 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3097 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3098 imag_p ? GET_MODE_SIZE (imode) : 0);
3099 if (ret)
3100 return ret;
3101 else
3102 /* simplify_gen_subreg may fail for sub-word MEMs. */
3103 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3106 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3107 true, NULL_RTX, imode, imode);
3110 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3111 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3112 represented in NEW_MODE. If FORCE is true, this will never happen, as
3113 we'll force-create a SUBREG if needed. */
3115 static rtx
3116 emit_move_change_mode (enum machine_mode new_mode,
3117 enum machine_mode old_mode, rtx x, bool force)
3119 rtx ret;
3121 if (push_operand (x, GET_MODE (x)))
3123 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3124 MEM_COPY_ATTRIBUTES (ret, x);
3126 else if (MEM_P (x))
3128 /* We don't have to worry about changing the address since the
3129 size in bytes is supposed to be the same. */
3130 if (reload_in_progress)
3132 /* Copy the MEM to change the mode and move any
3133 substitutions from the old MEM to the new one. */
3134 ret = adjust_address_nv (x, new_mode, 0);
3135 copy_replacements (x, ret);
3137 else
3138 ret = adjust_address (x, new_mode, 0);
3140 else
3142 /* Note that we do want simplify_subreg's behavior of validating
3143 that the new mode is ok for a hard register. If we were to use
3144 simplify_gen_subreg, we would create the subreg, but would
3145 probably run into the target not being able to implement it. */
3146 /* Except, of course, when FORCE is true, when this is exactly what
3147 we want. Which is needed for CCmodes on some targets. */
3148 if (force)
3149 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3150 else
3151 ret = simplify_subreg (new_mode, x, old_mode, 0);
3154 return ret;
3157 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3158 an integer mode of the same size as MODE. Returns the instruction
3159 emitted, or NULL if such a move could not be generated. */
3161 static rtx
3162 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3164 enum machine_mode imode;
3165 enum insn_code code;
3167 /* There must exist a mode of the exact size we require. */
3168 imode = int_mode_for_mode (mode);
3169 if (imode == BLKmode)
3170 return NULL_RTX;
3172 /* The target must support moves in this mode. */
3173 code = optab_handler (mov_optab, imode);
3174 if (code == CODE_FOR_nothing)
3175 return NULL_RTX;
3177 x = emit_move_change_mode (imode, mode, x, force);
3178 if (x == NULL_RTX)
3179 return NULL_RTX;
3180 y = emit_move_change_mode (imode, mode, y, force);
3181 if (y == NULL_RTX)
3182 return NULL_RTX;
3183 return emit_insn (GEN_FCN (code) (x, y));
3186 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3187 Return an equivalent MEM that does not use an auto-increment. */
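/* Illustration (a sketch, not from the sources): given a push such as
   (mem:SI (pre_dec (reg sp))), the code below emits an explicit
   stack-pointer adjustment and returns the equivalent
   (mem:SI (reg sp)), which the caller can store into without any
   auto-increment.  */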
3190 emit_move_resolve_push (enum machine_mode mode, rtx x)
3192 enum rtx_code code = GET_CODE (XEXP (x, 0));
3193 HOST_WIDE_INT adjust;
3194 rtx temp;
3196 adjust = GET_MODE_SIZE (mode);
3197 #ifdef PUSH_ROUNDING
3198 adjust = PUSH_ROUNDING (adjust);
3199 #endif
3200 if (code == PRE_DEC || code == POST_DEC)
3201 adjust = -adjust;
3202 else if (code == PRE_MODIFY || code == POST_MODIFY)
3204 rtx expr = XEXP (XEXP (x, 0), 1);
3205 HOST_WIDE_INT val;
3207 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3208 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3209 val = INTVAL (XEXP (expr, 1));
3210 if (GET_CODE (expr) == MINUS)
3211 val = -val;
3212 gcc_assert (adjust == val || adjust == -val);
3213 adjust = val;
3216 /* Do not use anti_adjust_stack, since we don't want to update
3217 stack_pointer_delta. */
3218 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3219 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3220 0, OPTAB_LIB_WIDEN);
3221 if (temp != stack_pointer_rtx)
3222 emit_move_insn (stack_pointer_rtx, temp);
3224 switch (code)
3226 case PRE_INC:
3227 case PRE_DEC:
3228 case PRE_MODIFY:
3229 temp = stack_pointer_rtx;
3230 break;
3231 case POST_INC:
3232 case POST_DEC:
3233 case POST_MODIFY:
3234 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3235 break;
3236 default:
3237 gcc_unreachable ();
3240 return replace_equiv_address (x, temp);
3243 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3244 X is known to satisfy push_operand, and MODE is known to be complex.
3245 Returns the last instruction emitted. */
3248 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3250 enum machine_mode submode = GET_MODE_INNER (mode);
3251 bool imag_first;
3253 #ifdef PUSH_ROUNDING
3254 unsigned int submodesize = GET_MODE_SIZE (submode);
3256 /* In case we output to the stack, but the size is smaller than the
3257 machine can push exactly, we need to use move instructions. */
3258 if (PUSH_ROUNDING (submodesize) != submodesize)
3260 x = emit_move_resolve_push (mode, x);
3261 return emit_move_insn (x, y);
3263 #endif
3265 /* Note that the real part always precedes the imag part in memory
3266 regardless of machine's endianness. */
3267 switch (GET_CODE (XEXP (x, 0)))
3269 case PRE_DEC:
3270 case POST_DEC:
3271 imag_first = true;
3272 break;
3273 case PRE_INC:
3274 case POST_INC:
3275 imag_first = false;
3276 break;
3277 default:
3278 gcc_unreachable ();
3281 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3282 read_complex_part (y, imag_first));
3283 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3284 read_complex_part (y, !imag_first));
3287 /* A subroutine of emit_move_complex. Perform the move from Y to X
3288 via two moves of the parts. Returns the last instruction emitted. */
3291 emit_move_complex_parts (rtx x, rtx y)
3293 /* Show the output dies here. This is necessary for SUBREGs
3294 of pseudos since we cannot track their lifetimes correctly;
3295 hard regs shouldn't appear here except as return values. */
3296 if (!reload_completed && !reload_in_progress
3297 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3298 emit_clobber (x);
3300 write_complex_part (x, read_complex_part (y, false), false);
3301 write_complex_part (x, read_complex_part (y, true), true);
3303 return get_last_insn ();
3306 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3307 MODE is known to be complex. Returns the last instruction emitted. */
3309 static rtx
3310 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3312 bool try_int;
3314 /* Need to take special care for pushes, to maintain proper ordering
3315 of the data, and possibly extra padding. */
3316 if (push_operand (x, mode))
3317 return emit_move_complex_push (mode, x, y);
3319 /* See if we can coerce the target into moving both values at once, except
3320 for floating point where we favor moving as parts if this is easy. */
3321 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3322 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3323 && !(REG_P (x)
3324 && HARD_REGISTER_P (x)
3325 && hard_regno_nregs[REGNO (x)][mode] == 1)
3326 && !(REG_P (y)
3327 && HARD_REGISTER_P (y)
3328 && hard_regno_nregs[REGNO (y)][mode] == 1))
3329 try_int = false;
3330 /* Not possible if the values are inherently not adjacent. */
3331 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3332 try_int = false;
3333 /* Is possible if both are registers (or subregs of registers). */
3334 else if (register_operand (x, mode) && register_operand (y, mode))
3335 try_int = true;
3336 /* If one of the operands is a memory, and alignment constraints
3337 are friendly enough, we may be able to do combined memory operations.
3338 We do not attempt this if Y is a constant because that combination is
3339 usually better with the by-parts thing below. */
3340 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3341 && (!STRICT_ALIGNMENT
3342 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3343 try_int = true;
3344 else
3345 try_int = false;
3347 if (try_int)
3349 rtx ret;
3351 /* For memory to memory moves, optimal behavior can be had with the
3352 existing block move logic. */
3353 if (MEM_P (x) && MEM_P (y))
3355 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3356 BLOCK_OP_NO_LIBCALL);
3357 return get_last_insn ();
3360 ret = emit_move_via_integer (mode, x, y, true);
3361 if (ret)
3362 return ret;
3365 return emit_move_complex_parts (x, y);
3368 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3369 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3371 static rtx
3372 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3374 rtx ret;
3376 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3377 if (mode != CCmode)
3379 enum insn_code code = optab_handler (mov_optab, CCmode);
3380 if (code != CODE_FOR_nothing)
3382 x = emit_move_change_mode (CCmode, mode, x, true);
3383 y = emit_move_change_mode (CCmode, mode, y, true);
3384 return emit_insn (GEN_FCN (code) (x, y));
3388 /* Otherwise, find the MODE_INT mode of the same width. */
3389 ret = emit_move_via_integer (mode, x, y, false);
3390 gcc_assert (ret != NULL);
3391 return ret;
3394 /* Return true if word I of OP lies entirely in the
3395 undefined bits of a paradoxical subreg. */
3397 static bool
3398 undefined_operand_subword_p (const_rtx op, int i)
3400 enum machine_mode innermode, innermostmode;
3401 int offset;
3402 if (GET_CODE (op) != SUBREG)
3403 return false;
3404 innermode = GET_MODE (op);
3405 innermostmode = GET_MODE (SUBREG_REG (op));
3406 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3407 /* The SUBREG_BYTE represents offset, as if the value were stored in
3408 memory, except for a paradoxical subreg where we define
3409 SUBREG_BYTE to be 0; undo this exception as in
3410 simplify_subreg. */
3411 if (SUBREG_BYTE (op) == 0
3412 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3414 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3415 if (WORDS_BIG_ENDIAN)
3416 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3417 if (BYTES_BIG_ENDIAN)
3418 offset += difference % UNITS_PER_WORD;
3420 if (offset >= GET_MODE_SIZE (innermostmode)
3421 || offset <= -GET_MODE_SIZE (word_mode))
3422 return true;
3423 return false;
3426 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3427 MODE is any multi-word or full-word mode that lacks a move_insn
3428 pattern. Note that you will get better code if you define such
3429 patterns, even if they must turn into multiple assembler instructions. */
3431 static rtx
3432 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3434 rtx last_insn = 0;
3435 rtx seq, inner;
3436 bool need_clobber;
3437 int i;
3439 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3441 /* If X is a push on the stack, do the push now and replace
3442 X with a reference to the stack pointer. */
3443 if (push_operand (x, mode))
3444 x = emit_move_resolve_push (mode, x);
3446 /* If we are in reload, see if either operand is a MEM whose address
3447 is scheduled for replacement. */
3448 if (reload_in_progress && MEM_P (x)
3449 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3450 x = replace_equiv_address_nv (x, inner);
3451 if (reload_in_progress && MEM_P (y)
3452 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3453 y = replace_equiv_address_nv (y, inner);
3455 start_sequence ();
3457 need_clobber = false;
3458 for (i = 0;
3459 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3460 i++)
3462 rtx xpart = operand_subword (x, i, 1, mode);
3463 rtx ypart;
3465 /* Do not generate code for a move if it would come entirely
3466 from the undefined bits of a paradoxical subreg. */
3467 if (undefined_operand_subword_p (y, i))
3468 continue;
3470 ypart = operand_subword (y, i, 1, mode);
3472 /* If we can't get a part of Y, put Y into memory if it is a
3473 constant. Otherwise, force it into a register. Then we must
3474 be able to get a part of Y. */
3475 if (ypart == 0 && CONSTANT_P (y))
3477 y = use_anchored_address (force_const_mem (mode, y));
3478 ypart = operand_subword (y, i, 1, mode);
3480 else if (ypart == 0)
3481 ypart = operand_subword_force (y, i, mode);
3483 gcc_assert (xpart && ypart);
3485 need_clobber |= (GET_CODE (xpart) == SUBREG);
3487 last_insn = emit_move_insn (xpart, ypart);
3490 seq = get_insns ();
3491 end_sequence ();
3493 /* Show the output dies here. This is necessary for SUBREGs
3494 of pseudos since we cannot track their lifetimes correctly;
3495 hard regs shouldn't appear here except as return values.
3496 We never want to emit such a clobber after reload. */
3497 if (x != y
3498 && ! (reload_in_progress || reload_completed)
3499 && need_clobber != 0)
3500 emit_clobber (x);
3502 emit_insn (seq);
3504 return last_insn;
3507 /* Low level part of emit_move_insn.
3508 Called just like emit_move_insn, but assumes X and Y
3509 are basically valid. */
3512 emit_move_insn_1 (rtx x, rtx y)
3514 enum machine_mode mode = GET_MODE (x);
3515 enum insn_code code;
3517 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3519 code = optab_handler (mov_optab, mode);
3520 if (code != CODE_FOR_nothing)
3521 return emit_insn (GEN_FCN (code) (x, y));
3523 /* Expand complex moves by moving real part and imag part. */
3524 if (COMPLEX_MODE_P (mode))
3525 return emit_move_complex (mode, x, y);
3527 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3528 || ALL_FIXED_POINT_MODE_P (mode))
3530 rtx result = emit_move_via_integer (mode, x, y, true);
3532 /* If we can't find an integer mode, use multi words. */
3533 if (result)
3534 return result;
3535 else
3536 return emit_move_multi_word (mode, x, y);
3539 if (GET_MODE_CLASS (mode) == MODE_CC)
3540 return emit_move_ccmode (mode, x, y);
3542 /* Try using a move pattern for the corresponding integer mode. This is
3543 only safe when simplify_subreg can convert MODE constants into integer
3544 constants. At present, it can only do this reliably if the value
3545 fits within a HOST_WIDE_INT. */
3546 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3548 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3550 if (ret)
3552 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3553 return ret;
3557 return emit_move_multi_word (mode, x, y);
3560 /* Generate code to copy Y into X.
3561 Both Y and X must have the same mode, except that
3562 Y can be a constant with VOIDmode.
3563 This mode cannot be BLKmode; use emit_block_move for that.
3565 Return the last instruction emitted. */
3568 emit_move_insn (rtx x, rtx y)
3570 enum machine_mode mode = GET_MODE (x);
3571 rtx y_cst = NULL_RTX;
3572 rtx last_insn, set;
3574 gcc_assert (mode != BLKmode
3575 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3577 if (CONSTANT_P (y))
3579 if (optimize
3580 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3581 && (last_insn = compress_float_constant (x, y)))
3582 return last_insn;
3584 y_cst = y;
3586 if (!targetm.legitimate_constant_p (mode, y))
3588 y = force_const_mem (mode, y);
3590 /* If the target's cannot_force_const_mem prevented the spill,
3591 assume that the target's move expanders will also take care
3592 of the non-legitimate constant. */
3593 if (!y)
3594 y = y_cst;
3595 else
3596 y = use_anchored_address (y);
3600 /* If X or Y are memory references, verify that their addresses are valid
3601 for the machine. */
3602 if (MEM_P (x)
3603 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3604 MEM_ADDR_SPACE (x))
3605 && ! push_operand (x, GET_MODE (x))))
3606 x = validize_mem (x);
3608 if (MEM_P (y)
3609 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3610 MEM_ADDR_SPACE (y)))
3611 y = validize_mem (y);
3613 gcc_assert (mode != BLKmode);
3615 last_insn = emit_move_insn_1 (x, y);
3617 if (y_cst && REG_P (x)
3618 && (set = single_set (last_insn)) != NULL_RTX
3619 && SET_DEST (set) == x
3620 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3621 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3623 return last_insn;
3626 /* If Y is representable exactly in a narrower mode, and the target can
3627 perform the extension directly from constant or memory, then emit the
3628 move as an extension. */
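/* Worked example (illustrative, assuming the usual IEEE formats): a
   DFmode constant such as 1.5 is exactly representable in SFmode, so
   when the target can extend directly from an SFmode constant or
   memory operand, the move is emitted as a FLOAT_EXTEND of the
   narrower constant, provided that is no more expensive than loading
   the DFmode constant itself.  */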
3630 static rtx
3631 compress_float_constant (rtx x, rtx y)
3633 enum machine_mode dstmode = GET_MODE (x);
3634 enum machine_mode orig_srcmode = GET_MODE (y);
3635 enum machine_mode srcmode;
3636 REAL_VALUE_TYPE r;
3637 int oldcost, newcost;
3638 bool speed = optimize_insn_for_speed_p ();
3640 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3642 if (targetm.legitimate_constant_p (dstmode, y))
3643 oldcost = set_src_cost (y, speed);
3644 else
3645 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3647 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3648 srcmode != orig_srcmode;
3649 srcmode = GET_MODE_WIDER_MODE (srcmode))
3651 enum insn_code ic;
3652 rtx trunc_y, last_insn;
3654 /* Skip if the target can't extend this way. */
3655 ic = can_extend_p (dstmode, srcmode, 0);
3656 if (ic == CODE_FOR_nothing)
3657 continue;
3659 /* Skip if the narrowed value isn't exact. */
3660 if (! exact_real_truncate (srcmode, &r))
3661 continue;
3663 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3665 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3667 /* Skip if the target needs extra instructions to perform
3668 the extension. */
3669 if (!insn_operand_matches (ic, 1, trunc_y))
3670 continue;
3671 /* This is valid, but may not be cheaper than the original. */
3672 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3673 speed);
3674 if (oldcost < newcost)
3675 continue;
3677 else if (float_extend_from_mem[dstmode][srcmode])
3679 trunc_y = force_const_mem (srcmode, trunc_y);
3680 /* This is valid, but may not be cheaper than the original. */
3681 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3682 speed);
3683 if (oldcost < newcost)
3684 continue;
3685 trunc_y = validize_mem (trunc_y);
3687 else
3688 continue;
3690 /* For CSE's benefit, force the compressed constant pool entry
3691 into a new pseudo. This constant may be used in different modes,
3692 and if not, combine will put things back together for us. */
3693 trunc_y = force_reg (srcmode, trunc_y);
3695 /* If x is a hard register, perform the extension into a pseudo,
3696 so that e.g. stack realignment code is aware of it. */
3697 rtx target = x;
3698 if (REG_P (x) && HARD_REGISTER_P (x))
3699 target = gen_reg_rtx (dstmode);
3701 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3702 last_insn = get_last_insn ();
3704 if (REG_P (target))
3705 set_unique_reg_note (last_insn, REG_EQUAL, y);
3707 if (target != x)
3708 return emit_move_insn (x, target);
3709 return last_insn;
3712 return NULL_RTX;
3715 /* Pushing data onto the stack. */
3717 /* Push a block of length SIZE (perhaps variable)
3718 and return an rtx to address the beginning of the block.
3719 The value may be virtual_outgoing_args_rtx.
3721 EXTRA is the number of bytes of padding to push in addition to SIZE.
3722 BELOW nonzero means this padding comes at low addresses;
3723 otherwise, the padding comes at high addresses. */
3726 push_block (rtx size, int extra, int below)
3728 rtx temp;
3730 size = convert_modes (Pmode, ptr_mode, size, 1);
3731 if (CONSTANT_P (size))
3732 anti_adjust_stack (plus_constant (Pmode, size, extra));
3733 else if (REG_P (size) && extra == 0)
3734 anti_adjust_stack (size);
3735 else
3737 temp = copy_to_mode_reg (Pmode, size);
3738 if (extra != 0)
3739 temp = expand_binop (Pmode, add_optab, temp,
3740 gen_int_mode (extra, Pmode),
3741 temp, 0, OPTAB_LIB_WIDEN);
3742 anti_adjust_stack (temp);
3745 #ifndef STACK_GROWS_DOWNWARD
3746 if (0)
3747 #else
3748 if (1)
3749 #endif
3751 temp = virtual_outgoing_args_rtx;
3752 if (extra != 0 && below)
3753 temp = plus_constant (Pmode, temp, extra);
3755 else
3757 if (CONST_INT_P (size))
3758 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3759 -INTVAL (size) - (below ? 0 : extra));
3760 else if (extra != 0 && !below)
3761 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3762 negate_rtx (Pmode, plus_constant (Pmode, size,
3763 extra)));
3764 else
3765 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3766 negate_rtx (Pmode, size));
3769 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
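/* Illustrative sketch (not part of expr.c, names are for illustration
   only): in plain integer terms, the address returned above is the
   outgoing-args pointer itself on a downward-growing stack (plus
   EXTRA when the padding sits below the block); the explicit "- SIZE"
   arithmetic is only needed when the stack grows upward: */

static long
push_block_start_sketch (long outgoing_args, long size, long extra,
                         int below, int stack_grows_downward)
{
  if (stack_grows_downward)
    return outgoing_args + (below ? extra : 0);

  return outgoing_args - size - (below ? 0 : extra);
}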
3772 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3774 static rtx
3775 mem_autoinc_base (rtx mem)
3777 if (MEM_P (mem))
3779 rtx addr = XEXP (mem, 0);
3780 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3781 return XEXP (addr, 0);
3783 return NULL;
3786 /* A utility routine used here, in reload, and in try_split. The insns
3787 after PREV up to and including LAST are known to adjust the stack,
3788 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3789 placing notes as appropriate. PREV may be NULL, indicating the
3790 entire insn sequence prior to LAST should be scanned.
3792 The set of allowed stack pointer modifications is small:
3793 (1) One or more auto-inc style memory references (aka pushes),
3794 (2) One or more addition/subtraction with the SP as destination,
3795 (3) A single move insn with the SP as destination,
3796 (4) A call_pop insn,
3797 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3799 Insns in the sequence that do not modify the SP are ignored,
3800 except for noreturn calls.
3802 The return value is the amount of adjustment that can be trivially
3803 verified, via immediate operand or auto-inc. If the adjustment
3804 cannot be trivially extracted, the return value is INT_MIN. */
3806 HOST_WIDE_INT
3807 find_args_size_adjust (rtx insn)
3809 rtx dest, set, pat;
3810 int i;
3812 pat = PATTERN (insn);
3813 set = NULL;
3815 /* Look for a call_pop pattern. */
3816 if (CALL_P (insn))
3818 /* We have to allow non-call_pop patterns for the case
3819 of emit_single_push_insn of a TLS address. */
3820 if (GET_CODE (pat) != PARALLEL)
3821 return 0;
3823 /* All call_pop have a stack pointer adjust in the parallel.
3824 The call itself is always first, and the stack adjust is
3825 usually last, so search from the end. */
3826 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3828 set = XVECEXP (pat, 0, i);
3829 if (GET_CODE (set) != SET)
3830 continue;
3831 dest = SET_DEST (set);
3832 if (dest == stack_pointer_rtx)
3833 break;
3835 /* We'd better have found the stack pointer adjust. */
3836 if (i == 0)
3837 return 0;
3838 /* Fall through to process the extracted SET and DEST
3839 as if it were a standalone insn. */
3841 else if (GET_CODE (pat) == SET)
3842 set = pat;
3843 else if ((set = single_set (insn)) != NULL)
3845 else if (GET_CODE (pat) == PARALLEL)
3847 /* ??? Some older ports use a parallel with a stack adjust
3848 and a store for a PUSH_ROUNDING pattern, rather than a
3849 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3850 /* ??? See h8300 and m68k, pushqi1. */
3851 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3853 set = XVECEXP (pat, 0, i);
3854 if (GET_CODE (set) != SET)
3855 continue;
3856 dest = SET_DEST (set);
3857 if (dest == stack_pointer_rtx)
3858 break;
3860 /* We do not expect an auto-inc of the sp in the parallel. */
3861 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3862 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3863 != stack_pointer_rtx);
3865 if (i < 0)
3866 return 0;
3868 else
3869 return 0;
3871 dest = SET_DEST (set);
3873 /* Look for direct modifications of the stack pointer. */
3874 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3876 /* Look for a trivial adjustment, otherwise assume nothing. */
3877 /* Note that the SPU restore_stack_block pattern refers to
3878 the stack pointer in V4SImode. Consider that non-trivial. */
3879 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3880 && GET_CODE (SET_SRC (set)) == PLUS
3881 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3882 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3883 return INTVAL (XEXP (SET_SRC (set), 1));
3884 /* ??? Reload can generate no-op moves, which will be cleaned
3885 up later. Recognize it and continue searching. */
3886 else if (rtx_equal_p (dest, SET_SRC (set)))
3887 return 0;
3888 else
3889 return HOST_WIDE_INT_MIN;
3891 else
3893 rtx mem, addr;
3895 /* Otherwise only think about autoinc patterns. */
3896 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3898 mem = dest;
3899 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3900 != stack_pointer_rtx);
3902 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3903 mem = SET_SRC (set);
3904 else
3905 return 0;
3907 addr = XEXP (mem, 0);
3908 switch (GET_CODE (addr))
3910 case PRE_INC:
3911 case POST_INC:
3912 return GET_MODE_SIZE (GET_MODE (mem));
3913 case PRE_DEC:
3914 case POST_DEC:
3915 return -GET_MODE_SIZE (GET_MODE (mem));
3916 case PRE_MODIFY:
3917 case POST_MODIFY:
3918 addr = XEXP (addr, 1);
3919 gcc_assert (GET_CODE (addr) == PLUS);
3920 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3921 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3922 return INTVAL (XEXP (addr, 1));
3923 default:
3924 gcc_unreachable ();
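/* Illustrative sketch (not part of expr.c): the trivially verifiable
   adjustments recognized above boil down to three shapes.  Assuming a
   4-byte SImode push on a downward-growing stack:

     (set (reg sp) (plus (reg sp) (const_int -16)))        -> -16
     (set (mem:SI (pre_dec (reg sp))) (reg r0))            -> -4
     (set (mem:SI (pre_modify (reg sp)
                    (plus (reg sp) (const_int -8)))) ...)  -> -8

   A stack pointer modification that is not one of these simple forms
   yields HOST_WIDE_INT_MIN, which fixup_args_size_notes treats as an
   unknown adjustment.  */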
3930 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3932 int args_size = end_args_size;
3933 bool saw_unknown = false;
3934 rtx insn;
3936 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3938 HOST_WIDE_INT this_delta;
3940 if (!NONDEBUG_INSN_P (insn))
3941 continue;
3943 this_delta = find_args_size_adjust (insn);
3944 if (this_delta == 0)
3946 if (!CALL_P (insn)
3947 || ACCUMULATE_OUTGOING_ARGS
3948 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3949 continue;
3952 gcc_assert (!saw_unknown);
3953 if (this_delta == HOST_WIDE_INT_MIN)
3954 saw_unknown = true;
3956 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3957 #ifdef STACK_GROWS_DOWNWARD
3958 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3959 #endif
3960 args_size -= this_delta;
3963 return saw_unknown ? INT_MIN : args_size;
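/* Illustrative sketch (not part of expr.c, names are for illustration
   only): stripped of the REG_NORETURN and unknown-adjustment handling,
   the backward walk above amounts to the following, for a
   downward-growing stack and per-insn adjustments DELTA[0..N-1] as
   returned by find_args_size_adjust: */

static void
note_args_sizes_sketch (const int *delta, int *note, int n,
                        int end_args_size)
{
  int args_size = end_args_size;
  int i;

  for (i = n - 1; i >= 0; i--)
    {
      note[i] = args_size;        /* the REG_ARGS_SIZE after insn I   */
      args_size -= -delta[i];     /* STACK_GROWS_DOWNWARD: negate,
                                     then undo insn I's adjustment    */
    }
}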
3966 #ifdef PUSH_ROUNDING
3967 /* Emit single push insn. */
3969 static void
3970 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3972 rtx dest_addr;
3973 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3974 rtx dest;
3975 enum insn_code icode;
3977 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3978 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3979 a MEM representing the push operation to the move expander. */
3980 icode = optab_handler (push_optab, mode);
3981 if (icode != CODE_FOR_nothing)
3983 struct expand_operand ops[1];
3985 create_input_operand (&ops[0], x, mode);
3986 if (maybe_expand_insn (icode, 1, ops))
3987 return;
3989 if (GET_MODE_SIZE (mode) == rounded_size)
3990 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3991 /* If we are to pad downward, adjust the stack pointer first and
3992 then store X into the stack location using an offset. This is
3993 because emit_move_insn does not know how to pad; it does not have
3994 access to type. */
3995 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3997 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3998 HOST_WIDE_INT offset;
4000 emit_move_insn (stack_pointer_rtx,
4001 expand_binop (Pmode,
4002 #ifdef STACK_GROWS_DOWNWARD
4003 sub_optab,
4004 #else
4005 add_optab,
4006 #endif
4007 stack_pointer_rtx,
4008 gen_int_mode (rounded_size, Pmode),
4009 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4011 offset = (HOST_WIDE_INT) padding_size;
4012 #ifdef STACK_GROWS_DOWNWARD
4013 if (STACK_PUSH_CODE == POST_DEC)
4014 /* We have already decremented the stack pointer, so get the
4015 previous value. */
4016 offset += (HOST_WIDE_INT) rounded_size;
4017 #else
4018 if (STACK_PUSH_CODE == POST_INC)
4019 /* We have already incremented the stack pointer, so get the
4020 previous value. */
4021 offset -= (HOST_WIDE_INT) rounded_size;
4022 #endif
4023 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4024 gen_int_mode (offset, Pmode));
4026 else
4028 #ifdef STACK_GROWS_DOWNWARD
4029 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4030 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4031 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4032 Pmode));
4033 #else
4034 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4035 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4036 gen_int_mode (rounded_size, Pmode));
4037 #endif
4038 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4041 dest = gen_rtx_MEM (mode, dest_addr);
4043 if (type != 0)
4045 set_mem_attributes (dest, type, 1);
4047 if (cfun->tail_call_marked)
4048 /* Function incoming arguments may overlap with sibling call
4049 outgoing arguments and we cannot allow reordering of reads
4050 from function arguments with stores to outgoing arguments
4051 of sibling calls. */
4052 set_mem_alias_set (dest, 0);
4054 emit_move_insn (dest, x);
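/* Illustrative sketch (not part of expr.c, names are for illustration
   only): for the pad-downward path above, assume a downward-growing
   stack, PUSH_ROUNDING rounding to a 4-byte multiple and a 2-byte
   HImode argument (the extra offset adjustment made for
   POST_DEC/POST_INC targets is left out).  Then rounded_size is 4,
   padding_size is 2, the stack pointer drops by 4, and the value is
   stored at sp + 2 so the padding sits at the low addresses: */

static long
padded_push_address_sketch (long sp, unsigned int mode_size,
                            unsigned int rounded_size)
{
  unsigned int padding_size = rounded_size - mode_size;  /* 4 - 2 = 2 */

  sp -= rounded_size;           /* explicit stack adjustment          */
  return sp + padding_size;     /* address the value is stored at     */
}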
4057 /* Emit and annotate a single push insn. */
4059 static void
4060 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4062 int delta, old_delta = stack_pointer_delta;
4063 rtx prev = get_last_insn ();
4064 rtx last;
4066 emit_single_push_insn_1 (mode, x, type);
4068 last = get_last_insn ();
4070 /* Notice the common case where we emitted exactly one insn. */
4071 if (PREV_INSN (last) == prev)
4073 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4074 return;
4077 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4078 gcc_assert (delta == INT_MIN || delta == old_delta);
4080 #endif
4082 /* Generate code to push X onto the stack, assuming it has mode MODE and
4083 type TYPE.
4084 MODE is redundant except when X is a CONST_INT (since they don't
4085 carry mode info).
4086 SIZE is an rtx for the size of data to be copied (in bytes),
4087 needed only if X is BLKmode.
4089 ALIGN (in bits) is maximum alignment we can assume.
4091 If PARTIAL and REG are both nonzero, then copy that many of the first
4092 bytes of X into registers starting with REG, and push the rest of X.
4093 The amount of space pushed is decreased by PARTIAL bytes.
4094 REG must be a hard register in this case.
4095 If REG is zero but PARTIAL is not, take all other actions for an
4096 argument partially in registers, but do not actually load any
4097 registers.
4099 EXTRA is the amount in bytes of extra space to leave next to this arg.
4100 This is ignored if an argument block has already been allocated.
4102 On a machine that lacks real push insns, ARGS_ADDR is the address of
4103 the bottom of the argument block for this call. We use indexing off there
4104 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4105 argument block has not been preallocated.
4107 ARGS_SO_FAR is the size of args previously pushed for this call.
4109 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4110 for arguments passed in registers. If nonzero, it will be the number
4111 of bytes required. */
4113 void
4114 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4115 unsigned int align, int partial, rtx reg, int extra,
4116 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4117 rtx alignment_pad)
4119 rtx xinner;
4120 enum direction stack_direction
4121 #ifdef STACK_GROWS_DOWNWARD
4122 = downward;
4123 #else
4124 = upward;
4125 #endif
4127 /* Decide where to pad the argument: `downward' for below,
4128 `upward' for above, or `none' for don't pad it.
4129 Default is below for small data on big-endian machines; else above. */
4130 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4132 /* Invert direction if stack is post-decrement.
4133 FIXME: why? */
4134 if (STACK_PUSH_CODE == POST_DEC)
4135 if (where_pad != none)
4136 where_pad = (where_pad == downward ? upward : downward);
4138 xinner = x;
4140 if (mode == BLKmode
4141 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4143 /* Copy a block into the stack, entirely or partially. */
4145 rtx temp;
4146 int used;
4147 int offset;
4148 int skip;
4150 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4151 used = partial - offset;
4153 if (mode != BLKmode)
4155 /* A value is to be stored in an insufficiently aligned
4156 stack slot; copy via a suitably aligned slot if
4157 necessary. */
4158 size = GEN_INT (GET_MODE_SIZE (mode));
4159 if (!MEM_P (xinner))
4161 temp = assign_temp (type, 1, 1);
4162 emit_move_insn (temp, xinner);
4163 xinner = temp;
4167 gcc_assert (size);
4169 /* USED is now the # of bytes we need not copy to the stack
4170 because registers will take care of them. */
4172 if (partial != 0)
4173 xinner = adjust_address (xinner, BLKmode, used);
4175 /* If the partial register-part of the arg counts in its stack size,
4176 skip the part of stack space corresponding to the registers.
4177 Otherwise, start copying to the beginning of the stack space,
4178 by setting SKIP to 0. */
4179 skip = (reg_parm_stack_space == 0) ? 0 : used;
4181 #ifdef PUSH_ROUNDING
4182 /* Do it with several push insns if that doesn't take lots of insns
4183 and if there is no difficulty with push insns that skip bytes
4184 on the stack for alignment purposes. */
4185 if (args_addr == 0
4186 && PUSH_ARGS
4187 && CONST_INT_P (size)
4188 && skip == 0
4189 && MEM_ALIGN (xinner) >= align
4190 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4191 /* Here we avoid the case of a structure whose weak alignment
4192 forces many pushes of a small amount of data,
4193 and such small pushes do rounding that causes trouble. */
4194 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4195 || align >= BIGGEST_ALIGNMENT
4196 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4197 == (align / BITS_PER_UNIT)))
4198 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4200 /* Push padding now if padding above and stack grows down,
4201 or if padding below and stack grows up.
4202 But if space already allocated, this has already been done. */
4203 if (extra && args_addr == 0
4204 && where_pad != none && where_pad != stack_direction)
4205 anti_adjust_stack (GEN_INT (extra));
4207 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4209 else
4210 #endif /* PUSH_ROUNDING */
4212 rtx target;
4214 /* Otherwise make space on the stack and copy the data
4215 to the address of that space. */
4217 /* Deduct words put into registers from the size we must copy. */
4218 if (partial != 0)
4220 if (CONST_INT_P (size))
4221 size = GEN_INT (INTVAL (size) - used);
4222 else
4223 size = expand_binop (GET_MODE (size), sub_optab, size,
4224 gen_int_mode (used, GET_MODE (size)),
4225 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4228 /* Get the address of the stack space.
4229 In this case, we do not deal with EXTRA separately.
4230 A single stack adjust will do. */
4231 if (! args_addr)
4233 temp = push_block (size, extra, where_pad == downward);
4234 extra = 0;
4236 else if (CONST_INT_P (args_so_far))
4237 temp = memory_address (BLKmode,
4238 plus_constant (Pmode, args_addr,
4239 skip + INTVAL (args_so_far)));
4240 else
4241 temp = memory_address (BLKmode,
4242 plus_constant (Pmode,
4243 gen_rtx_PLUS (Pmode,
4244 args_addr,
4245 args_so_far),
4246 skip));
4248 if (!ACCUMULATE_OUTGOING_ARGS)
4250 /* If the source is referenced relative to the stack pointer,
4251 copy it to another register to stabilize it. We do not need
4252 to do this if we know that we won't be changing sp. */
4254 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4255 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4256 temp = copy_to_reg (temp);
4259 target = gen_rtx_MEM (BLKmode, temp);
4261 /* We do *not* set_mem_attributes here, because incoming arguments
4262 may overlap with sibling call outgoing arguments and we cannot
4263 allow reordering of reads from function arguments with stores
4264 to outgoing arguments of sibling calls. We do, however, want
4265 to record the alignment of the stack slot. */
4266 /* ALIGN may well be better aligned than TYPE, e.g. due to
4267 PARM_BOUNDARY. Assume the caller isn't lying. */
4268 set_mem_align (target, align);
4270 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4273 else if (partial > 0)
4275 /* Scalar partly in registers. */
4277 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4278 int i;
4279 int not_stack;
4280 /* # bytes of start of argument
4281 that we must make space for but need not store. */
4282 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4283 int args_offset = INTVAL (args_so_far);
4284 int skip;
4286 /* Push padding now if padding above and stack grows down,
4287 or if padding below and stack grows up.
4288 But if space already allocated, this has already been done. */
4289 if (extra && args_addr == 0
4290 && where_pad != none && where_pad != stack_direction)
4291 anti_adjust_stack (GEN_INT (extra));
4293 /* If we make space by pushing it, we might as well push
4294 the real data. Otherwise, we can leave OFFSET nonzero
4295 and leave the space uninitialized. */
4296 if (args_addr == 0)
4297 offset = 0;
4299 /* Now NOT_STACK gets the number of words that we don't need to
4300 allocate on the stack. Convert OFFSET to words too. */
4301 not_stack = (partial - offset) / UNITS_PER_WORD;
4302 offset /= UNITS_PER_WORD;
4304 /* If the partial register-part of the arg counts in its stack size,
4305 skip the part of stack space corresponding to the registers.
4306 Otherwise, start copying to the beginning of the stack space,
4307 by setting SKIP to 0. */
4308 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4310 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4311 x = validize_mem (force_const_mem (mode, x));
4313 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4314 SUBREGs of such registers are not allowed. */
4315 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4316 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4317 x = copy_to_reg (x);
4319 /* Loop over all the words allocated on the stack for this arg. */
4320 /* We can do it by words, because any scalar bigger than a word
4321 has a size that is a multiple of a word. */
4322 for (i = size - 1; i >= not_stack; i--)
4323 if (i >= not_stack + offset)
4324 emit_push_insn (operand_subword_force (x, i, mode),
4325 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4326 0, args_addr,
4327 GEN_INT (args_offset + ((i - not_stack + skip)
4328 * UNITS_PER_WORD)),
4329 reg_parm_stack_space, alignment_pad);
4331 else
4333 rtx addr;
4334 rtx dest;
4336 /* Push padding now if padding above and stack grows down,
4337 or if padding below and stack grows up.
4338 But if space already allocated, this has already been done. */
4339 if (extra && args_addr == 0
4340 && where_pad != none && where_pad != stack_direction)
4341 anti_adjust_stack (GEN_INT (extra));
4343 #ifdef PUSH_ROUNDING
4344 if (args_addr == 0 && PUSH_ARGS)
4345 emit_single_push_insn (mode, x, type);
4346 else
4347 #endif
4349 if (CONST_INT_P (args_so_far))
4350 addr
4351 = memory_address (mode,
4352 plus_constant (Pmode, args_addr,
4353 INTVAL (args_so_far)));
4354 else
4355 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4356 args_so_far));
4357 dest = gen_rtx_MEM (mode, addr);
4359 /* We do *not* set_mem_attributes here, because incoming arguments
4360 may overlap with sibling call outgoing arguments and we cannot
4361 allow reordering of reads from function arguments with stores
4362 to outgoing arguments of sibling calls. We do, however, want
4363 to record the alignment of the stack slot. */
4364 /* ALIGN may well be better aligned than TYPE, e.g. due to
4365 PARM_BOUNDARY. Assume the caller isn't lying. */
4366 set_mem_align (dest, align);
4368 emit_move_insn (dest, x);
4372 /* If part should go in registers, copy that part
4373 into the appropriate registers. Do this now, at the end,
4374 since mem-to-mem copies above may do function calls. */
4375 if (partial > 0 && reg != 0)
4377 /* Handle calls that pass values in multiple non-contiguous locations.
4378 The Irix 6 ABI has examples of this. */
4379 if (GET_CODE (reg) == PARALLEL)
4380 emit_group_load (reg, x, type, -1);
4381 else
4383 gcc_assert (partial % UNITS_PER_WORD == 0);
4384 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4388 if (extra && args_addr == 0 && where_pad == stack_direction)
4389 anti_adjust_stack (GEN_INT (extra));
4391 if (alignment_pad && args_addr == 0)
4392 anti_adjust_stack (alignment_pad);
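/* Illustrative sketch (not part of expr.c, names are for illustration
   only): for the "scalar partly in registers" case above, assume
   4-byte words, a 16-byte scalar, partial == 8 and
   reg_parm_stack_space == 0.  Then offset is 0, not_stack is 2, and
   the loop pushes only word indices 3 and 2; the two low-numbered
   words travel in registers and get no store: */

static int
words_pushed_sketch (int total_bytes, int partial_bytes, int word_bytes)
{
  int size = total_bytes / word_bytes;         /* 16 / 4 = 4 words    */
  int not_stack = partial_bytes / word_bytes;  /*  8 / 4 = 2 words    */

  return size - not_stack;                     /* 2 words get pushed  */
}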
4395 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4396 operations. */
4398 static rtx
4399 get_subtarget (rtx x)
4401 return (optimize
4402 || x == 0
4403 /* Only registers can be subtargets. */
4404 || !REG_P (x)
4405 /* Don't use hard regs to avoid extending their life. */
4406 || REGNO (x) < FIRST_PSEUDO_REGISTER
4407 ? 0 : x);
4410 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4411 FIELD is a bitfield. Returns true if the optimization was successful,
4412 and there's nothing else to do. */
4414 static bool
4415 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4416 unsigned HOST_WIDE_INT bitpos,
4417 unsigned HOST_WIDE_INT bitregion_start,
4418 unsigned HOST_WIDE_INT bitregion_end,
4419 enum machine_mode mode1, rtx str_rtx,
4420 tree to, tree src)
4422 enum machine_mode str_mode = GET_MODE (str_rtx);
4423 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4424 tree op0, op1;
4425 rtx value, result;
4426 optab binop;
4427 gimple srcstmt;
4428 enum tree_code code;
4430 if (mode1 != VOIDmode
4431 || bitsize >= BITS_PER_WORD
4432 || str_bitsize > BITS_PER_WORD
4433 || TREE_SIDE_EFFECTS (to)
4434 || TREE_THIS_VOLATILE (to))
4435 return false;
4437 STRIP_NOPS (src);
4438 if (TREE_CODE (src) != SSA_NAME)
4439 return false;
4440 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4441 return false;
4443 srcstmt = get_gimple_for_ssa_name (src);
4444 if (!srcstmt
4445 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4446 return false;
4448 code = gimple_assign_rhs_code (srcstmt);
4450 op0 = gimple_assign_rhs1 (srcstmt);
4452 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4453 to find its initialization. Hopefully the initialization will
4454 be from a bitfield load. */
4455 if (TREE_CODE (op0) == SSA_NAME)
4457 gimple op0stmt = get_gimple_for_ssa_name (op0);
4459 /* We want to eventually have OP0 be the same as TO, which
4460 should be a bitfield. */
4461 if (!op0stmt
4462 || !is_gimple_assign (op0stmt)
4463 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4464 return false;
4465 op0 = gimple_assign_rhs1 (op0stmt);
4468 op1 = gimple_assign_rhs2 (srcstmt);
4470 if (!operand_equal_p (to, op0, 0))
4471 return false;
4473 if (MEM_P (str_rtx))
4475 unsigned HOST_WIDE_INT offset1;
4477 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4478 str_mode = word_mode;
4479 str_mode = get_best_mode (bitsize, bitpos,
4480 bitregion_start, bitregion_end,
4481 MEM_ALIGN (str_rtx), str_mode, 0);
4482 if (str_mode == VOIDmode)
4483 return false;
4484 str_bitsize = GET_MODE_BITSIZE (str_mode);
4486 offset1 = bitpos;
4487 bitpos %= str_bitsize;
4488 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4489 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4491 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4492 return false;
4494 /* If the bit field covers the whole REG/MEM, store_field
4495 will likely generate better code. */
4496 if (bitsize >= str_bitsize)
4497 return false;
4499 /* We can't handle fields split across multiple entities. */
4500 if (bitpos + bitsize > str_bitsize)
4501 return false;
4503 if (BYTES_BIG_ENDIAN)
4504 bitpos = str_bitsize - bitpos - bitsize;
4506 switch (code)
4508 case PLUS_EXPR:
4509 case MINUS_EXPR:
4510 /* For now, just optimize the case of the topmost bitfield
4511 where we don't need to do any masking and also
4512 1-bit bitfields where xor can be used.
4513 We might win by one instruction for the other bitfields
4514 too if insv/extv instructions aren't used, so that
4515 can be added later. */
4516 if (bitpos + bitsize != str_bitsize
4517 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4518 break;
4520 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4521 value = convert_modes (str_mode,
4522 TYPE_MODE (TREE_TYPE (op1)), value,
4523 TYPE_UNSIGNED (TREE_TYPE (op1)));
4525 /* We may be accessing data outside the field, which means
4526 we can alias adjacent data. */
4527 if (MEM_P (str_rtx))
4529 str_rtx = shallow_copy_rtx (str_rtx);
4530 set_mem_alias_set (str_rtx, 0);
4531 set_mem_expr (str_rtx, 0);
4534 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4535 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4537 value = expand_and (str_mode, value, const1_rtx, NULL);
4538 binop = xor_optab;
4540 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4541 result = expand_binop (str_mode, binop, str_rtx,
4542 value, str_rtx, 1, OPTAB_WIDEN);
4543 if (result != str_rtx)
4544 emit_move_insn (str_rtx, result);
4545 return true;
4547 case BIT_IOR_EXPR:
4548 case BIT_XOR_EXPR:
4549 if (TREE_CODE (op1) != INTEGER_CST)
4550 break;
4551 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4552 value = convert_modes (str_mode,
4553 TYPE_MODE (TREE_TYPE (op1)), value,
4554 TYPE_UNSIGNED (TREE_TYPE (op1)));
4556 /* We may be accessing data outside the field, which means
4557 we can alias adjacent data. */
4558 if (MEM_P (str_rtx))
4560 str_rtx = shallow_copy_rtx (str_rtx);
4561 set_mem_alias_set (str_rtx, 0);
4562 set_mem_expr (str_rtx, 0);
4565 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4566 if (bitpos + bitsize != str_bitsize)
4568 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4569 str_mode);
4570 value = expand_and (str_mode, value, mask, NULL_RTX);
4572 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4573 result = expand_binop (str_mode, binop, str_rtx,
4574 value, str_rtx, 1, OPTAB_WIDEN);
4575 if (result != str_rtx)
4576 emit_move_insn (str_rtx, result);
4577 return true;
4579 default:
4580 break;
4583 return false;
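/* Illustrative sketch (not part of expr.c, names are for illustration
   only): for the BIT_IOR_EXPR case above, storing "to |= val" into a
   BITSIZE-bit field at BITPOS of the containing word W reduces to
   mask, shift and a single IOR, with no separate read-modify-write of
   the field (bit positions counted from the least significant bit;
   the BYTES_BIG_ENDIAN flip is left out): */

static unsigned int
ior_into_bitfield_sketch (unsigned int w, unsigned int val,
                          unsigned int bitpos, unsigned int bitsize)
{
  unsigned int mask = (bitsize < 32
                       ? (1u << bitsize) - 1
                       : ~0u);                 /* field-width mask    */

  return w | ((val & mask) << bitpos);         /* one IOR instruction */
}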
4586 /* In the C++ memory model, consecutive bit fields in a structure are
4587 considered one memory location.
4589 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4590 returns the bit range of consecutive bits in which this COMPONENT_REF
4591 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4592 and *OFFSET may be adjusted in the process.
4594 If the access does not need to be restricted, 0 is returned in both
4595 *BITSTART and *BITEND. */
4597 static void
4598 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4599 unsigned HOST_WIDE_INT *bitend,
4600 tree exp,
4601 HOST_WIDE_INT *bitpos,
4602 tree *offset)
4604 HOST_WIDE_INT bitoffset;
4605 tree field, repr;
4607 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4609 field = TREE_OPERAND (exp, 1);
4610 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4611 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4612 need to limit the range we can access. */
4613 if (!repr)
4615 *bitstart = *bitend = 0;
4616 return;
4619 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4620 part of a larger bit field, then the representative does not serve any
4621 useful purpose. This can occur in Ada. */
4622 if (handled_component_p (TREE_OPERAND (exp, 0)))
4624 enum machine_mode rmode;
4625 HOST_WIDE_INT rbitsize, rbitpos;
4626 tree roffset;
4627 int unsignedp;
4628 int volatilep = 0;
4629 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4630 &roffset, &rmode, &unsignedp, &volatilep, false);
4631 if ((rbitpos % BITS_PER_UNIT) != 0)
4633 *bitstart = *bitend = 0;
4634 return;
4638 /* Compute the adjustment to bitpos from the offset of the field
4639 relative to the representative. DECL_FIELD_OFFSET of field and
4640 repr are the same by construction if they are not constants,
4641 see finish_bitfield_layout. */
4642 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4643 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4644 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4645 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4646 else
4647 bitoffset = 0;
4648 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4649 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4651 /* If the adjustment is larger than bitpos, we would have a negative bit
4652 position for the lower bound and this may wreak havoc later. Adjust
4653 offset and bitpos to make the lower bound non-negative in that case. */
4654 if (bitoffset > *bitpos)
4656 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4657 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4659 *bitpos += adjust;
4660 if (*offset == NULL_TREE)
4661 *offset = size_int (-adjust / BITS_PER_UNIT);
4662 else
4663 *offset
4664 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4665 *bitstart = 0;
4667 else
4668 *bitstart = *bitpos - bitoffset;
4670 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
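/* Illustrative sketch (not part of expr.c, names are for illustration
   only): in the C++ memory model, adjacent bit-fields form a single
   memory location, while members separated by a non-bit-field do not.
   For a store to B or C below, get_bit_range returns the bit range of
   the representative covering both, so the expander never writes into
   A or D when updating B or C: */

struct bit_range_example
{
  char a;        /* its own memory location                           */
  int  b : 7;    /* b and c share one memory location                 */
  int  c : 9;
  char d;        /* its own memory location again                     */
};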
4673 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4674 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4675 DECL_RTL was not set yet, return NORTL. */
4677 static inline bool
4678 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4680 if (TREE_CODE (addr) != ADDR_EXPR)
4681 return false;
4683 tree base = TREE_OPERAND (addr, 0);
4685 if (!DECL_P (base)
4686 || TREE_ADDRESSABLE (base)
4687 || DECL_MODE (base) == BLKmode)
4688 return false;
4690 if (!DECL_RTL_SET_P (base))
4691 return nortl;
4693 return (!MEM_P (DECL_RTL (base)));
4696 /* Returns true if the MEM_REF REF refers to an object that does not
4697 reside in memory and has non-BLKmode. */
4699 static inline bool
4700 mem_ref_refers_to_non_mem_p (tree ref)
4702 tree base = TREE_OPERAND (ref, 0);
4703 return addr_expr_of_non_mem_decl_p_1 (base, false);
4706 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4707 is true, try generating a nontemporal store. */
4709 void
4710 expand_assignment (tree to, tree from, bool nontemporal)
4712 rtx to_rtx = 0;
4713 rtx result;
4714 enum machine_mode mode;
4715 unsigned int align;
4716 enum insn_code icode;
4718 /* Don't crash if the lhs of the assignment was erroneous. */
4719 if (TREE_CODE (to) == ERROR_MARK)
4721 expand_normal (from);
4722 return;
4725 /* Optimize away no-op moves without side-effects. */
4726 if (operand_equal_p (to, from, 0))
4727 return;
4729 /* Handle misaligned stores. */
4730 mode = TYPE_MODE (TREE_TYPE (to));
4731 if ((TREE_CODE (to) == MEM_REF
4732 || TREE_CODE (to) == TARGET_MEM_REF)
4733 && mode != BLKmode
4734 && !mem_ref_refers_to_non_mem_p (to)
4735 && ((align = get_object_alignment (to))
4736 < GET_MODE_ALIGNMENT (mode))
4737 && (((icode = optab_handler (movmisalign_optab, mode))
4738 != CODE_FOR_nothing)
4739 || SLOW_UNALIGNED_ACCESS (mode, align)))
4741 rtx reg, mem;
4743 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4744 reg = force_not_mem (reg);
4745 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4747 if (icode != CODE_FOR_nothing)
4749 struct expand_operand ops[2];
4751 create_fixed_operand (&ops[0], mem);
4752 create_input_operand (&ops[1], reg, mode);
4753 /* The movmisalign<mode> pattern cannot fail, else the assignment
4754 would silently be omitted. */
4755 expand_insn (icode, 2, ops);
4757 else
4758 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4759 return;
4762 /* Assignment of a structure component needs special treatment
4763 if the structure component's rtx is not simply a MEM.
4764 Assignment of an array element at a constant index, and assignment of
4765 an array element in an unaligned packed structure field, has the same
4766 problem. Same for (partially) storing into a non-memory object. */
4767 if (handled_component_p (to)
4768 || (TREE_CODE (to) == MEM_REF
4769 && mem_ref_refers_to_non_mem_p (to))
4770 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4772 enum machine_mode mode1;
4773 HOST_WIDE_INT bitsize, bitpos;
4774 unsigned HOST_WIDE_INT bitregion_start = 0;
4775 unsigned HOST_WIDE_INT bitregion_end = 0;
4776 tree offset;
4777 int unsignedp;
4778 int volatilep = 0;
4779 tree tem;
4781 push_temp_slots ();
4782 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4783 &unsignedp, &volatilep, true);
4785 /* Make sure bitpos is not negative, it can wreak havoc later. */
4786 if (bitpos < 0)
4788 gcc_assert (offset == NULL_TREE);
4789 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4790 ? 3 : exact_log2 (BITS_PER_UNIT)));
4791 bitpos &= BITS_PER_UNIT - 1;
4794 if (TREE_CODE (to) == COMPONENT_REF
4795 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4796 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4797 /* The C++ memory model naturally applies to byte-aligned fields.
4798 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4799 BITSIZE are not byte-aligned, there is no need to limit the range
4800 we can access. This can occur with packed structures in Ada. */
4801 else if (bitsize > 0
4802 && bitsize % BITS_PER_UNIT == 0
4803 && bitpos % BITS_PER_UNIT == 0)
4805 bitregion_start = bitpos;
4806 bitregion_end = bitpos + bitsize - 1;
4809 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4811 /* If the field has a mode, we want to access it in the
4812 field's mode, not the computed mode.
4813 If a MEM has VOIDmode (external with incomplete type),
4814 use BLKmode for it instead. */
4815 if (MEM_P (to_rtx))
4817 if (mode1 != VOIDmode)
4818 to_rtx = adjust_address (to_rtx, mode1, 0);
4819 else if (GET_MODE (to_rtx) == VOIDmode)
4820 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4823 if (offset != 0)
4825 enum machine_mode address_mode;
4826 rtx offset_rtx;
4828 if (!MEM_P (to_rtx))
4830 /* We can get constant negative offsets into arrays with broken
4831 user code. Translate this to a trap instead of ICEing. */
4832 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4833 expand_builtin_trap ();
4834 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4837 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4838 address_mode = get_address_mode (to_rtx);
4839 if (GET_MODE (offset_rtx) != address_mode)
4840 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4842 /* If we have an expression in OFFSET_RTX and a non-zero
4843 byte offset in BITPOS, adding the byte offset before the
4844 OFFSET_RTX results in better intermediate code, which makes
4845 later rtl optimization passes perform better.
4847 We prefer intermediate code like this:
4849 r124:DI=r123:DI+0x18
4850 [r124:DI]=r121:DI
4852 ... instead of ...
4854 r124:DI=r123:DI+0x10
4855 [r124:DI+0x8]=r121:DI
4857 This is only done for aligned data values, as these can
4858 be expected to result in single move instructions. */
4859 if (mode1 != VOIDmode
4860 && bitpos != 0
4861 && bitsize > 0
4862 && (bitpos % bitsize) == 0
4863 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4864 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4866 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4867 bitregion_start = 0;
4868 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4869 bitregion_end -= bitpos;
4870 bitpos = 0;
4873 to_rtx = offset_address (to_rtx, offset_rtx,
4874 highest_pow2_factor_for_target (to,
4875 offset));
4878 /* No action is needed if the target is not a memory and the field
4879 lies completely outside that target. This can occur if the source
4880 code contains an out-of-bounds access to a small array. */
4881 if (!MEM_P (to_rtx)
4882 && GET_MODE (to_rtx) != BLKmode
4883 && (unsigned HOST_WIDE_INT) bitpos
4884 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4886 expand_normal (from);
4887 result = NULL;
4889 /* Handle expand_expr of a complex value returning a CONCAT. */
4890 else if (GET_CODE (to_rtx) == CONCAT)
4892 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4893 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4894 && bitpos == 0
4895 && bitsize == mode_bitsize)
4896 result = store_expr (from, to_rtx, false, nontemporal);
4897 else if (bitsize == mode_bitsize / 2
4898 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4899 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4900 nontemporal);
4901 else if (bitpos + bitsize <= mode_bitsize / 2)
4902 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4903 bitregion_start, bitregion_end,
4904 mode1, from,
4905 get_alias_set (to), nontemporal);
4906 else if (bitpos >= mode_bitsize / 2)
4907 result = store_field (XEXP (to_rtx, 1), bitsize,
4908 bitpos - mode_bitsize / 2,
4909 bitregion_start, bitregion_end,
4910 mode1, from,
4911 get_alias_set (to), nontemporal);
4912 else if (bitpos == 0 && bitsize == mode_bitsize)
4914 rtx from_rtx;
4915 result = expand_normal (from);
4916 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4917 TYPE_MODE (TREE_TYPE (from)), 0);
4918 emit_move_insn (XEXP (to_rtx, 0),
4919 read_complex_part (from_rtx, false));
4920 emit_move_insn (XEXP (to_rtx, 1),
4921 read_complex_part (from_rtx, true));
4923 else
4925 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4926 GET_MODE_SIZE (GET_MODE (to_rtx)));
4927 write_complex_part (temp, XEXP (to_rtx, 0), false);
4928 write_complex_part (temp, XEXP (to_rtx, 1), true);
4929 result = store_field (temp, bitsize, bitpos,
4930 bitregion_start, bitregion_end,
4931 mode1, from,
4932 get_alias_set (to), nontemporal);
4933 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4934 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4937 else
4939 if (MEM_P (to_rtx))
4941 /* If the field is at offset zero, we could have been given the
4942 DECL_RTX of the parent struct. Don't munge it. */
4943 to_rtx = shallow_copy_rtx (to_rtx);
4944 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4945 if (volatilep)
4946 MEM_VOLATILE_P (to_rtx) = 1;
4949 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4950 bitregion_start, bitregion_end,
4951 mode1,
4952 to_rtx, to, from))
4953 result = NULL;
4954 else
4955 result = store_field (to_rtx, bitsize, bitpos,
4956 bitregion_start, bitregion_end,
4957 mode1, from,
4958 get_alias_set (to), nontemporal);
4961 if (result)
4962 preserve_temp_slots (result);
4963 pop_temp_slots ();
4964 return;
4967 /* If the rhs is a function call and its value is not an aggregate,
4968 call the function before we start to compute the lhs.
4969 This is needed for correct code for cases such as
4970 val = setjmp (buf) on machines where reference to val
4971 requires loading up part of an address in a separate insn.
4973 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4974 since it might be a promoted variable where the zero- or sign- extension
4975 needs to be done. Handling this in the normal way is safe because no
4976 computation is done before the call. The same is true for SSA names. */
4977 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4978 && COMPLETE_TYPE_P (TREE_TYPE (from))
4979 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4980 && ! (((TREE_CODE (to) == VAR_DECL
4981 || TREE_CODE (to) == PARM_DECL
4982 || TREE_CODE (to) == RESULT_DECL)
4983 && REG_P (DECL_RTL (to)))
4984 || TREE_CODE (to) == SSA_NAME))
4986 rtx value;
4988 push_temp_slots ();
4989 value = expand_normal (from);
4990 if (to_rtx == 0)
4991 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4993 /* Handle calls that return values in multiple non-contiguous locations.
4994 The Irix 6 ABI has examples of this. */
4995 if (GET_CODE (to_rtx) == PARALLEL)
4997 if (GET_CODE (value) == PARALLEL)
4998 emit_group_move (to_rtx, value);
4999 else
5000 emit_group_load (to_rtx, value, TREE_TYPE (from),
5001 int_size_in_bytes (TREE_TYPE (from)));
5003 else if (GET_CODE (value) == PARALLEL)
5004 emit_group_store (to_rtx, value, TREE_TYPE (from),
5005 int_size_in_bytes (TREE_TYPE (from)));
5006 else if (GET_MODE (to_rtx) == BLKmode)
5008 /* Handle calls that return BLKmode values in registers. */
5009 if (REG_P (value))
5010 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5011 else
5012 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5014 else
5016 if (POINTER_TYPE_P (TREE_TYPE (to)))
5017 value = convert_memory_address_addr_space
5018 (GET_MODE (to_rtx), value,
5019 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5021 emit_move_insn (to_rtx, value);
5023 preserve_temp_slots (to_rtx);
5024 pop_temp_slots ();
5025 return;
5028 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5029 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5031 /* Don't move directly into a return register. */
5032 if (TREE_CODE (to) == RESULT_DECL
5033 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5035 rtx temp;
5037 push_temp_slots ();
5039 /* If the source is itself a return value, it still is in a pseudo at
5040 this point so we can move it back to the return register directly. */
5041 if (REG_P (to_rtx)
5042 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5043 && TREE_CODE (from) != CALL_EXPR)
5044 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5045 else
5046 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5048 /* Handle calls that return values in multiple non-contiguous locations.
5049 The Irix 6 ABI has examples of this. */
5050 if (GET_CODE (to_rtx) == PARALLEL)
5052 if (GET_CODE (temp) == PARALLEL)
5053 emit_group_move (to_rtx, temp);
5054 else
5055 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5056 int_size_in_bytes (TREE_TYPE (from)));
5058 else if (temp)
5059 emit_move_insn (to_rtx, temp);
5061 preserve_temp_slots (to_rtx);
5062 pop_temp_slots ();
5063 return;
5066 /* In case we are returning the contents of an object which overlaps
5067 the place the value is being stored, use a safe function when copying
5068 a value through a pointer into a structure value return block. */
5069 if (TREE_CODE (to) == RESULT_DECL
5070 && TREE_CODE (from) == INDIRECT_REF
5071 && ADDR_SPACE_GENERIC_P
5072 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5073 && refs_may_alias_p (to, from)
5074 && cfun->returns_struct
5075 && !cfun->returns_pcc_struct)
5077 rtx from_rtx, size;
5079 push_temp_slots ();
5080 size = expr_size (from);
5081 from_rtx = expand_normal (from);
5083 emit_library_call (memmove_libfunc, LCT_NORMAL,
5084 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5085 XEXP (from_rtx, 0), Pmode,
5086 convert_to_mode (TYPE_MODE (sizetype),
5087 size, TYPE_UNSIGNED (sizetype)),
5088 TYPE_MODE (sizetype));
5090 preserve_temp_slots (to_rtx);
5091 pop_temp_slots ();
5092 return;
5095 /* Compute FROM and store the value in the rtx we got. */
5097 push_temp_slots ();
5098 result = store_expr (from, to_rtx, 0, nontemporal);
5099 preserve_temp_slots (result);
5100 pop_temp_slots ();
5101 return;
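/* Illustrative sketch (not part of expr.c, names are for illustration
   only): a store that can take the misaligned path at the top of
   expand_assignment on a strict-alignment target may come from
   ordinary C such as a packed structure member, whose MEM_REF
   alignment is below GET_MODE_ALIGNMENT of SImode: */

struct __attribute__ ((packed)) packed_example
{
  char c;
  int  i;        /* only 1-byte aligned                               */
};

static void
store_packed_int_sketch (struct packed_example *p, int v)
{
  p->i = v;      /* expanded via movmisalign or a bit-field store     */
}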
5104 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5105 succeeded, false otherwise. */
5107 bool
5108 emit_storent_insn (rtx to, rtx from)
5110 struct expand_operand ops[2];
5111 enum machine_mode mode = GET_MODE (to);
5112 enum insn_code code = optab_handler (storent_optab, mode);
5114 if (code == CODE_FOR_nothing)
5115 return false;
5117 create_fixed_operand (&ops[0], to);
5118 create_input_operand (&ops[1], from, mode);
5119 return maybe_expand_insn (code, 2, ops);
5122 /* Generate code for computing expression EXP,
5123 and storing the value into TARGET.
5125 If the mode is BLKmode then we may return TARGET itself.
5126 It turns out that in BLKmode it doesn't cause a problem,
5127 because C has no operators that could combine two different
5128 assignments into the same BLKmode object with different values
5129 with no sequence point. Will other languages need this to
5130 be more thorough?
5132 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5133 stack, and block moves may need to be treated specially.
5135 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5138 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5140 rtx temp;
5141 rtx alt_rtl = NULL_RTX;
5142 location_t loc = curr_insn_location ();
5144 if (VOID_TYPE_P (TREE_TYPE (exp)))
5146 /* C++ can generate ?: expressions with a throw expression in one
5147 branch and an rvalue in the other. Here, we resolve attempts to
5148 store the throw expression's nonexistent result. */
5149 gcc_assert (!call_param_p);
5150 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5151 return NULL_RTX;
5153 if (TREE_CODE (exp) == COMPOUND_EXPR)
5155 /* Perform first part of compound expression, then assign from second
5156 part. */
5157 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5158 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5159 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5160 nontemporal);
5162 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5164 /* For conditional expression, get safe form of the target. Then
5165 test the condition, doing the appropriate assignment on either
5166 side. This avoids the creation of unnecessary temporaries.
5167 For non-BLKmode, it is more efficient not to do this. */
5169 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5171 do_pending_stack_adjust ();
5172 NO_DEFER_POP;
5173 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5174 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5175 nontemporal);
5176 emit_jump_insn (gen_jump (lab2));
5177 emit_barrier ();
5178 emit_label (lab1);
5179 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5180 nontemporal);
5181 emit_label (lab2);
5182 OK_DEFER_POP;
5184 return NULL_RTX;
5186 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5187 /* If this is a scalar in a register that is stored in a wider mode
5188 than the declared mode, compute the result into its declared mode
5189 and then convert to the wider mode. Our value is the computed
5190 expression. */
5192 rtx inner_target = 0;
5194 /* We can do the conversion inside EXP, which will often result
5195 in some optimizations. Do the conversion in two steps: first
5196 change the signedness, if needed, then the extend. But don't
5197 do this if the type of EXP is a subtype of something else
5198 since then the conversion might involve more than just
5199 converting modes. */
5200 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5201 && TREE_TYPE (TREE_TYPE (exp)) == 0
5202 && GET_MODE_PRECISION (GET_MODE (target))
5203 == TYPE_PRECISION (TREE_TYPE (exp)))
5205 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5206 != SUBREG_PROMOTED_UNSIGNED_P (target))
5208 /* Some types, e.g. Fortran's logical*4, won't have a signed
5209 version, so use the mode instead. */
5210 tree ntype
5211 = (signed_or_unsigned_type_for
5212 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5213 if (ntype == NULL)
5214 ntype = lang_hooks.types.type_for_mode
5215 (TYPE_MODE (TREE_TYPE (exp)),
5216 SUBREG_PROMOTED_UNSIGNED_P (target));
5218 exp = fold_convert_loc (loc, ntype, exp);
5221 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5222 (GET_MODE (SUBREG_REG (target)),
5223 SUBREG_PROMOTED_UNSIGNED_P (target)),
5224 exp);
5226 inner_target = SUBREG_REG (target);
5229 temp = expand_expr (exp, inner_target, VOIDmode,
5230 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5232 /* If TEMP is a VOIDmode constant, use convert_modes to make
5233 sure that we properly convert it. */
5234 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5236 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5237 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5238 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5239 GET_MODE (target), temp,
5240 SUBREG_PROMOTED_UNSIGNED_P (target));
5243 convert_move (SUBREG_REG (target), temp,
5244 SUBREG_PROMOTED_UNSIGNED_P (target));
5246 return NULL_RTX;
5248 else if ((TREE_CODE (exp) == STRING_CST
5249 || (TREE_CODE (exp) == MEM_REF
5250 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5251 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5252 == STRING_CST
5253 && integer_zerop (TREE_OPERAND (exp, 1))))
5254 && !nontemporal && !call_param_p
5255 && MEM_P (target))
5257 /* Optimize initialization of an array with a STRING_CST. */
5258 HOST_WIDE_INT exp_len, str_copy_len;
5259 rtx dest_mem;
5260 tree str = TREE_CODE (exp) == STRING_CST
5261 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5263 exp_len = int_expr_size (exp);
5264 if (exp_len <= 0)
5265 goto normal_expr;
5267 if (TREE_STRING_LENGTH (str) <= 0)
5268 goto normal_expr;
5270 str_copy_len = strlen (TREE_STRING_POINTER (str));
5271 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5272 goto normal_expr;
5274 str_copy_len = TREE_STRING_LENGTH (str);
5275 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5276 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5278 str_copy_len += STORE_MAX_PIECES - 1;
5279 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5281 str_copy_len = MIN (str_copy_len, exp_len);
5282 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5283 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5284 MEM_ALIGN (target), false))
5285 goto normal_expr;
5287 dest_mem = target;
5289 dest_mem = store_by_pieces (dest_mem,
5290 str_copy_len, builtin_strncpy_read_str,
5291 CONST_CAST (char *,
5292 TREE_STRING_POINTER (str)),
5293 MEM_ALIGN (target), false,
5294 exp_len > str_copy_len ? 1 : 0);
5295 if (exp_len > str_copy_len)
5296 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5297 GEN_INT (exp_len - str_copy_len),
5298 BLOCK_OP_NORMAL);
5299 return NULL_RTX;
5301 else
5303 rtx tmp_target;
5305 normal_expr:
5306 /* If we want to use a nontemporal store, force the value to
5307 register first. */
5308 tmp_target = nontemporal ? NULL_RTX : target;
5309 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5310 (call_param_p
5311 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5312 &alt_rtl, false);
5315 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5316 the same as that of TARGET, adjust the constant. This is needed, for
5317 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5318 only a word-sized value. */
5319 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5320 && TREE_CODE (exp) != ERROR_MARK
5321 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5322 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5323 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5325 /* If value was not generated in the target, store it there.
5326 Convert the value to TARGET's type first if necessary and emit the
5327 pending incrementations that have been queued when expanding EXP.
5328 Note that we cannot emit the whole queue blindly because this will
5329 effectively disable the POST_INC optimization later.
5331 If TEMP and TARGET compare equal according to rtx_equal_p, but
5332 one or both of them are volatile memory refs, we have to distinguish
5333 two cases:
5334 - expand_expr has used TARGET. In this case, we must not generate
5335 another copy. This can be detected by TARGET being equal according
5336 to == .
5337 - expand_expr has not used TARGET - that means that the source just
5338 happens to have the same RTX form. Since temp will have been created
5339 by expand_expr, it will compare unequal according to == .
5340 We must generate a copy in this case, to reach the correct number
5341 of volatile memory references. */
5343 if ((! rtx_equal_p (temp, target)
5344 || (temp != target && (side_effects_p (temp)
5345 || side_effects_p (target))))
5346 && TREE_CODE (exp) != ERROR_MARK
5347 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5348 but TARGET is not valid memory reference, TEMP will differ
5349 from TARGET although it is really the same location. */
5350 && !(alt_rtl
5351 && rtx_equal_p (alt_rtl, target)
5352 && !side_effects_p (alt_rtl)
5353 && !side_effects_p (target))
5354 /* If there's nothing to copy, don't bother. Don't call
5355 expr_size unless necessary, because some front ends' (e.g. C++)
5356 expr_size hook must not be given objects that are not
5357 supposed to be bit-copied or bit-initialized. */
5358 && expr_size (exp) != const0_rtx)
5360 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5362 if (GET_MODE (target) == BLKmode)
5364 /* Handle calls that return BLKmode values in registers. */
5365 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5366 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5367 else
5368 store_bit_field (target,
5369 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5370 0, 0, 0, GET_MODE (temp), temp);
5372 else
5373 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5376 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5378 /* Handle copying a string constant into an array. The string
5379 constant may be shorter than the array. So copy just the string's
5380 actual length, and clear the rest. First get the size of the data
5381 type of the string, which is actually the size of the target. */
5382 rtx size = expr_size (exp);
5384 if (CONST_INT_P (size)
5385 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5386 emit_block_move (target, temp, size,
5387 (call_param_p
5388 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5389 else
5391 enum machine_mode pointer_mode
5392 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5393 enum machine_mode address_mode = get_address_mode (target);
5395 /* Compute the size of the data to copy from the string. */
5396 tree copy_size
5397 = size_binop_loc (loc, MIN_EXPR,
5398 make_tree (sizetype, size),
5399 size_int (TREE_STRING_LENGTH (exp)));
5400 rtx copy_size_rtx
5401 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5402 (call_param_p
5403 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5404 rtx label = 0;
5406 /* Copy that much. */
5407 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5408 TYPE_UNSIGNED (sizetype));
5409 emit_block_move (target, temp, copy_size_rtx,
5410 (call_param_p
5411 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5413 /* Figure out how much is left in TARGET that we have to clear.
5414 Do all calculations in pointer_mode. */
5415 if (CONST_INT_P (copy_size_rtx))
5417 size = plus_constant (address_mode, size,
5418 -INTVAL (copy_size_rtx));
5419 target = adjust_address (target, BLKmode,
5420 INTVAL (copy_size_rtx));
5422 else
5424 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5425 copy_size_rtx, NULL_RTX, 0,
5426 OPTAB_LIB_WIDEN);
5428 if (GET_MODE (copy_size_rtx) != address_mode)
5429 copy_size_rtx = convert_to_mode (address_mode,
5430 copy_size_rtx,
5431 TYPE_UNSIGNED (sizetype));
5433 target = offset_address (target, copy_size_rtx,
5434 highest_pow2_factor (copy_size));
5435 label = gen_label_rtx ();
5436 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5437 GET_MODE (size), 0, label);
5440 if (size != const0_rtx)
5441 clear_storage (target, size, BLOCK_OP_NORMAL);
5443 if (label)
5444 emit_label (label);
5447 /* Handle calls that return values in multiple non-contiguous locations.
5448 The Irix 6 ABI has examples of this. */
5449 else if (GET_CODE (target) == PARALLEL)
5451 if (GET_CODE (temp) == PARALLEL)
5452 emit_group_move (target, temp);
5453 else
5454 emit_group_load (target, temp, TREE_TYPE (exp),
5455 int_size_in_bytes (TREE_TYPE (exp)));
5457 else if (GET_CODE (temp) == PARALLEL)
5458 emit_group_store (target, temp, TREE_TYPE (exp),
5459 int_size_in_bytes (TREE_TYPE (exp)));
5460 else if (GET_MODE (temp) == BLKmode)
5461 emit_block_move (target, temp, expr_size (exp),
5462 (call_param_p
5463 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5464 /* If we emit a nontemporal store, there is nothing else to do. */
5465 else if (nontemporal && emit_storent_insn (target, temp))
5467 else
5469 temp = force_operand (temp, target);
5470 if (temp != target)
5471 emit_move_insn (target, temp);
5475 return NULL_RTX;
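/* Illustrative sketch (not part of expr.c, names are for illustration
   only): the STRING_CST handling in store_expr corresponds to
   initializations where the string is shorter than the array it
   fills.  Depending on which path is taken above, the expander either
   stores all eight bytes by pieces (the trailing bytes coming out as
   zeros) or block-copies the three string bytes and clears the
   remaining five with clear_storage: */

static void
init_short_string_sketch (void)
{
  char buf[8] = "hi";   /* 3 bytes of data, 5 bytes cleared to zero   */
  (void) buf;
}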
5478 /* Return true if field F of structure TYPE is a flexible array. */
5480 static bool
5481 flexible_array_member_p (const_tree f, const_tree type)
5483 const_tree tf;
5485 tf = TREE_TYPE (f);
5486 return (DECL_CHAIN (f) == NULL
5487 && TREE_CODE (tf) == ARRAY_TYPE
5488 && TYPE_DOMAIN (tf)
5489 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5490 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5491 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5492 && int_size_in_bytes (type) >= 0);
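/* Illustrative sketch (not part of expr.c, names are for illustration
   only): the shape recognized above is the C99 flexible array member,
   which must be the last field and is not counted as something a
   constructor has to initialize: */

struct flexible_array_example
{
  int  len;
  char data[];   /* flexible array member: no domain maximum          */
};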
5495 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5496 must have in order for it to completely initialize a value of type TYPE.
5497 Return -1 if the number isn't known.
5499 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5501 static HOST_WIDE_INT
5502 count_type_elements (const_tree type, bool for_ctor_p)
5504 switch (TREE_CODE (type))
5506 case ARRAY_TYPE:
5508 tree nelts;
5510 nelts = array_type_nelts (type);
5511 if (nelts && tree_fits_uhwi_p (nelts))
5513 unsigned HOST_WIDE_INT n;
5515 n = tree_to_uhwi (nelts) + 1;
5516 if (n == 0 || for_ctor_p)
5517 return n;
5518 else
5519 return n * count_type_elements (TREE_TYPE (type), false);
5521 return for_ctor_p ? -1 : 1;
5524 case RECORD_TYPE:
5526 unsigned HOST_WIDE_INT n;
5527 tree f;
5529 n = 0;
5530 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5531 if (TREE_CODE (f) == FIELD_DECL)
5533 if (!for_ctor_p)
5534 n += count_type_elements (TREE_TYPE (f), false);
5535 else if (!flexible_array_member_p (f, type))
5536 /* Don't count flexible arrays, which are not supposed
5537 to be initialized. */
5538 n += 1;
5541 return n;
5544 case UNION_TYPE:
5545 case QUAL_UNION_TYPE:
5547 tree f;
5548 HOST_WIDE_INT n, m;
5550 gcc_assert (!for_ctor_p);
5551 /* Estimate the number of scalars in each field and pick the
5552 maximum. Other estimates would do instead; the idea is simply
5553 to make sure that the estimate is not sensitive to the ordering
5554 of the fields. */
5555 n = 1;
5556 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5557 if (TREE_CODE (f) == FIELD_DECL)
5559 m = count_type_elements (TREE_TYPE (f), false);
5560 /* If the field doesn't span the whole union, add an extra
5561 scalar for the rest. */
5562 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5563 TYPE_SIZE (type)) != 1)
5564 m++;
5565 if (n < m)
5566 n = m;
5568 return n;
5571 case COMPLEX_TYPE:
5572 return 2;
5574 case VECTOR_TYPE:
5575 return TYPE_VECTOR_SUBPARTS (type);
5577 case INTEGER_TYPE:
5578 case REAL_TYPE:
5579 case FIXED_POINT_TYPE:
5580 case ENUMERAL_TYPE:
5581 case BOOLEAN_TYPE:
5582 case POINTER_TYPE:
5583 case OFFSET_TYPE:
5584 case REFERENCE_TYPE:
5585 case NULLPTR_TYPE:
5586 return 1;
5588 case ERROR_MARK:
5589 return 0;
5591 case VOID_TYPE:
5592 case METHOD_TYPE:
5593 case FUNCTION_TYPE:
5594 case LANG_TYPE:
5595 default:
5596 gcc_unreachable ();
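/* As a worked example with hypothetical types:

     int a[4];
     struct p { int x; int y; } b[3];

   For 'a', count_type_elements returns 4 in either mode.  For 'b', it
   returns 3 with FOR_CTOR_P (a complete constructor needs three
   top-level elements) and 3 * 2 == 6 without, since each element of B
   contributes two scalars through the recursion above.  */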
5600 /* Helper for categorize_ctor_elements. Identical interface. */
5602 static bool
5603 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5604 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5606 unsigned HOST_WIDE_INT idx;
5607 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5608 tree value, purpose, elt_type;
5610 /* Whether CTOR is a valid constant initializer, in accordance with what
5611 initializer_constant_valid_p does. If inferred from the constructor
5612 elements, true until proven otherwise. */
5613 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5614 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5616 nz_elts = 0;
5617 init_elts = 0;
5618 num_fields = 0;
5619 elt_type = NULL_TREE;
5621 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5623 HOST_WIDE_INT mult = 1;
5625 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5627 tree lo_index = TREE_OPERAND (purpose, 0);
5628 tree hi_index = TREE_OPERAND (purpose, 1);
5630 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5631 mult = (tree_to_uhwi (hi_index)
5632 - tree_to_uhwi (lo_index) + 1);
5634 num_fields += mult;
5635 elt_type = TREE_TYPE (value);
5637 switch (TREE_CODE (value))
5639 case CONSTRUCTOR:
5641 HOST_WIDE_INT nz = 0, ic = 0;
5643 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5644 p_complete);
5646 nz_elts += mult * nz;
5647 init_elts += mult * ic;
5649 if (const_from_elts_p && const_p)
5650 const_p = const_elt_p;
5652 break;
5654 case INTEGER_CST:
5655 case REAL_CST:
5656 case FIXED_CST:
5657 if (!initializer_zerop (value))
5658 nz_elts += mult;
5659 init_elts += mult;
5660 break;
5662 case STRING_CST:
5663 nz_elts += mult * TREE_STRING_LENGTH (value);
5664 init_elts += mult * TREE_STRING_LENGTH (value);
5665 break;
5667 case COMPLEX_CST:
5668 if (!initializer_zerop (TREE_REALPART (value)))
5669 nz_elts += mult;
5670 if (!initializer_zerop (TREE_IMAGPART (value)))
5671 nz_elts += mult;
5672 init_elts += mult;
5673 break;
5675 case VECTOR_CST:
5677 unsigned i;
5678 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5680 tree v = VECTOR_CST_ELT (value, i);
5681 if (!initializer_zerop (v))
5682 nz_elts += mult;
5683 init_elts += mult;
5686 break;
5688 default:
5690 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5691 nz_elts += mult * tc;
5692 init_elts += mult * tc;
5694 if (const_from_elts_p && const_p)
5695 const_p = initializer_constant_valid_p (value, elt_type)
5696 != NULL_TREE;
5698 break;
5702 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5703 num_fields, elt_type))
5704 *p_complete = false;
5706 *p_nz_elts += nz_elts;
5707 *p_init_elts += init_elts;
5709 return const_p;
5712 /* Examine CTOR to discover:
5713 * how many scalar fields are set to nonzero values,
5714 and place it in *P_NZ_ELTS;
5715 * how many scalar fields in total are in CTOR,
5716 and place it in *P_INIT_ELTS.
5717 * whether the constructor is complete -- in the sense that every
5718 meaningful byte is explicitly given a value --
5719 and place it in *P_COMPLETE.
5721 Return whether or not CTOR is a valid static constant initializer, the same
5722 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5724 bool
5725 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5726 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5728 *p_nz_elts = 0;
5729 *p_init_elts = 0;
5730 *p_complete = true;
5732 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
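/* A small worked example with a hypothetical initializer:

     struct p { int x; int y; int z; };
     struct p v = { 0, 7 };

   One would expect *P_NZ_ELTS == 1 (only the 7 is nonzero),
   *P_INIT_ELTS == 2 (two scalars explicitly initialized) and
   *P_COMPLETE == false, since the constructor covers only two of the
   three fields; the return value would normally be true because both
   elements are constants acceptable to initializer_constant_valid_p.  */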
5735 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5736 of which had type LAST_TYPE. Each element was itself a complete
5737 initializer, in the sense that every meaningful byte was explicitly
5738 given a value. Return true if the same is true for the constructor
5739 as a whole. */
5741 bool
5742 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5743 const_tree last_type)
5745 if (TREE_CODE (type) == UNION_TYPE
5746 || TREE_CODE (type) == QUAL_UNION_TYPE)
5748 if (num_elts == 0)
5749 return false;
5751 gcc_assert (num_elts == 1 && last_type);
5753 /* ??? We could look at each element of the union and find the
5754 largest element, which would avoid comparing the size of the
5755 initialized element against any tail padding in the union.
5756 Doesn't seem worth the effort... */
5757 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5760 return count_type_elements (type, true) == num_elts;
5763 /* Return 1 if EXP contains mostly (3/4) zeros. */
5765 static int
5766 mostly_zeros_p (const_tree exp)
5768 if (TREE_CODE (exp) == CONSTRUCTOR)
5770 HOST_WIDE_INT nz_elts, init_elts;
5771 bool complete_p;
5773 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5774 return !complete_p || nz_elts < init_elts / 4;
5777 return initializer_zerop (exp);
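/* For instance, the hypothetical complete initializer

     int a[8] = { 0, 0, 0, 0, 0, 0, 0, 1 };

   has nz_elts == 1 and init_elts == 8, so 1 < 8 / 4 holds and the
   constructor counts as mostly zeros; store_constructor can then clear
   the whole object once and store only the single nonzero element.  */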
5780 /* Return 1 if EXP contains all zeros. */
5782 static int
5783 all_zeros_p (const_tree exp)
5785 if (TREE_CODE (exp) == CONSTRUCTOR)
5787 HOST_WIDE_INT nz_elts, init_elts;
5788 bool complete_p;
5790 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5791 return nz_elts == 0;
5794 return initializer_zerop (exp);
5797 /* Helper function for store_constructor.
5798 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5799 CLEARED is as for store_constructor.
5800 ALIAS_SET is the alias set to use for any stores.
5802 This provides a recursive shortcut back to store_constructor when it isn't
5803 necessary to go through store_field. This is so that we can pass through
5804 the cleared field to let store_constructor know that we may not have to
5805 clear a substructure if the outer structure has already been cleared. */
5807 static void
5808 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5809 HOST_WIDE_INT bitpos, enum machine_mode mode,
5810 tree exp, int cleared, alias_set_type alias_set)
5812 if (TREE_CODE (exp) == CONSTRUCTOR
5813 /* We can only call store_constructor recursively if the size and
5814 bit position are on a byte boundary. */
5815 && bitpos % BITS_PER_UNIT == 0
5816 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5817 /* If we have a nonzero bitpos for a register target, then we just
5818 let store_field do the bitfield handling. This is unlikely to
5819 generate unnecessary clear instructions anyway. */
5820 && (bitpos == 0 || MEM_P (target)))
5822 if (MEM_P (target))
5823 target
5824 = adjust_address (target,
5825 GET_MODE (target) == BLKmode
5826 || 0 != (bitpos
5827 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5828 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5831 /* Update the alias set, if required. */
5832 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5833 && MEM_ALIAS_SET (target) != 0)
5835 target = copy_rtx (target);
5836 set_mem_alias_set (target, alias_set);
5839 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5841 else
5842 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5846 /* Returns the number of FIELD_DECLs in TYPE. */
5848 static int
5849 fields_length (const_tree type)
5851 tree t = TYPE_FIELDS (type);
5852 int count = 0;
5854 for (; t; t = DECL_CHAIN (t))
5855 if (TREE_CODE (t) == FIELD_DECL)
5856 ++count;
5858 return count;
5862 /* Store the value of constructor EXP into the rtx TARGET.
5863 TARGET is either a REG or a MEM; we know it cannot conflict, since
5864 safe_from_p has been called.
5865 CLEARED is true if TARGET is known to have been zeroed.
5866 SIZE is the number of bytes of TARGET we are allowed to modify: this
5867 may not be the same as the size of EXP if we are assigning to a field
5868 which has been packed to exclude padding bits. */
5870 static void
5871 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5873 tree type = TREE_TYPE (exp);
5874 #ifdef WORD_REGISTER_OPERATIONS
5875 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5876 #endif
5878 switch (TREE_CODE (type))
5880 case RECORD_TYPE:
5881 case UNION_TYPE:
5882 case QUAL_UNION_TYPE:
5884 unsigned HOST_WIDE_INT idx;
5885 tree field, value;
5887 /* If size is zero or the target is already cleared, do nothing. */
5888 if (size == 0 || cleared)
5889 cleared = 1;
5890 /* We either clear the aggregate or indicate the value is dead. */
5891 else if ((TREE_CODE (type) == UNION_TYPE
5892 || TREE_CODE (type) == QUAL_UNION_TYPE)
5893 && ! CONSTRUCTOR_ELTS (exp))
5894 /* If the constructor is empty, clear the union. */
5896 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5897 cleared = 1;
5900 /* If we are building a static constructor into a register,
5901 set the initial value as zero so we can fold the value into
5902 a constant. But if more than one register is involved,
5903 this probably loses. */
5904 else if (REG_P (target) && TREE_STATIC (exp)
5905 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5907 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5908 cleared = 1;
5911 /* If the constructor has fewer fields than the structure or
5912 if we are initializing the structure to mostly zeros, clear
5913 the whole structure first. Don't do this if TARGET is a
5914 register whose mode size isn't equal to SIZE since
5915 clear_storage can't handle this case. */
5916 else if (size > 0
5917 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5918 != fields_length (type))
5919 || mostly_zeros_p (exp))
5920 && (!REG_P (target)
5921 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5922 == size)))
5924 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5925 cleared = 1;
5928 if (REG_P (target) && !cleared)
5929 emit_clobber (target);
5931 /* Store each element of the constructor into the
5932 corresponding field of TARGET. */
5933 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5935 enum machine_mode mode;
5936 HOST_WIDE_INT bitsize;
5937 HOST_WIDE_INT bitpos = 0;
5938 tree offset;
5939 rtx to_rtx = target;
5941 /* Just ignore missing fields. We cleared the whole
5942 structure, above, if any fields are missing. */
5943 if (field == 0)
5944 continue;
5946 if (cleared && initializer_zerop (value))
5947 continue;
5949 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5950 bitsize = tree_to_uhwi (DECL_SIZE (field));
5951 else
5952 bitsize = -1;
5954 mode = DECL_MODE (field);
5955 if (DECL_BIT_FIELD (field))
5956 mode = VOIDmode;
5958 offset = DECL_FIELD_OFFSET (field);
5959 if (tree_fits_shwi_p (offset)
5960 && tree_fits_shwi_p (bit_position (field)))
5962 bitpos = int_bit_position (field);
5963 offset = 0;
5965 else
5966 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5968 if (offset)
5970 enum machine_mode address_mode;
5971 rtx offset_rtx;
5973 offset
5974 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5975 make_tree (TREE_TYPE (exp),
5976 target));
5978 offset_rtx = expand_normal (offset);
5979 gcc_assert (MEM_P (to_rtx));
5981 address_mode = get_address_mode (to_rtx);
5982 if (GET_MODE (offset_rtx) != address_mode)
5983 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5985 to_rtx = offset_address (to_rtx, offset_rtx,
5986 highest_pow2_factor (offset));
5989 #ifdef WORD_REGISTER_OPERATIONS
5990 /* If this initializes a field that is smaller than a
5991 word, at the start of a word, try to widen it to a full
5992 word. This special case allows us to output C++ member
5993 function initializations in a form that the optimizers
5994 can understand. */
5995 if (REG_P (target)
5996 && bitsize < BITS_PER_WORD
5997 && bitpos % BITS_PER_WORD == 0
5998 && GET_MODE_CLASS (mode) == MODE_INT
5999 && TREE_CODE (value) == INTEGER_CST
6000 && exp_size >= 0
6001 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6003 tree type = TREE_TYPE (value);
6005 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6007 type = lang_hooks.types.type_for_mode
6008 (word_mode, TYPE_UNSIGNED (type));
6009 value = fold_convert (type, value);
6012 if (BYTES_BIG_ENDIAN)
6013 value
6014 = fold_build2 (LSHIFT_EXPR, type, value,
6015 build_int_cst (type,
6016 BITS_PER_WORD - bitsize));
6017 bitsize = BITS_PER_WORD;
6018 mode = word_mode;
6020 #endif
6022 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6023 && DECL_NONADDRESSABLE_P (field))
6025 to_rtx = copy_rtx (to_rtx);
6026 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6029 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6030 value, cleared,
6031 get_alias_set (TREE_TYPE (field)));
6033 break;
6035 case ARRAY_TYPE:
6037 tree value, index;
6038 unsigned HOST_WIDE_INT i;
6039 int need_to_clear;
6040 tree domain;
6041 tree elttype = TREE_TYPE (type);
6042 int const_bounds_p;
6043 HOST_WIDE_INT minelt = 0;
6044 HOST_WIDE_INT maxelt = 0;
6046 domain = TYPE_DOMAIN (type);
6047 const_bounds_p = (TYPE_MIN_VALUE (domain)
6048 && TYPE_MAX_VALUE (domain)
6049 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6050 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6052 /* If we have constant bounds for the range of the type, get them. */
6053 if (const_bounds_p)
6055 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6056 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6059 /* If the constructor has fewer elements than the array, clear
6060 the whole array first. Similarly if this is a static
6061 constructor of a non-BLKmode object. */
6062 if (cleared)
6063 need_to_clear = 0;
6064 else if (REG_P (target) && TREE_STATIC (exp))
6065 need_to_clear = 1;
6066 else
6068 unsigned HOST_WIDE_INT idx;
6069 tree index, value;
6070 HOST_WIDE_INT count = 0, zero_count = 0;
6071 need_to_clear = ! const_bounds_p;
6073 /* This loop is a more accurate version of the loop in
6074 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6075 is also needed to check for missing elements. */
6076 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6078 HOST_WIDE_INT this_node_count;
6080 if (need_to_clear)
6081 break;
6083 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6085 tree lo_index = TREE_OPERAND (index, 0);
6086 tree hi_index = TREE_OPERAND (index, 1);
6088 if (! tree_fits_uhwi_p (lo_index)
6089 || ! tree_fits_uhwi_p (hi_index))
6091 need_to_clear = 1;
6092 break;
6095 this_node_count = (tree_to_uhwi (hi_index)
6096 - tree_to_uhwi (lo_index) + 1);
6098 else
6099 this_node_count = 1;
6101 count += this_node_count;
6102 if (mostly_zeros_p (value))
6103 zero_count += this_node_count;
6106 /* Clear the entire array first if there are any missing
6107 elements, or if the incidence of zero elements is >=
6108 75%. */
6109 if (! need_to_clear
6110 && (count < maxelt - minelt + 1
6111 || 4 * zero_count >= 3 * count))
6112 need_to_clear = 1;
6115 if (need_to_clear && size > 0)
6117 if (REG_P (target))
6118 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6119 else
6120 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6121 cleared = 1;
6124 if (!cleared && REG_P (target))
6125 /* Inform later passes that the old value is dead. */
6126 emit_clobber (target);
6128 /* Store each element of the constructor into the
6129 corresponding element of TARGET, determined by counting the
6130 elements. */
6131 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6133 enum machine_mode mode;
6134 HOST_WIDE_INT bitsize;
6135 HOST_WIDE_INT bitpos;
6136 rtx xtarget = target;
6138 if (cleared && initializer_zerop (value))
6139 continue;
6141 mode = TYPE_MODE (elttype);
6142 if (mode == BLKmode)
6143 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6144 ? tree_to_uhwi (TYPE_SIZE (elttype))
6145 : -1);
6146 else
6147 bitsize = GET_MODE_BITSIZE (mode);
6149 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6151 tree lo_index = TREE_OPERAND (index, 0);
6152 tree hi_index = TREE_OPERAND (index, 1);
6153 rtx index_r, pos_rtx;
6154 HOST_WIDE_INT lo, hi, count;
6155 tree position;
6157 /* If the range is constant and "small", unroll the loop. */
6158 if (const_bounds_p
6159 && tree_fits_shwi_p (lo_index)
6160 && tree_fits_shwi_p (hi_index)
6161 && (lo = tree_to_shwi (lo_index),
6162 hi = tree_to_shwi (hi_index),
6163 count = hi - lo + 1,
6164 (!MEM_P (target)
6165 || count <= 2
6166 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6167 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6168 <= 40 * 8)))))
6170 lo -= minelt; hi -= minelt;
6171 for (; lo <= hi; lo++)
6173 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6175 if (MEM_P (target)
6176 && !MEM_KEEP_ALIAS_SET_P (target)
6177 && TREE_CODE (type) == ARRAY_TYPE
6178 && TYPE_NONALIASED_COMPONENT (type))
6180 target = copy_rtx (target);
6181 MEM_KEEP_ALIAS_SET_P (target) = 1;
6184 store_constructor_field
6185 (target, bitsize, bitpos, mode, value, cleared,
6186 get_alias_set (elttype));
6189 else
6191 rtx loop_start = gen_label_rtx ();
6192 rtx loop_end = gen_label_rtx ();
6193 tree exit_cond;
6195 expand_normal (hi_index);
6197 index = build_decl (EXPR_LOCATION (exp),
6198 VAR_DECL, NULL_TREE, domain);
6199 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6200 SET_DECL_RTL (index, index_r);
6201 store_expr (lo_index, index_r, 0, false);
6203 /* Build the head of the loop. */
6204 do_pending_stack_adjust ();
6205 emit_label (loop_start);
6207 /* Assign value to element index. */
6208 position =
6209 fold_convert (ssizetype,
6210 fold_build2 (MINUS_EXPR,
6211 TREE_TYPE (index),
6212 index,
6213 TYPE_MIN_VALUE (domain)));
6215 position =
6216 size_binop (MULT_EXPR, position,
6217 fold_convert (ssizetype,
6218 TYPE_SIZE_UNIT (elttype)));
6220 pos_rtx = expand_normal (position);
6221 xtarget = offset_address (target, pos_rtx,
6222 highest_pow2_factor (position));
6223 xtarget = adjust_address (xtarget, mode, 0);
6224 if (TREE_CODE (value) == CONSTRUCTOR)
6225 store_constructor (value, xtarget, cleared,
6226 bitsize / BITS_PER_UNIT);
6227 else
6228 store_expr (value, xtarget, 0, false);
6230 /* Generate a conditional jump to exit the loop. */
6231 exit_cond = build2 (LT_EXPR, integer_type_node,
6232 index, hi_index);
6233 jumpif (exit_cond, loop_end, -1);
6235 /* Update the loop counter, and jump to the head of
6236 the loop. */
6237 expand_assignment (index,
6238 build2 (PLUS_EXPR, TREE_TYPE (index),
6239 index, integer_one_node),
6240 false);
6242 emit_jump (loop_start);
6244 /* Build the end of the loop. */
6245 emit_label (loop_end);
6248 else if ((index != 0 && ! tree_fits_shwi_p (index))
6249 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6251 tree position;
6253 if (index == 0)
6254 index = ssize_int (1);
6256 if (minelt)
6257 index = fold_convert (ssizetype,
6258 fold_build2 (MINUS_EXPR,
6259 TREE_TYPE (index),
6260 index,
6261 TYPE_MIN_VALUE (domain)));
6263 position =
6264 size_binop (MULT_EXPR, index,
6265 fold_convert (ssizetype,
6266 TYPE_SIZE_UNIT (elttype)));
6267 xtarget = offset_address (target,
6268 expand_normal (position),
6269 highest_pow2_factor (position));
6270 xtarget = adjust_address (xtarget, mode, 0);
6271 store_expr (value, xtarget, 0, false);
6273 else
6275 if (index != 0)
6276 bitpos = ((tree_to_shwi (index) - minelt)
6277 * tree_to_uhwi (TYPE_SIZE (elttype)));
6278 else
6279 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6281 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6282 && TREE_CODE (type) == ARRAY_TYPE
6283 && TYPE_NONALIASED_COMPONENT (type))
6285 target = copy_rtx (target);
6286 MEM_KEEP_ALIAS_SET_P (target) = 1;
6288 store_constructor_field (target, bitsize, bitpos, mode, value,
6289 cleared, get_alias_set (elttype));
6292 break;
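/* To make the RANGE_EXPR path above concrete, a GNU C designated range
   initializer such as the hypothetical

     int a[100] = { [10 ... 89] = 5 };

   produces a constructor element whose index is a RANGE_EXPR covering
   80 positions.  Since 80 elements of 32 bits are well over the 40 * 8
   bit unrolling budget checked above, the expander emits the small
   runtime loop built around loop_start/loop_end instead of unrolling
   the individual stores.  */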
6295 case VECTOR_TYPE:
6297 unsigned HOST_WIDE_INT idx;
6298 constructor_elt *ce;
6299 int i;
6300 int need_to_clear;
6301 int icode = CODE_FOR_nothing;
6302 tree elttype = TREE_TYPE (type);
6303 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6304 enum machine_mode eltmode = TYPE_MODE (elttype);
6305 HOST_WIDE_INT bitsize;
6306 HOST_WIDE_INT bitpos;
6307 rtvec vector = NULL;
6308 unsigned n_elts;
6309 alias_set_type alias;
6311 gcc_assert (eltmode != BLKmode);
6313 n_elts = TYPE_VECTOR_SUBPARTS (type);
6314 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6316 enum machine_mode mode = GET_MODE (target);
6318 icode = (int) optab_handler (vec_init_optab, mode);
6319 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6320 if (icode != CODE_FOR_nothing)
6322 tree value;
6324 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6325 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6327 icode = CODE_FOR_nothing;
6328 break;
6331 if (icode != CODE_FOR_nothing)
6333 unsigned int i;
6335 vector = rtvec_alloc (n_elts);
6336 for (i = 0; i < n_elts; i++)
6337 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6341 /* If the constructor has fewer elements than the vector,
6342 clear the whole vector first. Similarly if this is a static
6343 constructor of a non-BLKmode object. */
6344 if (cleared)
6345 need_to_clear = 0;
6346 else if (REG_P (target) && TREE_STATIC (exp))
6347 need_to_clear = 1;
6348 else
6350 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6351 tree value;
6353 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6355 int n_elts_here = tree_to_uhwi
6356 (int_const_binop (TRUNC_DIV_EXPR,
6357 TYPE_SIZE (TREE_TYPE (value)),
6358 TYPE_SIZE (elttype)));
6360 count += n_elts_here;
6361 if (mostly_zeros_p (value))
6362 zero_count += n_elts_here;
6365 /* Clear the entire vector first if there are any missing elements,
6366 or if the incidence of zero elements is >= 75%. */
6367 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6370 if (need_to_clear && size > 0 && !vector)
6372 if (REG_P (target))
6373 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6374 else
6375 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6376 cleared = 1;
6379 /* Inform later passes that the old value is dead. */
6380 if (!cleared && !vector && REG_P (target))
6381 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6383 if (MEM_P (target))
6384 alias = MEM_ALIAS_SET (target);
6385 else
6386 alias = get_alias_set (elttype);
6388 /* Store each element of the constructor into the corresponding
6389 element of TARGET, determined by counting the elements. */
6390 for (idx = 0, i = 0;
6391 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6392 idx++, i += bitsize / elt_size)
6394 HOST_WIDE_INT eltpos;
6395 tree value = ce->value;
6397 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6398 if (cleared && initializer_zerop (value))
6399 continue;
6401 if (ce->index)
6402 eltpos = tree_to_uhwi (ce->index);
6403 else
6404 eltpos = i;
6406 if (vector)
6408 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6409 elements. */
6410 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6411 RTVEC_ELT (vector, eltpos)
6412 = expand_normal (value);
6414 else
6416 enum machine_mode value_mode =
6417 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6418 ? TYPE_MODE (TREE_TYPE (value))
6419 : eltmode;
6420 bitpos = eltpos * elt_size;
6421 store_constructor_field (target, bitsize, bitpos, value_mode,
6422 value, cleared, alias);
6426 if (vector)
6427 emit_insn (GEN_FCN (icode)
6428 (target,
6429 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6430 break;
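/* For a vector target held in a register, e.g. the hypothetical

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { a, b, c, d };

   the code above prefers the target's vec_init<mode> pattern when it is
   available: each scalar is expanded into VECTOR and a single vec_init
   insn is emitted, rather than storing the four lanes one bit-field at
   a time.  */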
6433 default:
6434 gcc_unreachable ();
6438 /* Store the value of EXP (an expression tree)
6439 into a subfield of TARGET which has mode MODE and occupies
6440 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6441 If MODE is VOIDmode, it means that we are storing into a bit-field.
6443 BITREGION_START is the bitpos of the first bitfield in this region.
6444 BITREGION_END is the bitpos of the ending bitfield in this region.
6445 These two fields are 0, if the C++ memory model does not apply,
6446 or we are not interested in keeping track of bitfield regions.
6448 Always return const0_rtx unless we have something particular to
6449 return.
6451 ALIAS_SET is the alias set for the destination. This value will
6452 (in general) be different from that for TARGET, since TARGET is a
6453 reference to the containing structure.
6455 If NONTEMPORAL is true, try generating a nontemporal store. */
6457 static rtx
6458 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6459 unsigned HOST_WIDE_INT bitregion_start,
6460 unsigned HOST_WIDE_INT bitregion_end,
6461 enum machine_mode mode, tree exp,
6462 alias_set_type alias_set, bool nontemporal)
6464 if (TREE_CODE (exp) == ERROR_MARK)
6465 return const0_rtx;
6467 /* If we have nothing to store, do nothing unless the expression has
6468 side-effects. */
6469 if (bitsize == 0)
6470 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6472 if (GET_CODE (target) == CONCAT)
6474 /* We're storing into a struct containing a single __complex. */
6476 gcc_assert (!bitpos);
6477 return store_expr (exp, target, 0, nontemporal);
6480 /* If the structure is in a register or if the component
6481 is a bit field, we cannot use addressing to access it.
6482 Use bit-field techniques or SUBREG to store in it. */
6484 if (mode == VOIDmode
6485 || (mode != BLKmode && ! direct_store[(int) mode]
6486 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6487 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6488 || REG_P (target)
6489 || GET_CODE (target) == SUBREG
6490 /* If the field isn't aligned enough to store as an ordinary memref,
6491 store it as a bit field. */
6492 || (mode != BLKmode
6493 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6494 || bitpos % GET_MODE_ALIGNMENT (mode))
6495 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6496 || (bitpos % BITS_PER_UNIT != 0)))
6497 || (bitsize >= 0 && mode != BLKmode
6498 && GET_MODE_BITSIZE (mode) > bitsize)
6499 /* If the RHS and field are a constant size and the size of the
6500 RHS isn't the same size as the bitfield, we must use bitfield
6501 operations. */
6502 || (bitsize >= 0
6503 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6504 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6505 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6506 decl we must use bitfield operations. */
6507 || (bitsize >= 0
6508 && TREE_CODE (exp) == MEM_REF
6509 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6510 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6511 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6512 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6514 rtx temp;
6515 gimple nop_def;
6517 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6518 implies a mask operation. If the precision is the same size as
6519 the field we're storing into, that mask is redundant. This is
6520 particularly common with bit field assignments generated by the
6521 C front end. */
6522 nop_def = get_def_for_expr (exp, NOP_EXPR);
6523 if (nop_def)
6525 tree type = TREE_TYPE (exp);
6526 if (INTEGRAL_TYPE_P (type)
6527 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6528 && bitsize == TYPE_PRECISION (type))
6530 tree op = gimple_assign_rhs1 (nop_def);
6531 type = TREE_TYPE (op);
6532 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6533 exp = op;
6537 temp = expand_normal (exp);
6539 /* If BITSIZE is narrower than the size of the type of EXP
6540 we will be narrowing TEMP. Normally, what's wanted are the
6541 low-order bits. However, if EXP's type is a record and this is a
6542 big-endian machine, we want the upper BITSIZE bits. */
6543 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6544 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6545 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6546 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6547 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6548 NULL_RTX, 1);
6550 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6551 if (mode != VOIDmode && mode != BLKmode
6552 && mode != TYPE_MODE (TREE_TYPE (exp)))
6553 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6555 /* If the modes of TEMP and TARGET are both BLKmode, both
6556 must be in memory and BITPOS must be aligned on a byte
6557 boundary. If so, we simply do a block copy. Likewise
6558 for a BLKmode-like TARGET. */
6559 if (GET_MODE (temp) == BLKmode
6560 && (GET_MODE (target) == BLKmode
6561 || (MEM_P (target)
6562 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6563 && (bitpos % BITS_PER_UNIT) == 0
6564 && (bitsize % BITS_PER_UNIT) == 0)))
6566 gcc_assert (MEM_P (target) && MEM_P (temp)
6567 && (bitpos % BITS_PER_UNIT) == 0);
6569 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6570 emit_block_move (target, temp,
6571 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6572 / BITS_PER_UNIT),
6573 BLOCK_OP_NORMAL);
6575 return const0_rtx;
6578 /* Handle calls that return values in multiple non-contiguous locations.
6579 The Irix 6 ABI has examples of this. */
6580 if (GET_CODE (temp) == PARALLEL)
6582 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6583 rtx temp_target;
6584 if (mode == BLKmode || mode == VOIDmode)
6585 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6586 temp_target = gen_reg_rtx (mode);
6587 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6588 temp = temp_target;
6590 else if (mode == BLKmode)
6592 /* Handle calls that return BLKmode values in registers. */
6593 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6595 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6596 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6597 temp = temp_target;
6599 else
6601 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6602 rtx temp_target;
6603 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6604 temp_target = gen_reg_rtx (mode);
6605 temp_target
6606 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6607 temp_target, mode, mode);
6608 temp = temp_target;
6612 /* Store the value in the bitfield. */
6613 store_bit_field (target, bitsize, bitpos,
6614 bitregion_start, bitregion_end,
6615 mode, temp);
6617 return const0_rtx;
6619 else
6621 /* Now build a reference to just the desired component. */
6622 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6624 if (to_rtx == target)
6625 to_rtx = copy_rtx (to_rtx);
6627 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6628 set_mem_alias_set (to_rtx, alias_set);
6630 return store_expr (exp, to_rtx, 0, nontemporal);
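/* A sketch of the two main paths above, using a hypothetical struct:

     struct s { unsigned a : 3; unsigned int b; } *p;
     p->a = x;    // bit-field: MODE is VOIDmode, store_bit_field path
     p->b = y;    // aligned ordinary field: adjust_address + store_expr

   The first store cannot be done through a plain memref and falls into
   the bit-field branch; the second typically builds a reference to just
   the desired component and stores into it directly.  */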
6634 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6635 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6636 codes and find the ultimate containing object, which we return.
6638 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6639 bit position, and *PUNSIGNEDP to the signedness of the field.
6640 If the position of the field is variable, we store a tree
6641 giving the variable offset (in units) in *POFFSET.
6642 This offset is in addition to the bit position.
6643 If the position is not variable, we store 0 in *POFFSET.
6645 If any of the extraction expressions is volatile,
6646 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6648 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6649 Otherwise, it is a mode that can be used to access the field.
6651 If the field describes a variable-sized object, *PMODE is set to
6652 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6653 this case, but the address of the object can be found.
6655 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6656 look through nodes that serve as markers of a greater alignment than
6657 the one that can be deduced from the expression. These nodes make it
6658 possible for front-ends to prevent temporaries from being created by
6659 the middle-end on alignment considerations. For that purpose, the
6660 normal operating mode at the high level is to always pass FALSE so that
6661 the ultimate containing object is really returned; moreover, the
6662 associated predicate handled_component_p will always return TRUE
6663 on these nodes, thus indicating that they are essentially handled
6664 by get_inner_reference. TRUE should only be passed when the caller
6665 is scanning the expression in order to build another representation
6666 and specifically knows how to handle these nodes; as such, this is
6667 the normal operating mode in the RTL expanders. */
6669 tree
6670 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6671 HOST_WIDE_INT *pbitpos, tree *poffset,
6672 enum machine_mode *pmode, int *punsignedp,
6673 int *pvolatilep, bool keep_aligning)
6675 tree size_tree = 0;
6676 enum machine_mode mode = VOIDmode;
6677 bool blkmode_bitfield = false;
6678 tree offset = size_zero_node;
6679 offset_int bit_offset = 0;
6681 /* First get the mode, signedness, and size. We do this from just the
6682 outermost expression. */
6683 *pbitsize = -1;
6684 if (TREE_CODE (exp) == COMPONENT_REF)
6686 tree field = TREE_OPERAND (exp, 1);
6687 size_tree = DECL_SIZE (field);
6688 if (flag_strict_volatile_bitfields > 0
6689 && TREE_THIS_VOLATILE (exp)
6690 && DECL_BIT_FIELD_TYPE (field)
6691 && DECL_MODE (field) != BLKmode)
6692 /* Volatile bitfields should be accessed in the mode of the
6693 field's type, not the mode computed based on the bit
6694 size. */
6695 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6696 else if (!DECL_BIT_FIELD (field))
6697 mode = DECL_MODE (field);
6698 else if (DECL_MODE (field) == BLKmode)
6699 blkmode_bitfield = true;
6701 *punsignedp = DECL_UNSIGNED (field);
6703 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6705 size_tree = TREE_OPERAND (exp, 1);
6706 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6707 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6709 /* For vector types, with the correct size of access, use the mode of
6710 the inner type. */
6711 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6712 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6713 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6714 mode = TYPE_MODE (TREE_TYPE (exp));
6716 else
6718 mode = TYPE_MODE (TREE_TYPE (exp));
6719 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6721 if (mode == BLKmode)
6722 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6723 else
6724 *pbitsize = GET_MODE_BITSIZE (mode);
6727 if (size_tree != 0)
6729 if (! tree_fits_uhwi_p (size_tree))
6730 mode = BLKmode, *pbitsize = -1;
6731 else
6732 *pbitsize = tree_to_uhwi (size_tree);
6735 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6736 and find the ultimate containing object. */
6737 while (1)
6739 switch (TREE_CODE (exp))
6741 case BIT_FIELD_REF:
6742 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6743 break;
6745 case COMPONENT_REF:
6747 tree field = TREE_OPERAND (exp, 1);
6748 tree this_offset = component_ref_field_offset (exp);
6750 /* If this field hasn't been filled in yet, don't go past it.
6751 This should only happen when folding expressions made during
6752 type construction. */
6753 if (this_offset == 0)
6754 break;
6756 offset = size_binop (PLUS_EXPR, offset, this_offset);
6757 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6759 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6761 break;
6763 case ARRAY_REF:
6764 case ARRAY_RANGE_REF:
6766 tree index = TREE_OPERAND (exp, 1);
6767 tree low_bound = array_ref_low_bound (exp);
6768 tree unit_size = array_ref_element_size (exp);
6770 /* We assume all arrays have sizes that are a multiple of a byte.
6771 First subtract the lower bound, if any, in the type of the
6772 index, then convert to sizetype and multiply by the size of
6773 the array element. */
6774 if (! integer_zerop (low_bound))
6775 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6776 index, low_bound);
6778 offset = size_binop (PLUS_EXPR, offset,
6779 size_binop (MULT_EXPR,
6780 fold_convert (sizetype, index),
6781 unit_size));
6783 break;
6785 case REALPART_EXPR:
6786 break;
6788 case IMAGPART_EXPR:
6789 bit_offset += *pbitsize;
6790 break;
6792 case VIEW_CONVERT_EXPR:
6793 if (keep_aligning && STRICT_ALIGNMENT
6794 && (TYPE_ALIGN (TREE_TYPE (exp))
6795 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6796 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6797 < BIGGEST_ALIGNMENT)
6798 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6799 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6800 goto done;
6801 break;
6803 case MEM_REF:
6804 /* Hand back the decl for MEM[&decl, off]. */
6805 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6807 tree off = TREE_OPERAND (exp, 1);
6808 if (!integer_zerop (off))
6810 offset_int boff, coff = mem_ref_offset (exp);
6811 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6812 bit_offset += boff;
6814 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6816 goto done;
6818 default:
6819 goto done;
6822 /* If any reference in the chain is volatile, the effect is volatile. */
6823 if (TREE_THIS_VOLATILE (exp))
6824 *pvolatilep = 1;
6826 exp = TREE_OPERAND (exp, 0);
6828 done:
6830 /* If OFFSET is constant, see if we can return the whole thing as a
6831 constant bit position. Make sure to handle overflow during
6832 this conversion. */
6833 if (TREE_CODE (offset) == INTEGER_CST)
6835 offset_int tem = wi::sext (wi::to_offset (offset),
6836 TYPE_PRECISION (sizetype));
6837 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6838 tem += bit_offset;
6839 if (wi::fits_shwi_p (tem))
6841 *pbitpos = tem.to_shwi ();
6842 *poffset = offset = NULL_TREE;
6846 /* Otherwise, split it up. */
6847 if (offset)
6849 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6850 if (wi::neg_p (bit_offset))
6852 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6853 offset_int tem = bit_offset.and_not (mask);
6854 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6855 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6856 bit_offset -= tem;
6857 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6858 offset = size_binop (PLUS_EXPR, offset,
6859 wide_int_to_tree (sizetype, tem));
6862 *pbitpos = bit_offset.to_shwi ();
6863 *poffset = offset;
6866 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6867 if (mode == VOIDmode
6868 && blkmode_bitfield
6869 && (*pbitpos % BITS_PER_UNIT) == 0
6870 && (*pbitsize % BITS_PER_UNIT) == 0)
6871 *pmode = BLKmode;
6872 else
6873 *pmode = mode;
6875 return exp;
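/* Example decomposition of a hypothetical reference, assuming 32-bit
   int and 16-bit short:

     struct s { int pad; short f[10]; } x;
     ... x.f[3] ...

   get_inner_reference would be expected to hand back the VAR_DECL for X
   with *PBITSIZE == 16, *PBITPOS == 32 + 3 * 16 == 80, *POFFSET ==
   NULL_TREE (everything is constant), *PMODE == HImode and
   *PUNSIGNEDP == 0.  */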
6878 /* Return a tree of sizetype representing the size, in bytes, of the element
6879 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6881 tree
6882 array_ref_element_size (tree exp)
6884 tree aligned_size = TREE_OPERAND (exp, 3);
6885 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6886 location_t loc = EXPR_LOCATION (exp);
6888 /* If a size was specified in the ARRAY_REF, it's the size measured
6889 in alignment units of the element type. So multiply by that value. */
6890 if (aligned_size)
6892 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6893 sizetype from another type of the same width and signedness. */
6894 if (TREE_TYPE (aligned_size) != sizetype)
6895 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6896 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6897 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6900 /* Otherwise, take the size from that of the element type. Substitute
6901 any PLACEHOLDER_EXPR that we have. */
6902 else
6903 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6906 /* Return a tree representing the lower bound of the array mentioned in
6907 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6909 tree
6910 array_ref_low_bound (tree exp)
6912 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6914 /* If a lower bound is specified in EXP, use it. */
6915 if (TREE_OPERAND (exp, 2))
6916 return TREE_OPERAND (exp, 2);
6918 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6919 substituting for a PLACEHOLDER_EXPR as needed. */
6920 if (domain_type && TYPE_MIN_VALUE (domain_type))
6921 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6923 /* Otherwise, return a zero of the appropriate type. */
6924 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6927 /* Returns true if REF is an array reference to an array at the end of
6928 a structure. If this is the case, the array may be allocated larger
6929 than its upper bound implies. */
6931 bool
6932 array_at_struct_end_p (tree ref)
6934 if (TREE_CODE (ref) != ARRAY_REF
6935 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6936 return false;
6938 while (handled_component_p (ref))
6940 /* If the reference chain contains a component reference to a
6941 non-union type and another field follows, the reference
6942 is not at the end of a structure. */
6943 if (TREE_CODE (ref) == COMPONENT_REF
6944 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6946 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6947 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6948 nextf = DECL_CHAIN (nextf);
6949 if (nextf)
6950 return false;
6953 ref = TREE_OPERAND (ref, 0);
6956 /* If the reference is based on a declared entity, the size of the array
6957 is constrained by its given domain. */
6958 if (DECL_P (ref))
6959 return false;
6961 return true;
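/* For instance, with the hypothetical declarations

     struct msg { int len; char buf[1]; };
     extern struct msg *m;    // m->buf[i]: array_at_struct_end_p is true
     struct msg s;            // s.buf[i]:  false, S is a declared entity

   the first reference may legitimately index past BUF's declared bound
   (the traditional "struct hack"), so its size cannot be trusted, while
   the second is bounded by the declaration of S.  */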
6964 /* Return a tree representing the upper bound of the array mentioned in
6965 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6967 tree
6968 array_ref_up_bound (tree exp)
6970 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6972 /* If there is a domain type and it has an upper bound, use it, substituting
6973 for a PLACEHOLDER_EXPR as needed. */
6974 if (domain_type && TYPE_MAX_VALUE (domain_type))
6975 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6977 /* Otherwise fail. */
6978 return NULL_TREE;
6981 /* Return a tree representing the offset, in bytes, of the field referenced
6982 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6984 tree
6985 component_ref_field_offset (tree exp)
6987 tree aligned_offset = TREE_OPERAND (exp, 2);
6988 tree field = TREE_OPERAND (exp, 1);
6989 location_t loc = EXPR_LOCATION (exp);
6991 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6992 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6993 value. */
6994 if (aligned_offset)
6996 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6997 sizetype from another type of the same width and signedness. */
6998 if (TREE_TYPE (aligned_offset) != sizetype)
6999 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7000 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7001 size_int (DECL_OFFSET_ALIGN (field)
7002 / BITS_PER_UNIT));
7005 /* Otherwise, take the offset from that of the field. Substitute
7006 any PLACEHOLDER_EXPR that we have. */
7007 else
7008 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7011 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7013 static unsigned HOST_WIDE_INT
7014 target_align (const_tree target)
7016 /* We might have a chain of nested references with intermediate misaligning
7017 bitfield components, so we need to recurse to find out. */
7019 unsigned HOST_WIDE_INT this_align, outer_align;
7021 switch (TREE_CODE (target))
7023 case BIT_FIELD_REF:
7024 return 1;
7026 case COMPONENT_REF:
7027 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7028 outer_align = target_align (TREE_OPERAND (target, 0));
7029 return MIN (this_align, outer_align);
7031 case ARRAY_REF:
7032 case ARRAY_RANGE_REF:
7033 this_align = TYPE_ALIGN (TREE_TYPE (target));
7034 outer_align = target_align (TREE_OPERAND (target, 0));
7035 return MIN (this_align, outer_align);
7037 CASE_CONVERT:
7038 case NON_LVALUE_EXPR:
7039 case VIEW_CONVERT_EXPR:
7040 this_align = TYPE_ALIGN (TREE_TYPE (target));
7041 outer_align = target_align (TREE_OPERAND (target, 0));
7042 return MAX (this_align, outer_align);
7044 default:
7045 return TYPE_ALIGN (TREE_TYPE (target));
7050 /* Given an rtx VALUE that may contain additions and multiplications, return
7051 an equivalent value that just refers to a register, memory, or constant.
7052 This is done by generating instructions to perform the arithmetic and
7053 returning a pseudo-register containing the value.
7055 The returned value may be a REG, SUBREG, MEM or constant. */
7058 force_operand (rtx value, rtx target)
7060 rtx op1, op2;
7061 /* Use subtarget as the target for operand 0 of a binary operation. */
7062 rtx subtarget = get_subtarget (target);
7063 enum rtx_code code = GET_CODE (value);
7065 /* Check for subreg applied to an expression produced by loop optimizer. */
7066 if (code == SUBREG
7067 && !REG_P (SUBREG_REG (value))
7068 && !MEM_P (SUBREG_REG (value)))
7070 value
7071 = simplify_gen_subreg (GET_MODE (value),
7072 force_reg (GET_MODE (SUBREG_REG (value)),
7073 force_operand (SUBREG_REG (value),
7074 NULL_RTX)),
7075 GET_MODE (SUBREG_REG (value)),
7076 SUBREG_BYTE (value));
7077 code = GET_CODE (value);
7080 /* Check for a PIC address load. */
7081 if ((code == PLUS || code == MINUS)
7082 && XEXP (value, 0) == pic_offset_table_rtx
7083 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7084 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7085 || GET_CODE (XEXP (value, 1)) == CONST))
7087 if (!subtarget)
7088 subtarget = gen_reg_rtx (GET_MODE (value));
7089 emit_move_insn (subtarget, value);
7090 return subtarget;
7093 if (ARITHMETIC_P (value))
7095 op2 = XEXP (value, 1);
7096 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7097 subtarget = 0;
7098 if (code == MINUS && CONST_INT_P (op2))
7100 code = PLUS;
7101 op2 = negate_rtx (GET_MODE (value), op2);
7104 /* Check for an addition with OP2 a constant integer and our first
7105 operand a PLUS of a virtual register and something else. In that
7106 case, we want to emit the sum of the virtual register and the
7107 constant first and then add the other value. This allows virtual
7108 register instantiation to simply modify the constant rather than
7109 creating another one around this addition. */
7110 if (code == PLUS && CONST_INT_P (op2)
7111 && GET_CODE (XEXP (value, 0)) == PLUS
7112 && REG_P (XEXP (XEXP (value, 0), 0))
7113 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7114 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7116 rtx temp = expand_simple_binop (GET_MODE (value), code,
7117 XEXP (XEXP (value, 0), 0), op2,
7118 subtarget, 0, OPTAB_LIB_WIDEN);
7119 return expand_simple_binop (GET_MODE (value), code, temp,
7120 force_operand (XEXP (XEXP (value,
7121 0), 1), 0),
7122 target, 0, OPTAB_LIB_WIDEN);
7125 op1 = force_operand (XEXP (value, 0), subtarget);
7126 op2 = force_operand (op2, NULL_RTX);
7127 switch (code)
7129 case MULT:
7130 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7131 case DIV:
7132 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7133 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7134 target, 1, OPTAB_LIB_WIDEN);
7135 else
7136 return expand_divmod (0,
7137 FLOAT_MODE_P (GET_MODE (value))
7138 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7139 GET_MODE (value), op1, op2, target, 0);
7140 case MOD:
7141 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7142 target, 0);
7143 case UDIV:
7144 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7145 target, 1);
7146 case UMOD:
7147 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7148 target, 1);
7149 case ASHIFTRT:
7150 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7151 target, 0, OPTAB_LIB_WIDEN);
7152 default:
7153 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7154 target, 1, OPTAB_LIB_WIDEN);
7157 if (UNARY_P (value))
7159 if (!target)
7160 target = gen_reg_rtx (GET_MODE (value));
7161 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7162 switch (code)
7164 case ZERO_EXTEND:
7165 case SIGN_EXTEND:
7166 case TRUNCATE:
7167 case FLOAT_EXTEND:
7168 case FLOAT_TRUNCATE:
7169 convert_move (target, op1, code == ZERO_EXTEND);
7170 return target;
7172 case FIX:
7173 case UNSIGNED_FIX:
7174 expand_fix (target, op1, code == UNSIGNED_FIX);
7175 return target;
7177 case FLOAT:
7178 case UNSIGNED_FLOAT:
7179 expand_float (target, op1, code == UNSIGNED_FLOAT);
7180 return target;
7182 default:
7183 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7187 #ifdef INSN_SCHEDULING
7188 /* On machines that have insn scheduling, we want all memory references to be
7189 explicit, so we need to deal with such paradoxical SUBREGs. */
7190 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7191 value
7192 = simplify_gen_subreg (GET_MODE (value),
7193 force_reg (GET_MODE (SUBREG_REG (value)),
7194 force_operand (SUBREG_REG (value),
7195 NULL_RTX)),
7196 GET_MODE (SUBREG_REG (value)),
7197 SUBREG_BYTE (value));
7198 #endif
7200 return value;
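/* A typical use: given VALUE such as the hypothetical

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand emits the multiply (or an equivalent shift) and the
   addition and returns a pseudo holding the sum, so later code can
   treat the whole address computation as a single register operand.  */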
7203 /* Subroutine of expand_expr: return nonzero iff there is no way that
7204 EXP can reference X, which is being modified. TOP_P is nonzero if this
7205 call is going to be used to determine whether we need a temporary
7206 for EXP, as opposed to a recursive call to this function.
7208 It is always safe for this routine to return zero since it merely
7209 searches for optimization opportunities. */
7212 safe_from_p (const_rtx x, tree exp, int top_p)
7214 rtx exp_rtl = 0;
7215 int i, nops;
7217 if (x == 0
7218 /* If EXP has varying size, we MUST use a target since we currently
7219 have no way of allocating temporaries of variable size
7220 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7221 So we assume here that something at a higher level has prevented a
7222 clash. This is somewhat bogus, but the best we can do. Only
7223 do this when X is BLKmode and when we are at the top level. */
7224 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7225 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7226 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7227 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7228 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7229 != INTEGER_CST)
7230 && GET_MODE (x) == BLKmode)
7231 /* If X is in the outgoing argument area, it is always safe. */
7232 || (MEM_P (x)
7233 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7234 || (GET_CODE (XEXP (x, 0)) == PLUS
7235 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7236 return 1;
7238 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
7239 find the underlying pseudo. */
7240 if (GET_CODE (x) == SUBREG)
7242 x = SUBREG_REG (x);
7243 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7244 return 0;
7247 /* Now look at our tree code and possibly recurse. */
7248 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7250 case tcc_declaration:
7251 exp_rtl = DECL_RTL_IF_SET (exp);
7252 break;
7254 case tcc_constant:
7255 return 1;
7257 case tcc_exceptional:
7258 if (TREE_CODE (exp) == TREE_LIST)
7260 while (1)
7262 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7263 return 0;
7264 exp = TREE_CHAIN (exp);
7265 if (!exp)
7266 return 1;
7267 if (TREE_CODE (exp) != TREE_LIST)
7268 return safe_from_p (x, exp, 0);
7271 else if (TREE_CODE (exp) == CONSTRUCTOR)
7273 constructor_elt *ce;
7274 unsigned HOST_WIDE_INT idx;
7276 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7277 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7278 || !safe_from_p (x, ce->value, 0))
7279 return 0;
7280 return 1;
7282 else if (TREE_CODE (exp) == ERROR_MARK)
7283 return 1; /* An already-visited SAVE_EXPR? */
7284 else
7285 return 0;
7287 case tcc_statement:
7288 /* The only case we look at here is the DECL_INITIAL inside a
7289 DECL_EXPR. */
7290 return (TREE_CODE (exp) != DECL_EXPR
7291 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7292 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7293 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7295 case tcc_binary:
7296 case tcc_comparison:
7297 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7298 return 0;
7299 /* Fall through. */
7301 case tcc_unary:
7302 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7304 case tcc_expression:
7305 case tcc_reference:
7306 case tcc_vl_exp:
7307 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7308 the expression. If it is set, we conflict iff we are that rtx or
7309 both are in memory. Otherwise, we check all operands of the
7310 expression recursively. */
7312 switch (TREE_CODE (exp))
7314 case ADDR_EXPR:
7315 /* If the operand is static or we are static, we can't conflict.
7316 Likewise if we don't conflict with the operand at all. */
7317 if (staticp (TREE_OPERAND (exp, 0))
7318 || TREE_STATIC (exp)
7319 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7320 return 1;
7322 /* Otherwise, the only way this can conflict is if we are taking
7323 the address of a DECL whose address is part of X, which is
7324 very rare. */
7325 exp = TREE_OPERAND (exp, 0);
7326 if (DECL_P (exp))
7328 if (!DECL_RTL_SET_P (exp)
7329 || !MEM_P (DECL_RTL (exp)))
7330 return 0;
7331 else
7332 exp_rtl = XEXP (DECL_RTL (exp), 0);
7334 break;
7336 case MEM_REF:
7337 if (MEM_P (x)
7338 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7339 get_alias_set (exp)))
7340 return 0;
7341 break;
7343 case CALL_EXPR:
7344 /* Assume that the call will clobber all hard registers and
7345 all of memory. */
7346 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7347 || MEM_P (x))
7348 return 0;
7349 break;
7351 case WITH_CLEANUP_EXPR:
7352 case CLEANUP_POINT_EXPR:
7353 /* Lowered by gimplify.c. */
7354 gcc_unreachable ();
7356 case SAVE_EXPR:
7357 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7359 default:
7360 break;
7363 /* If we have an rtx, we do not need to scan our operands. */
7364 if (exp_rtl)
7365 break;
7367 nops = TREE_OPERAND_LENGTH (exp);
7368 for (i = 0; i < nops; i++)
7369 if (TREE_OPERAND (exp, i) != 0
7370 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7371 return 0;
7373 break;
7375 case tcc_type:
7376 /* Should never get a type here. */
7377 gcc_unreachable ();
7380 /* If we have an rtl, find any enclosed object. Then see if we conflict
7381 with it. */
7382 if (exp_rtl)
7384 if (GET_CODE (exp_rtl) == SUBREG)
7386 exp_rtl = SUBREG_REG (exp_rtl);
7387 if (REG_P (exp_rtl)
7388 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7389 return 0;
7392 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7393 are memory and they conflict. */
7394 return ! (rtx_equal_p (x, exp_rtl)
7395 || (MEM_P (x) && MEM_P (exp_rtl)
7396 && true_dependence (exp_rtl, VOIDmode, x)));
7399 /* If we reach here, it is safe. */
7400 return 1;
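/* Intuitively, when expanding something like the hypothetical
   assignment 'a = b + c', A's own rtx can be used as the target of the
   addition only if neither B nor C can refer to A; whenever that cannot
   be proven (for example, two MEMs whose alias sets may conflict), the
   caller falls back to a temporary, which is always correct, merely
   slower.  */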
7404 /* Return the highest power of two that EXP is known to be a multiple of.
7405 This is used in updating alignment of MEMs in array references. */
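/* As an illustration: if EXP is the constant 24 (binary 11000),
   tree_ctz reports 3 known trailing zero bits, so the value returned
   is 8, assuming that does not exceed BIGGEST_ALIGNMENT.  */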
7407 unsigned HOST_WIDE_INT
7408 highest_pow2_factor (const_tree exp)
7410 unsigned HOST_WIDE_INT ret;
7411 int trailing_zeros = tree_ctz (exp);
7412 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7413 return BIGGEST_ALIGNMENT;
7414 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7415 if (ret > BIGGEST_ALIGNMENT)
7416 return BIGGEST_ALIGNMENT;
7417 return ret;
7420 /* Similar, except that the alignment requirements of TARGET are
7421 taken into account. Assume it is at least as aligned as its
7422 type, unless it is a COMPONENT_REF in which case the layout of
7423 the structure gives the alignment. */
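/* Rough example: if TARGET is a COMPONENT_REF whose enclosing record
   guarantees 8-byte alignment while EXP itself is only known to be a
   multiple of 4, the result is MAX (4, 8), i.e. 8.  */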
7425 static unsigned HOST_WIDE_INT
7426 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7428 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7429 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7431 return MAX (factor, talign);
7434 #ifdef HAVE_conditional_move
7435 /* Convert the tree comparison code TCODE to the rtl one where the
7436 signedness is UNSIGNEDP. */
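/* For example, LT_EXPR becomes LTU when UNSIGNEDP is nonzero and LT
   otherwise, while unordered variants such as UNGE_EXPR map to their
   RTL counterparts (here UNGE) regardless of signedness.  */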
7438 static enum rtx_code
7439 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7441 enum rtx_code code;
7442 switch (tcode)
7444 case EQ_EXPR:
7445 code = EQ;
7446 break;
7447 case NE_EXPR:
7448 code = NE;
7449 break;
7450 case LT_EXPR:
7451 code = unsignedp ? LTU : LT;
7452 break;
7453 case LE_EXPR:
7454 code = unsignedp ? LEU : LE;
7455 break;
7456 case GT_EXPR:
7457 code = unsignedp ? GTU : GT;
7458 break;
7459 case GE_EXPR:
7460 code = unsignedp ? GEU : GE;
7461 break;
7462 case UNORDERED_EXPR:
7463 code = UNORDERED;
7464 break;
7465 case ORDERED_EXPR:
7466 code = ORDERED;
7467 break;
7468 case UNLT_EXPR:
7469 code = UNLT;
7470 break;
7471 case UNLE_EXPR:
7472 code = UNLE;
7473 break;
7474 case UNGT_EXPR:
7475 code = UNGT;
7476 break;
7477 case UNGE_EXPR:
7478 code = UNGE;
7479 break;
7480 case UNEQ_EXPR:
7481 code = UNEQ;
7482 break;
7483 case LTGT_EXPR:
7484 code = LTGT;
7485 break;
7487 default:
7488 gcc_unreachable ();
7490 return code;
7492 #endif
7494 /* Subroutine of expand_expr. Expand the two operands of a binary
7495 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7496 The value may be stored in TARGET if TARGET is nonzero. The
7497 MODIFIER argument is as documented by expand_expr. */
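/* For instance, when expanding X * X both operands satisfy
   operand_equal_p, so the subexpression is expanded only once and
   *OP1 is simply a copy_rtx of *OP0.  */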
7499 static void
7500 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7501 enum expand_modifier modifier)
7503 if (! safe_from_p (target, exp1, 1))
7504 target = 0;
7505 if (operand_equal_p (exp0, exp1, 0))
7507 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7508 *op1 = copy_rtx (*op0);
7510 else
7512 /* If we need to preserve evaluation order, copy exp0 into its own
7513 temporary variable so that it can't be clobbered by exp1. */
7514 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7515 exp0 = save_expr (exp0);
7516 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7517 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7522 /* Return a MEM that contains constant EXP. DEFER is as for
7523 output_constant_def and MODIFIER is as for expand_expr. */
7525 static rtx
7526 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7528 rtx mem;
7530 mem = output_constant_def (exp, defer);
7531 if (modifier != EXPAND_INITIALIZER)
7532 mem = use_anchored_address (mem);
7533 return mem;
7536 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7537 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
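/* A typical case, as a sketch: for &S.F, where field F lives at byte
   offset 4 within S, get_inner_reference hands back S together with a
   bit position of 4 * BITS_PER_UNIT; the address of S is then expanded
   recursively and the 4 bytes are added back via plus_constant.  */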
7539 static rtx
7540 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7541 enum expand_modifier modifier, addr_space_t as)
7543 rtx result, subtarget;
7544 tree inner, offset;
7545 HOST_WIDE_INT bitsize, bitpos;
7546 int volatilep, unsignedp;
7547 enum machine_mode mode1;
7549 /* If we are taking the address of a constant and are at the top level,
7550 we have to use output_constant_def since we can't call force_const_mem
7551 at top level. */
7552 /* ??? This should be considered a front-end bug. We should not be
7553 generating ADDR_EXPR of something that isn't an LVALUE. The only
7554 exception here is STRING_CST. */
7555 if (CONSTANT_CLASS_P (exp))
7557 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7558 if (modifier < EXPAND_SUM)
7559 result = force_operand (result, target);
7560 return result;
7563 /* Everything must be something allowed by is_gimple_addressable. */
7564 switch (TREE_CODE (exp))
7566 case INDIRECT_REF:
7567 /* This case will happen via recursion for &a->b. */
7568 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7570 case MEM_REF:
7572 tree tem = TREE_OPERAND (exp, 0);
7573 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7574 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7575 return expand_expr (tem, target, tmode, modifier);
7578 case CONST_DECL:
7579 /* Expand the initializer like constants above. */
7580 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7581 0, modifier), 0);
7582 if (modifier < EXPAND_SUM)
7583 result = force_operand (result, target);
7584 return result;
7586 case REALPART_EXPR:
7587 /* The real part of the complex number is always first, therefore
7588 the address is the same as the address of the parent object. */
7589 offset = 0;
7590 bitpos = 0;
7591 inner = TREE_OPERAND (exp, 0);
7592 break;
7594 case IMAGPART_EXPR:
7595 /* The imaginary part of the complex number is always second.
7596 The expression is therefore always offset by the size of the
7597 scalar type. */
7598 offset = 0;
7599 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7600 inner = TREE_OPERAND (exp, 0);
7601 break;
7603 case COMPOUND_LITERAL_EXPR:
7604 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7605 rtl_for_decl_init is called on DECL_INITIAL with
7606 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7607 if (modifier == EXPAND_INITIALIZER
7608 && COMPOUND_LITERAL_EXPR_DECL (exp))
7609 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7610 target, tmode, modifier, as);
7611 /* FALLTHRU */
7612 default:
7613 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7614 expand_expr, as that can have various side effects; LABEL_DECLs, for
7615 example, may not have their DECL_RTL set yet. Expand the rtl of
7616 CONSTRUCTORs too, which should yield a memory reference for the
7617 constructor's contents. Assume language specific tree nodes can
7618 be expanded in some interesting way. */
7619 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7620 if (DECL_P (exp)
7621 || TREE_CODE (exp) == CONSTRUCTOR
7622 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7624 result = expand_expr (exp, target, tmode,
7625 modifier == EXPAND_INITIALIZER
7626 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7628 /* If the DECL isn't in memory, then the DECL wasn't properly
7629 marked TREE_ADDRESSABLE, which will be either a front-end
7630 or a tree optimizer bug. */
7632 if (TREE_ADDRESSABLE (exp)
7633 && ! MEM_P (result)
7634 && ! targetm.calls.allocate_stack_slots_for_args ())
7636 error ("local frame unavailable (naked function?)");
7637 return result;
7639 else
7640 gcc_assert (MEM_P (result));
7641 result = XEXP (result, 0);
7643 /* ??? Is this needed anymore? */
7644 if (DECL_P (exp))
7645 TREE_USED (exp) = 1;
7647 if (modifier != EXPAND_INITIALIZER
7648 && modifier != EXPAND_CONST_ADDRESS
7649 && modifier != EXPAND_SUM)
7650 result = force_operand (result, target);
7651 return result;
7654 /* Pass FALSE as the last argument to get_inner_reference although
7655 we are expanding to RTL. The rationale is that we know how to
7656 handle "aligning nodes" here: we can just bypass them because
7657 they won't change the final object whose address will be returned
7658 (they actually exist only for that purpose). */
7659 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7660 &mode1, &unsignedp, &volatilep, false);
7661 break;
7664 /* We must have made progress. */
7665 gcc_assert (inner != exp);
7667 subtarget = offset || bitpos ? NULL_RTX : target;
7668 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7669 inner alignment, force the inner to be sufficiently aligned. */
7670 if (CONSTANT_CLASS_P (inner)
7671 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7673 inner = copy_node (inner);
7674 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7675 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7676 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7678 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7680 if (offset)
7682 rtx tmp;
7684 if (modifier != EXPAND_NORMAL)
7685 result = force_operand (result, NULL);
7686 tmp = expand_expr (offset, NULL_RTX, tmode,
7687 modifier == EXPAND_INITIALIZER
7688 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7690 /* expand_expr is allowed to return an object in a mode other
7691 than TMODE. If it did, we need to convert. */
7692 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7693 tmp = convert_modes (tmode, GET_MODE (tmp),
7694 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7695 result = convert_memory_address_addr_space (tmode, result, as);
7696 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7698 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7699 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7700 else
7702 subtarget = bitpos ? NULL_RTX : target;
7703 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7704 1, OPTAB_LIB_WIDEN);
7708 if (bitpos)
7710 /* Someone beforehand should have rejected taking the address
7711 of such an object. */
7712 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7714 result = convert_memory_address_addr_space (tmode, result, as);
7715 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7716 if (modifier < EXPAND_SUM)
7717 result = force_operand (result, target);
7720 return result;
7723 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7724 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7726 static rtx
7727 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7728 enum expand_modifier modifier)
7730 addr_space_t as = ADDR_SPACE_GENERIC;
7731 enum machine_mode address_mode = Pmode;
7732 enum machine_mode pointer_mode = ptr_mode;
7733 enum machine_mode rmode;
7734 rtx result;
7736 /* Target mode of VOIDmode says "whatever's natural". */
7737 if (tmode == VOIDmode)
7738 tmode = TYPE_MODE (TREE_TYPE (exp));
7740 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7742 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7743 address_mode = targetm.addr_space.address_mode (as);
7744 pointer_mode = targetm.addr_space.pointer_mode (as);
7747 /* We can get called with some Weird Things if the user does silliness
7748 like "(short) &a". In that case, convert_memory_address won't do
7749 the right thing, so ignore the given target mode. */
7750 if (tmode != address_mode && tmode != pointer_mode)
7751 tmode = address_mode;
7753 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7754 tmode, modifier, as);
7756 /* Despite expand_expr's claims concerning ignoring TMODE when not
7757 strictly convenient, stuff breaks if we don't honor it. Note
7758 that combined with the above, we only do this for pointer modes. */
7759 rmode = GET_MODE (result);
7760 if (rmode == VOIDmode)
7761 rmode = tmode;
7762 if (rmode != tmode)
7763 result = convert_memory_address_addr_space (tmode, result, as);
7765 return result;
7768 /* Generate code for computing CONSTRUCTOR EXP.
7769 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7770 is TRUE, NULL is returned instead of creating a temporary variable
7771 in memory, and the caller needs to handle it differently. */
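/* For example, a TREE_STATIC, BLKmode constructor whose initializer is
   all zeros is expanded as a single clear_storage of TARGET; other
   constructors are either referenced as a constant in memory or filled
   in piece by piece by store_constructor.  */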
7773 static rtx
7774 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7775 bool avoid_temp_mem)
7777 tree type = TREE_TYPE (exp);
7778 enum machine_mode mode = TYPE_MODE (type);
7780 /* Try to avoid creating a temporary at all. This is possible
7781 if all of the initializer is zero.
7782 FIXME: try to handle all [0..255] initializers we can handle
7783 with memset. */
7784 if (TREE_STATIC (exp)
7785 && !TREE_ADDRESSABLE (exp)
7786 && target != 0 && mode == BLKmode
7787 && all_zeros_p (exp))
7789 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7790 return target;
7793 /* All elts simple constants => refer to a constant in memory. But
7794 if this is a non-BLKmode mode, let it store a field at a time
7795 since that should make a CONST_INT, CONST_WIDE_INT or
7796 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7797 use, it is best to store directly into the target unless the type
7798 is large enough that memcpy will be used. If we are making an
7799 initializer and all operands are constant, put it in memory as
7800 well.
7802 FIXME: Avoid trying to fill vector constructors piecemeal.
7803 Output them with output_constant_def below unless we're sure
7804 they're zeros. This should go away when vector initializers
7805 are treated like VECTOR_CST instead of arrays. */
7806 if ((TREE_STATIC (exp)
7807 && ((mode == BLKmode
7808 && ! (target != 0 && safe_from_p (target, exp, 1)))
7809 || TREE_ADDRESSABLE (exp)
7810 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7811 && (! MOVE_BY_PIECES_P
7812 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7813 TYPE_ALIGN (type)))
7814 && ! mostly_zeros_p (exp))))
7815 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7816 && TREE_CONSTANT (exp)))
7818 rtx constructor;
7820 if (avoid_temp_mem)
7821 return NULL_RTX;
7823 constructor = expand_expr_constant (exp, 1, modifier);
7825 if (modifier != EXPAND_CONST_ADDRESS
7826 && modifier != EXPAND_INITIALIZER
7827 && modifier != EXPAND_SUM)
7828 constructor = validize_mem (constructor);
7830 return constructor;
7833 /* Handle calls that pass values in multiple non-contiguous
7834 locations. The Irix 6 ABI has examples of this. */
7835 if (target == 0 || ! safe_from_p (target, exp, 1)
7836 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7838 if (avoid_temp_mem)
7839 return NULL_RTX;
7841 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7844 store_constructor (exp, target, 0, int_expr_size (exp));
7845 return target;
7849 /* expand_expr: generate code for computing expression EXP.
7850 An rtx for the computed value is returned. The value is never null.
7851 In the case of a void EXP, const0_rtx is returned.
7853 The value may be stored in TARGET if TARGET is nonzero.
7854 TARGET is just a suggestion; callers must assume that
7855 the rtx returned may not be the same as TARGET.
7857 If TARGET is CONST0_RTX, it means that the value will be ignored.
7859 If TMODE is not VOIDmode, it suggests generating the
7860 result in mode TMODE. But this is done only when convenient.
7861 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7862 TMODE is just a suggestion; callers must assume that
7863 the rtx returned may not have mode TMODE.
7865 Note that TARGET may have neither TMODE nor MODE. In that case, it
7866 probably will not be used.
7868 If MODIFIER is EXPAND_SUM then when EXP is an addition
7869 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7870 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7871 products as above, or REG or MEM, or constant.
7872 Ordinarily in such cases we would output mul or add instructions
7873 and then return a pseudo reg containing the sum.
7875 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7876 it also marks a label as absolutely required (it can't be dead).
7877 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7878 This is used for outputting expressions used in initializers.
7880 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7881 with a constant address even if that address is not normally legitimate.
7882 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7884 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7885 a call parameter. Such targets require special care as we haven't yet
7886 marked TARGET so that it's safe from being trashed by libcalls. We
7887 don't want to use TARGET for anything but the final result;
7888 intermediate values must go elsewhere. Additionally, calls to
7889 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7891 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7892 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7893 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7894 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7895 recursively.
7897 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7898 In this case, we don't adjust a returned MEM rtx that wouldn't be
7899 sufficiently aligned for its mode; instead, it's up to the caller
7900 to deal with it afterwards. This is used to make sure that unaligned
7901 base objects for which out-of-bounds accesses are supported, for
7902 example record types with trailing arrays, aren't realigned behind
7903 the back of the caller.
7904 The normal operating mode is to pass FALSE for this parameter. */
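/* As a sketch of the EXPAND_SUM behaviour: expanding P + 4 for a
   pointer P held in a pseudo register may simply return
   (plus (reg P) (const_int 4)) for the caller to fold into an address,
   instead of emitting an add instruction and returning its result.  */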
rtx
7907 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7908 enum expand_modifier modifier, rtx *alt_rtl,
7909 bool inner_reference_p)
7911 rtx ret;
7913 /* Handle ERROR_MARK before anybody tries to access its type. */
7914 if (TREE_CODE (exp) == ERROR_MARK
7915 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7917 ret = CONST0_RTX (tmode);
7918 return ret ? ret : const0_rtx;
7921 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7922 inner_reference_p);
7923 return ret;
7926 /* Try to expand the conditional expression which is represented by
7927 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7928 return the rtl reg which represents the result. Otherwise return
7929 NULL_RTX. */
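/* Sketch of the mechanism: for X = A < B ? C : D, the comparison
   A < B is expanded to an RTL comparison, C and D are expanded, and
   emit_conditional_move is asked to produce a conditional move into a
   temporary.  If the target has no suitable instruction, NULL_RTX is
   returned and the caller falls back to branches.  */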
7931 static rtx
7932 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7933 tree treeop1 ATTRIBUTE_UNUSED,
7934 tree treeop2 ATTRIBUTE_UNUSED)
7936 #ifdef HAVE_conditional_move
7937 rtx insn;
7938 rtx op00, op01, op1, op2;
7939 enum rtx_code comparison_code;
7940 enum machine_mode comparison_mode;
7941 gimple srcstmt;
7942 rtx temp;
7943 tree type = TREE_TYPE (treeop1);
7944 int unsignedp = TYPE_UNSIGNED (type);
7945 enum machine_mode mode = TYPE_MODE (type);
7946 enum machine_mode orig_mode = mode;
7948 /* If we cannot do a conditional move on the mode, try doing it
7949 with the promoted mode. */
7950 if (!can_conditionally_move_p (mode))
7952 mode = promote_mode (type, mode, &unsignedp);
7953 if (!can_conditionally_move_p (mode))
7954 return NULL_RTX;
7955 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7957 else
7958 temp = assign_temp (type, 0, 1);
7960 start_sequence ();
7961 expand_operands (treeop1, treeop2,
7962 temp, &op1, &op2, EXPAND_NORMAL);
7964 if (TREE_CODE (treeop0) == SSA_NAME
7965 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7967 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7968 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7969 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7970 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7971 comparison_mode = TYPE_MODE (type);
7972 unsignedp = TYPE_UNSIGNED (type);
7973 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7975 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7977 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7978 enum tree_code cmpcode = TREE_CODE (treeop0);
7979 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7980 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7981 unsignedp = TYPE_UNSIGNED (type);
7982 comparison_mode = TYPE_MODE (type);
7983 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7985 else
7987 op00 = expand_normal (treeop0);
7988 op01 = const0_rtx;
7989 comparison_code = NE;
7990 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7993 if (GET_MODE (op1) != mode)
7994 op1 = gen_lowpart (mode, op1);
7996 if (GET_MODE (op2) != mode)
7997 op2 = gen_lowpart (mode, op2);
7999 /* Try to emit the conditional move. */
8000 insn = emit_conditional_move (temp, comparison_code,
8001 op00, op01, comparison_mode,
8002 op1, op2, mode,
8003 unsignedp);
8005 /* If we could do the conditional move, emit the sequence,
8006 and return. */
8007 if (insn)
8009 rtx seq = get_insns ();
8010 end_sequence ();
8011 emit_insn (seq);
8012 return convert_modes (orig_mode, mode, temp, 0);
8015 /* Otherwise discard the sequence and fall back to code with
8016 branches. */
8017 end_sequence ();
8018 #endif
8019 return NULL_RTX;
rtx
8023 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8024 enum expand_modifier modifier)
8026 rtx op0, op1, op2, temp;
8027 tree type;
8028 int unsignedp;
8029 enum machine_mode mode;
8030 enum tree_code code = ops->code;
8031 optab this_optab;
8032 rtx subtarget, original_target;
8033 int ignore;
8034 bool reduce_bit_field;
8035 location_t loc = ops->location;
8036 tree treeop0, treeop1, treeop2;
8037 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8038 ? reduce_to_bit_field_precision ((expr), \
8039 target, \
8040 type) \
8041 : (expr))
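/* For example, when expanding arithmetic on an unsigned bit-field type
   of precision 3 held in SImode, REDUCE_BIT_FIELD masks the SImode
   result back down to its low 3 bits; for signed types
   reduce_to_bit_field_precision sign-extends instead.  */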
8043 type = ops->type;
8044 mode = TYPE_MODE (type);
8045 unsignedp = TYPE_UNSIGNED (type);
8047 treeop0 = ops->op0;
8048 treeop1 = ops->op1;
8049 treeop2 = ops->op2;
8051 /* We should be called only on simple (binary or unary) expressions,
8052 exactly those that are valid in gimple expressions that aren't
8053 GIMPLE_SINGLE_RHS (or invalid). */
8054 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8055 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8056 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8058 ignore = (target == const0_rtx
8059 || ((CONVERT_EXPR_CODE_P (code)
8060 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8061 && TREE_CODE (type) == VOID_TYPE));
8063 /* We should be called only if we need the result. */
8064 gcc_assert (!ignore);
8066 /* An operation in what may be a bit-field type needs the
8067 result to be reduced to the precision of the bit-field type,
8068 which is narrower than that of the type's mode. */
8069 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8070 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8072 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8073 target = 0;
8075 /* Use subtarget as the target for operand 0 of a binary operation. */
8076 subtarget = get_subtarget (target);
8077 original_target = target;
8079 switch (code)
8081 case NON_LVALUE_EXPR:
8082 case PAREN_EXPR:
8083 CASE_CONVERT:
8084 if (treeop0 == error_mark_node)
8085 return const0_rtx;
8087 if (TREE_CODE (type) == UNION_TYPE)
8089 tree valtype = TREE_TYPE (treeop0);
8091 /* If both input and output are BLKmode, this conversion isn't doing
8092 anything except possibly changing memory attribute. */
8093 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8095 rtx result = expand_expr (treeop0, target, tmode,
8096 modifier);
8098 result = copy_rtx (result);
8099 set_mem_attributes (result, type, 0);
8100 return result;
8103 if (target == 0)
8105 if (TYPE_MODE (type) != BLKmode)
8106 target = gen_reg_rtx (TYPE_MODE (type));
8107 else
8108 target = assign_temp (type, 1, 1);
8111 if (MEM_P (target))
8112 /* Store data into beginning of memory target. */
8113 store_expr (treeop0,
8114 adjust_address (target, TYPE_MODE (valtype), 0),
8115 modifier == EXPAND_STACK_PARM,
8116 false);
8118 else
8120 gcc_assert (REG_P (target));
8122 /* Store this field into a union of the proper type. */
8123 store_field (target,
8124 MIN ((int_size_in_bytes (TREE_TYPE
8125 (treeop0))
8126 * BITS_PER_UNIT),
8127 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8128 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8131 /* Return the entire union. */
8132 return target;
8135 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8137 op0 = expand_expr (treeop0, target, VOIDmode,
8138 modifier);
8140 /* If the signedness of the conversion differs and OP0 is
8141 a promoted SUBREG, clear that indication since we now
8142 have to do the proper extension. */
8143 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8144 && GET_CODE (op0) == SUBREG)
8145 SUBREG_PROMOTED_VAR_P (op0) = 0;
8147 return REDUCE_BIT_FIELD (op0);
8150 op0 = expand_expr (treeop0, NULL_RTX, mode,
8151 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8152 if (GET_MODE (op0) == mode)
8155 /* If OP0 is a constant, just convert it into the proper mode. */
8156 else if (CONSTANT_P (op0))
8158 tree inner_type = TREE_TYPE (treeop0);
8159 enum machine_mode inner_mode = GET_MODE (op0);
8161 if (inner_mode == VOIDmode)
8162 inner_mode = TYPE_MODE (inner_type);
8164 if (modifier == EXPAND_INITIALIZER)
8165 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8166 subreg_lowpart_offset (mode,
8167 inner_mode));
8168 else
8169 op0 = convert_modes (mode, inner_mode, op0,
8170 TYPE_UNSIGNED (inner_type));
8173 else if (modifier == EXPAND_INITIALIZER)
8174 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8176 else if (target == 0)
8177 op0 = convert_to_mode (mode, op0,
8178 TYPE_UNSIGNED (TREE_TYPE
8179 (treeop0)));
8180 else
8182 convert_move (target, op0,
8183 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8184 op0 = target;
8187 return REDUCE_BIT_FIELD (op0);
8189 case ADDR_SPACE_CONVERT_EXPR:
8191 tree treeop0_type = TREE_TYPE (treeop0);
8192 addr_space_t as_to;
8193 addr_space_t as_from;
8195 gcc_assert (POINTER_TYPE_P (type));
8196 gcc_assert (POINTER_TYPE_P (treeop0_type));
8198 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8199 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8201 /* Conversions between pointers to the same address space should
8202 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8203 gcc_assert (as_to != as_from);
8205 /* Ask target code to handle conversion between pointers
8206 to overlapping address spaces. */
8207 if (targetm.addr_space.subset_p (as_to, as_from)
8208 || targetm.addr_space.subset_p (as_from, as_to))
8210 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8211 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8212 gcc_assert (op0);
8213 return op0;
8216 /* For disjoint address spaces, converting anything but
8217 a null pointer invokes undefined behaviour. We simply
8218 always return a null pointer here. */
8219 return CONST0_RTX (mode);
8222 case POINTER_PLUS_EXPR:
8223 /* Even though the sizetype mode and the pointer's mode can be different,
8224 expand is able to handle this correctly and get the correct result out
8225 of the PLUS_EXPR code. */
8226 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8227 if sizetype precision is smaller than pointer precision. */
8228 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8229 treeop1 = fold_convert_loc (loc, type,
8230 fold_convert_loc (loc, ssizetype,
8231 treeop1));
8232 /* If sizetype precision is larger than pointer precision, truncate the
8233 offset to have matching modes. */
8234 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8235 treeop1 = fold_convert_loc (loc, type, treeop1);
8237 case PLUS_EXPR:
8238 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8239 something else, make sure we add the register to the constant and
8240 then to the other thing. This case can occur during strength
8241 reduction and doing it this way will produce better code if the
8242 frame pointer or argument pointer is eliminated.
8244 fold-const.c will ensure that the constant is always in the inner
8245 PLUS_EXPR, so the only case we need to do anything about is if
8246 sp, ap, or fp is our second argument, in which case we must swap
8247 the innermost first argument and our second argument. */
8249 if (TREE_CODE (treeop0) == PLUS_EXPR
8250 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8251 && TREE_CODE (treeop1) == VAR_DECL
8252 && (DECL_RTL (treeop1) == frame_pointer_rtx
8253 || DECL_RTL (treeop1) == stack_pointer_rtx
8254 || DECL_RTL (treeop1) == arg_pointer_rtx))
8256 gcc_unreachable ();
8259 /* If the result is to be ptr_mode and we are adding an integer to
8260 something, we might be forming a constant. So try to use
8261 plus_constant. If it produces a sum and we can't accept it,
8262 use force_operand. This allows P = &ARR[const] to generate
8263 efficient code on machines where a SYMBOL_REF is not a valid
8264 address.
8266 If this is an EXPAND_SUM call, always return the sum. */
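/* Illustration: for P = &ARR[10] the integer constant can be folded
   into the address with plus_constant, yielding something like
   (plus (symbol_ref ARR) (const_int 40)), assuming 4-byte array
   elements, rather than an explicit add instruction.  */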
8267 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8268 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8270 if (modifier == EXPAND_STACK_PARM)
8271 target = 0;
8272 if (TREE_CODE (treeop0) == INTEGER_CST
8273 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8274 && TREE_CONSTANT (treeop1))
8276 rtx constant_part;
8277 HOST_WIDE_INT wc;
8278 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8280 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8281 EXPAND_SUM);
8282 /* Use wi::shwi to ensure that the constant is
8283 truncated according to the mode of OP1, then sign extended
8284 to a HOST_WIDE_INT. Using the constant directly can result
8285 in non-canonical RTL in a 64x32 cross compile. */
8286 wc = TREE_INT_CST_LOW (treeop0);
8287 constant_part =
8288 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8289 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8290 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8291 op1 = force_operand (op1, target);
8292 return REDUCE_BIT_FIELD (op1);
8295 else if (TREE_CODE (treeop1) == INTEGER_CST
8296 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8297 && TREE_CONSTANT (treeop0))
8299 rtx constant_part;
8300 HOST_WIDE_INT wc;
8301 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8303 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8304 (modifier == EXPAND_INITIALIZER
8305 ? EXPAND_INITIALIZER : EXPAND_SUM));
8306 if (! CONSTANT_P (op0))
8308 op1 = expand_expr (treeop1, NULL_RTX,
8309 VOIDmode, modifier);
8310 /* Return a PLUS if modifier says it's OK. */
8311 if (modifier == EXPAND_SUM
8312 || modifier == EXPAND_INITIALIZER)
8313 return simplify_gen_binary (PLUS, mode, op0, op1);
8314 goto binop2;
8316 /* Use wi::shwi to ensure that the constant is
8317 truncated according to the mode of OP1, then sign extended
8318 to a HOST_WIDE_INT. Using the constant directly can result
8319 in non-canonical RTL in a 64x32 cross compile. */
8320 wc = TREE_INT_CST_LOW (treeop1);
8321 constant_part
8322 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8323 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8324 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8325 op0 = force_operand (op0, target);
8326 return REDUCE_BIT_FIELD (op0);
8330 /* Use TER to expand pointer addition of a negated value
8331 as pointer subtraction. */
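/* For example, if the offset SSA name in P + OFF is defined by
   OFF = -N, the addition is rewritten below as the MINUS_EXPR P - N,
   so a subtract instruction is used instead of a negate followed by
   an add.  */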
8332 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8333 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8334 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8335 && TREE_CODE (treeop1) == SSA_NAME
8336 && TYPE_MODE (TREE_TYPE (treeop0))
8337 == TYPE_MODE (TREE_TYPE (treeop1)))
8339 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8340 if (def)
8342 treeop1 = gimple_assign_rhs1 (def);
8343 code = MINUS_EXPR;
8344 goto do_minus;
8348 /* No sense saving up arithmetic to be done
8349 if it's all in the wrong mode to form part of an address.
8350 And force_operand won't know whether to sign-extend or
8351 zero-extend. */
8352 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8353 || mode != ptr_mode)
8355 expand_operands (treeop0, treeop1,
8356 subtarget, &op0, &op1, EXPAND_NORMAL);
8357 if (op0 == const0_rtx)
8358 return op1;
8359 if (op1 == const0_rtx)
8360 return op0;
8361 goto binop2;
8364 expand_operands (treeop0, treeop1,
8365 subtarget, &op0, &op1, modifier);
8366 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8368 case MINUS_EXPR:
8369 do_minus:
8370 /* For initializers, we are allowed to return a MINUS of two
8371 symbolic constants. Here we handle all cases when both operands
8372 are constant. */
8373 /* Handle difference of two symbolic constants,
8374 for the sake of an initializer. */
8375 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8376 && really_constant_p (treeop0)
8377 && really_constant_p (treeop1))
8379 expand_operands (treeop0, treeop1,
8380 NULL_RTX, &op0, &op1, modifier);
8382 /* If the last operand is a CONST_INT, use plus_constant of
8383 the negated constant. Else make the MINUS. */
8384 if (CONST_INT_P (op1))
8385 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8386 -INTVAL (op1)));
8387 else
8388 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8391 /* No sense saving up arithmetic to be done
8392 if it's all in the wrong mode to form part of an address.
8393 And force_operand won't know whether to sign-extend or
8394 zero-extend. */
8395 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8396 || mode != ptr_mode)
8397 goto binop;
8399 expand_operands (treeop0, treeop1,
8400 subtarget, &op0, &op1, modifier);
8402 /* Convert A - const to A + (-const). */
8403 if (CONST_INT_P (op1))
8405 op1 = negate_rtx (mode, op1);
8406 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8409 goto binop2;
8411 case WIDEN_MULT_PLUS_EXPR:
8412 case WIDEN_MULT_MINUS_EXPR:
8413 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8414 op2 = expand_normal (treeop2);
8415 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8416 target, unsignedp);
8417 return target;
8419 case WIDEN_MULT_EXPR:
8420 /* If first operand is constant, swap them.
8421 Thus the following special case checks need only
8422 check the second operand. */
8423 if (TREE_CODE (treeop0) == INTEGER_CST)
8425 tree t1 = treeop0;
8426 treeop0 = treeop1;
8427 treeop1 = t1;
8430 /* First, check if we have a multiplication of one signed and one
8431 unsigned operand. */
8432 if (TREE_CODE (treeop1) != INTEGER_CST
8433 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8434 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8436 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8437 this_optab = usmul_widen_optab;
8438 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8439 != CODE_FOR_nothing)
8441 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8442 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8443 EXPAND_NORMAL);
8444 else
8445 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8446 EXPAND_NORMAL);
8447 /* op0 and op1 might still be constant, despite the above
8448 != INTEGER_CST check. Handle it. */
8449 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8451 op0 = convert_modes (innermode, mode, op0, true);
8452 op1 = convert_modes (innermode, mode, op1, false);
8453 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8454 target, unsignedp));
8456 goto binop3;
8459 /* Check for a multiplication with matching signedness. */
8460 else if ((TREE_CODE (treeop1) == INTEGER_CST
8461 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8462 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8463 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8465 tree op0type = TREE_TYPE (treeop0);
8466 enum machine_mode innermode = TYPE_MODE (op0type);
8467 bool zextend_p = TYPE_UNSIGNED (op0type);
8468 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8469 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8471 if (TREE_CODE (treeop0) != INTEGER_CST)
8473 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8474 != CODE_FOR_nothing)
8476 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8477 EXPAND_NORMAL);
8478 /* op0 and op1 might still be constant, despite the above
8479 != INTEGER_CST check. Handle it. */
8480 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8482 widen_mult_const:
8483 op0 = convert_modes (innermode, mode, op0, zextend_p);
8485 op1 = convert_modes (innermode, mode, op1,
8486 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8487 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8488 target,
8489 unsignedp));
8491 temp = expand_widening_mult (mode, op0, op1, target,
8492 unsignedp, this_optab);
8493 return REDUCE_BIT_FIELD (temp);
8495 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8496 != CODE_FOR_nothing
8497 && innermode == word_mode)
8499 rtx htem, hipart;
8500 op0 = expand_normal (treeop0);
8501 if (TREE_CODE (treeop1) == INTEGER_CST)
8502 op1 = convert_modes (innermode, mode,
8503 expand_normal (treeop1),
8504 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8505 else
8506 op1 = expand_normal (treeop1);
8507 /* op0 and op1 might still be constant, despite the above
8508 != INTEGER_CST check. Handle it. */
8509 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8510 goto widen_mult_const;
8511 temp = expand_binop (mode, other_optab, op0, op1, target,
8512 unsignedp, OPTAB_LIB_WIDEN);
8513 hipart = gen_highpart (innermode, temp);
8514 htem = expand_mult_highpart_adjust (innermode, hipart,
8515 op0, op1, hipart,
8516 zextend_p);
8517 if (htem != hipart)
8518 emit_move_insn (hipart, htem);
8519 return REDUCE_BIT_FIELD (temp);
8523 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8524 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8525 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8526 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8528 case FMA_EXPR:
8530 optab opt = fma_optab;
8531 gimple def0, def2;
8533 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8534 call. */
8535 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8537 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8538 tree call_expr;
8540 gcc_assert (fn != NULL_TREE);
8541 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8542 return expand_builtin (call_expr, target, subtarget, mode, false);
8545 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8546 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8548 op0 = op2 = NULL;
8550 if (def0 && def2
8551 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8553 opt = fnms_optab;
8554 op0 = expand_normal (gimple_assign_rhs1 (def0));
8555 op2 = expand_normal (gimple_assign_rhs1 (def2));
8557 else if (def0
8558 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8560 opt = fnma_optab;
8561 op0 = expand_normal (gimple_assign_rhs1 (def0));
8563 else if (def2
8564 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8566 opt = fms_optab;
8567 op2 = expand_normal (gimple_assign_rhs1 (def2));
8570 if (op0 == NULL)
8571 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8572 if (op2 == NULL)
8573 op2 = expand_normal (treeop2);
8574 op1 = expand_normal (treeop1);
8576 return expand_ternary_op (TYPE_MODE (type), opt,
8577 op0, op1, op2, target, 0);
8580 case MULT_EXPR:
8581 /* If this is a fixed-point operation, then we cannot use the code
8582 below because "expand_mult" doesn't support sat/no-sat fixed-point
8583 multiplications. */
8584 if (ALL_FIXED_POINT_MODE_P (mode))
8585 goto binop;
8587 /* If first operand is constant, swap them.
8588 Thus the following special case checks need only
8589 check the second operand. */
8590 if (TREE_CODE (treeop0) == INTEGER_CST)
8592 tree t1 = treeop0;
8593 treeop0 = treeop1;
8594 treeop1 = t1;
8597 /* Attempt to return something suitable for generating an
8598 indexed address, for machines that support that. */
8600 if (modifier == EXPAND_SUM && mode == ptr_mode
8601 && tree_fits_shwi_p (treeop1))
8603 tree exp1 = treeop1;
8605 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8606 EXPAND_SUM);
8608 if (!REG_P (op0))
8609 op0 = force_operand (op0, NULL_RTX);
8610 if (!REG_P (op0))
8611 op0 = copy_to_mode_reg (mode, op0);
8613 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8614 gen_int_mode (tree_to_shwi (exp1),
8615 TYPE_MODE (TREE_TYPE (exp1)))));
8618 if (modifier == EXPAND_STACK_PARM)
8619 target = 0;
8621 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8622 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8624 case TRUNC_DIV_EXPR:
8625 case FLOOR_DIV_EXPR:
8626 case CEIL_DIV_EXPR:
8627 case ROUND_DIV_EXPR:
8628 case EXACT_DIV_EXPR:
8629 /* If this is a fixed-point operation, then we cannot use the code
8630 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8631 divisions. */
8632 if (ALL_FIXED_POINT_MODE_P (mode))
8633 goto binop;
8635 if (modifier == EXPAND_STACK_PARM)
8636 target = 0;
8637 /* Possible optimization: compute the dividend with EXPAND_SUM
8638 then, if the divisor is constant, we can optimize the case
8639 where some terms of the dividend have coefficients divisible by it. */
8640 expand_operands (treeop0, treeop1,
8641 subtarget, &op0, &op1, EXPAND_NORMAL);
8642 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8644 case RDIV_EXPR:
8645 goto binop;
8647 case MULT_HIGHPART_EXPR:
8648 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8649 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8650 gcc_assert (temp);
8651 return temp;
8653 case TRUNC_MOD_EXPR:
8654 case FLOOR_MOD_EXPR:
8655 case CEIL_MOD_EXPR:
8656 case ROUND_MOD_EXPR:
8657 if (modifier == EXPAND_STACK_PARM)
8658 target = 0;
8659 expand_operands (treeop0, treeop1,
8660 subtarget, &op0, &op1, EXPAND_NORMAL);
8661 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8663 case FIXED_CONVERT_EXPR:
8664 op0 = expand_normal (treeop0);
8665 if (target == 0 || modifier == EXPAND_STACK_PARM)
8666 target = gen_reg_rtx (mode);
8668 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8669 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8670 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8671 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8672 else
8673 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8674 return target;
8676 case FIX_TRUNC_EXPR:
8677 op0 = expand_normal (treeop0);
8678 if (target == 0 || modifier == EXPAND_STACK_PARM)
8679 target = gen_reg_rtx (mode);
8680 expand_fix (target, op0, unsignedp);
8681 return target;
8683 case FLOAT_EXPR:
8684 op0 = expand_normal (treeop0);
8685 if (target == 0 || modifier == EXPAND_STACK_PARM)
8686 target = gen_reg_rtx (mode);
8687 /* expand_float can't figure out what to do if FROM has VOIDmode.
8688 So give it the correct mode. With -O, cse will optimize this. */
8689 if (GET_MODE (op0) == VOIDmode)
8690 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8691 op0);
8692 expand_float (target, op0,
8693 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8694 return target;
8696 case NEGATE_EXPR:
8697 op0 = expand_expr (treeop0, subtarget,
8698 VOIDmode, EXPAND_NORMAL);
8699 if (modifier == EXPAND_STACK_PARM)
8700 target = 0;
8701 temp = expand_unop (mode,
8702 optab_for_tree_code (NEGATE_EXPR, type,
8703 optab_default),
8704 op0, target, 0);
8705 gcc_assert (temp);
8706 return REDUCE_BIT_FIELD (temp);
8708 case ABS_EXPR:
8709 op0 = expand_expr (treeop0, subtarget,
8710 VOIDmode, EXPAND_NORMAL);
8711 if (modifier == EXPAND_STACK_PARM)
8712 target = 0;
8714 /* ABS_EXPR is not valid for complex arguments. */
8715 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8716 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8718 /* Unsigned abs is simply the operand. Testing here means we don't
8719 risk generating incorrect code below. */
8720 if (TYPE_UNSIGNED (type))
8721 return op0;
8723 return expand_abs (mode, op0, target, unsignedp,
8724 safe_from_p (target, treeop0, 1));
8726 case MAX_EXPR:
8727 case MIN_EXPR:
8728 target = original_target;
8729 if (target == 0
8730 || modifier == EXPAND_STACK_PARM
8731 || (MEM_P (target) && MEM_VOLATILE_P (target))
8732 || GET_MODE (target) != mode
8733 || (REG_P (target)
8734 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8735 target = gen_reg_rtx (mode);
8736 expand_operands (treeop0, treeop1,
8737 target, &op0, &op1, EXPAND_NORMAL);
8739 /* First try to do it with a special MIN or MAX instruction.
8740 If that does not win, use a conditional jump to select the proper
8741 value. */
8742 this_optab = optab_for_tree_code (code, type, optab_default);
8743 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8744 OPTAB_WIDEN);
8745 if (temp != 0)
8746 return temp;
8748 /* At this point, a MEM target is no longer useful; we will get better
8749 code without it. */
8751 if (! REG_P (target))
8752 target = gen_reg_rtx (mode);
8754 /* If op1 was placed in target, swap op0 and op1. */
8755 if (target != op0 && target == op1)
8757 temp = op0;
8758 op0 = op1;
8759 op1 = temp;
8762 /* We generate better code and avoid problems with op1 mentioning
8763 target by forcing op1 into a pseudo if it isn't a constant. */
8764 if (! CONSTANT_P (op1))
8765 op1 = force_reg (mode, op1);
8768 enum rtx_code comparison_code;
8769 rtx cmpop1 = op1;
8771 if (code == MAX_EXPR)
8772 comparison_code = unsignedp ? GEU : GE;
8773 else
8774 comparison_code = unsignedp ? LEU : LE;
8776 /* Canonicalize to comparisons against 0. */
8777 if (op1 == const1_rtx)
8779 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8780 or (a != 0 ? a : 1) for unsigned.
8781 For MIN we are safe converting (a <= 1 ? a : 1)
8782 into (a <= 0 ? a : 1) */
8783 cmpop1 = const0_rtx;
8784 if (code == MAX_EXPR)
8785 comparison_code = unsignedp ? NE : GT;
8787 if (op1 == constm1_rtx && !unsignedp)
8789 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8790 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8791 cmpop1 = const0_rtx;
8792 if (code == MIN_EXPR)
8793 comparison_code = LT;
8795 #ifdef HAVE_conditional_move
8796 /* Use a conditional move if possible. */
8797 if (can_conditionally_move_p (mode))
8799 rtx insn;
8801 start_sequence ();
8803 /* Try to emit the conditional move. */
8804 insn = emit_conditional_move (target, comparison_code,
8805 op0, cmpop1, mode,
8806 op0, op1, mode,
8807 unsignedp);
8809 /* If we could do the conditional move, emit the sequence,
8810 and return. */
8811 if (insn)
8813 rtx seq = get_insns ();
8814 end_sequence ();
8815 emit_insn (seq);
8816 return target;
8819 /* Otherwise discard the sequence and fall back to code with
8820 branches. */
8821 end_sequence ();
8823 #endif
8824 if (target != op0)
8825 emit_move_insn (target, op0);
8827 temp = gen_label_rtx ();
8828 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8829 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8830 -1);
8832 emit_move_insn (target, op1);
8833 emit_label (temp);
8834 return target;
8836 case BIT_NOT_EXPR:
8837 op0 = expand_expr (treeop0, subtarget,
8838 VOIDmode, EXPAND_NORMAL);
8839 if (modifier == EXPAND_STACK_PARM)
8840 target = 0;
8841 /* In case we have to reduce the result to bitfield precision
8842 for an unsigned bit-field, expand this as XOR with the proper constant
8843 instead. */
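/* E.g. for an unsigned bit-field type of precision 3, ~X is expanded
   as X ^ 7, which complements the value and truncates it to the
   field's precision in a single operation.  */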
8844 if (reduce_bit_field && TYPE_UNSIGNED (type))
8846 wide_int mask = wi::mask (TYPE_PRECISION (type),
8847 false, GET_MODE_PRECISION (mode));
8849 temp = expand_binop (mode, xor_optab, op0,
8850 immed_wide_int_const (mask, mode),
8851 target, 1, OPTAB_LIB_WIDEN);
8853 else
8854 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8855 gcc_assert (temp);
8856 return temp;
8858 /* ??? Can optimize bitwise operations with one arg constant.
8859 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8860 and (a bitwise1 b) bitwise2 b (etc)
8861 but that is probably not worthwhile. */
8863 case BIT_AND_EXPR:
8864 case BIT_IOR_EXPR:
8865 case BIT_XOR_EXPR:
8866 goto binop;
8868 case LROTATE_EXPR:
8869 case RROTATE_EXPR:
8870 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8871 || (GET_MODE_PRECISION (TYPE_MODE (type))
8872 == TYPE_PRECISION (type)));
8873 /* fall through */
8875 case LSHIFT_EXPR:
8876 case RSHIFT_EXPR:
8877 /* If this is a fixed-point operation, then we cannot use the code
8878 below because "expand_shift" doesn't support sat/no-sat fixed-point
8879 shifts. */
8880 if (ALL_FIXED_POINT_MODE_P (mode))
8881 goto binop;
8883 if (! safe_from_p (subtarget, treeop1, 1))
8884 subtarget = 0;
8885 if (modifier == EXPAND_STACK_PARM)
8886 target = 0;
8887 op0 = expand_expr (treeop0, subtarget,
8888 VOIDmode, EXPAND_NORMAL);
8889 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8890 unsignedp);
8891 if (code == LSHIFT_EXPR)
8892 temp = REDUCE_BIT_FIELD (temp);
8893 return temp;
8895 /* Could determine the answer when only additive constants differ. Also,
8896 the addition of one can be handled by changing the condition. */
8897 case LT_EXPR:
8898 case LE_EXPR:
8899 case GT_EXPR:
8900 case GE_EXPR:
8901 case EQ_EXPR:
8902 case NE_EXPR:
8903 case UNORDERED_EXPR:
8904 case ORDERED_EXPR:
8905 case UNLT_EXPR:
8906 case UNLE_EXPR:
8907 case UNGT_EXPR:
8908 case UNGE_EXPR:
8909 case UNEQ_EXPR:
8910 case LTGT_EXPR:
8911 temp = do_store_flag (ops,
8912 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8913 tmode != VOIDmode ? tmode : mode);
8914 if (temp)
8915 return temp;
8917 /* Use a compare and a jump for BLKmode comparisons, or for function
8918 type comparisons if HAVE_canonicalize_funcptr_for_compare.
8920 if ((target == 0
8921 || modifier == EXPAND_STACK_PARM
8922 || ! safe_from_p (target, treeop0, 1)
8923 || ! safe_from_p (target, treeop1, 1)
8924 /* Make sure we don't have a hard reg (such as function's return
8925 value) live across basic blocks, if not optimizing. */
8926 || (!optimize && REG_P (target)
8927 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8928 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8930 emit_move_insn (target, const0_rtx);
8932 op1 = gen_label_rtx ();
8933 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8935 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8936 emit_move_insn (target, constm1_rtx);
8937 else
8938 emit_move_insn (target, const1_rtx);
8940 emit_label (op1);
8941 return target;
8943 case COMPLEX_EXPR:
8944 /* Get the rtx code of the operands. */
8945 op0 = expand_normal (treeop0);
8946 op1 = expand_normal (treeop1);
8948 if (!target)
8949 target = gen_reg_rtx (TYPE_MODE (type));
8950 else
8951 /* If target overlaps with op1, then either we need to force
8952 op1 into a pseudo (if target also overlaps with op0),
8953 or write the complex parts in reverse order. */
8954 switch (GET_CODE (target))
8956 case CONCAT:
8957 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8959 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8961 complex_expr_force_op1:
8962 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8963 emit_move_insn (temp, op1);
8964 op1 = temp;
8965 break;
8967 complex_expr_swap_order:
8968 /* Move the imaginary (op1) and real (op0) parts to their
8969 location. */
8970 write_complex_part (target, op1, true);
8971 write_complex_part (target, op0, false);
8973 return target;
8975 break;
8976 case MEM:
8977 temp = adjust_address_nv (target,
8978 GET_MODE_INNER (GET_MODE (target)), 0);
8979 if (reg_overlap_mentioned_p (temp, op1))
8981 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8982 temp = adjust_address_nv (target, imode,
8983 GET_MODE_SIZE (imode));
8984 if (reg_overlap_mentioned_p (temp, op0))
8985 goto complex_expr_force_op1;
8986 goto complex_expr_swap_order;
8988 break;
8989 default:
8990 if (reg_overlap_mentioned_p (target, op1))
8992 if (reg_overlap_mentioned_p (target, op0))
8993 goto complex_expr_force_op1;
8994 goto complex_expr_swap_order;
8996 break;
8999 /* Move the real (op0) and imaginary (op1) parts to their location. */
9000 write_complex_part (target, op0, false);
9001 write_complex_part (target, op1, true);
9003 return target;
9005 case WIDEN_SUM_EXPR:
9007 tree oprnd0 = treeop0;
9008 tree oprnd1 = treeop1;
9010 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9011 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9012 target, unsignedp);
9013 return target;
9016 case REDUC_MAX_EXPR:
9017 case REDUC_MIN_EXPR:
9018 case REDUC_PLUS_EXPR:
9020 op0 = expand_normal (treeop0);
9021 this_optab = optab_for_tree_code (code, type, optab_default);
9022 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9023 gcc_assert (temp);
9024 return temp;
9027 case VEC_LSHIFT_EXPR:
9028 case VEC_RSHIFT_EXPR:
9030 target = expand_vec_shift_expr (ops, target);
9031 return target;
9034 case VEC_UNPACK_HI_EXPR:
9035 case VEC_UNPACK_LO_EXPR:
9037 op0 = expand_normal (treeop0);
9038 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9039 target, unsignedp);
9040 gcc_assert (temp);
9041 return temp;
9044 case VEC_UNPACK_FLOAT_HI_EXPR:
9045 case VEC_UNPACK_FLOAT_LO_EXPR:
9047 op0 = expand_normal (treeop0);
9048 /* The signedness is determined from the input operand. */
9049 temp = expand_widen_pattern_expr
9050 (ops, op0, NULL_RTX, NULL_RTX,
9051 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9053 gcc_assert (temp);
9054 return temp;
9057 case VEC_WIDEN_MULT_HI_EXPR:
9058 case VEC_WIDEN_MULT_LO_EXPR:
9059 case VEC_WIDEN_MULT_EVEN_EXPR:
9060 case VEC_WIDEN_MULT_ODD_EXPR:
9061 case VEC_WIDEN_LSHIFT_HI_EXPR:
9062 case VEC_WIDEN_LSHIFT_LO_EXPR:
9063 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9064 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9065 target, unsignedp);
9066 gcc_assert (target);
9067 return target;
9069 case VEC_PACK_TRUNC_EXPR:
9070 case VEC_PACK_SAT_EXPR:
9071 case VEC_PACK_FIX_TRUNC_EXPR:
9072 mode = TYPE_MODE (TREE_TYPE (treeop0));
9073 goto binop;
9075 case VEC_PERM_EXPR:
9076 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9077 op2 = expand_normal (treeop2);
9079 /* Careful here: if the target doesn't support integral vector modes,
9080 a constant selection vector could wind up smooshed into a normal
9081 integral constant. */
9082 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9084 tree sel_type = TREE_TYPE (treeop2);
9085 enum machine_mode vmode
9086 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9087 TYPE_VECTOR_SUBPARTS (sel_type));
9088 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9089 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9090 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9092 else
9093 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9095 temp = expand_vec_perm (mode, op0, op1, op2, target);
9096 gcc_assert (temp);
9097 return temp;
9099 case DOT_PROD_EXPR:
9101 tree oprnd0 = treeop0;
9102 tree oprnd1 = treeop1;
9103 tree oprnd2 = treeop2;
9104 rtx op2;
9106 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9107 op2 = expand_normal (oprnd2);
9108 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9109 target, unsignedp);
9110 return target;
9113 case SAD_EXPR:
9115 tree oprnd0 = treeop0;
9116 tree oprnd1 = treeop1;
9117 tree oprnd2 = treeop2;
9118 rtx op2;
9120 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9121 op2 = expand_normal (oprnd2);
9122 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9123 target, unsignedp);
9124 return target;
9127 case REALIGN_LOAD_EXPR:
9129 tree oprnd0 = treeop0;
9130 tree oprnd1 = treeop1;
9131 tree oprnd2 = treeop2;
9132 rtx op2;
9134 this_optab = optab_for_tree_code (code, type, optab_default);
9135 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9136 op2 = expand_normal (oprnd2);
9137 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9138 target, unsignedp);
9139 gcc_assert (temp);
9140 return temp;
9143 case COND_EXPR:
9144 /* A COND_EXPR with its type being VOID_TYPE represents a
9145 conditional jump and is handled in
9146 expand_gimple_cond_expr. */
9147 gcc_assert (!VOID_TYPE_P (type));
9149 /* Note that COND_EXPRs whose type is a structure or union
9150 are required to be constructed to contain assignments of
9151 a temporary variable, so that we can evaluate them here
9152 for side effect only. If type is void, we must do likewise. */
9154 gcc_assert (!TREE_ADDRESSABLE (type)
9155 && !ignore
9156 && TREE_TYPE (treeop1) != void_type_node
9157 && TREE_TYPE (treeop2) != void_type_node);
9159 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9160 if (temp)
9161 return temp;
9163 /* If we are not to produce a result, we have no target. Otherwise,
9164 if a target was specified use it; it will not be used as an
9165 intermediate target unless it is safe. If no target, use a
9166 temporary. */
9168 if (modifier != EXPAND_STACK_PARM
9169 && original_target
9170 && safe_from_p (original_target, treeop0, 1)
9171 && GET_MODE (original_target) == mode
9172 && !MEM_P (original_target))
9173 temp = original_target;
9174 else
9175 temp = assign_temp (type, 0, 1);
9177 do_pending_stack_adjust ();
9178 NO_DEFER_POP;
9179 op0 = gen_label_rtx ();
9180 op1 = gen_label_rtx ();
9181 jumpifnot (treeop0, op0, -1);
9182 store_expr (treeop1, temp,
9183 modifier == EXPAND_STACK_PARM,
9184 false);
9186 emit_jump_insn (gen_jump (op1));
9187 emit_barrier ();
9188 emit_label (op0);
9189 store_expr (treeop2, temp,
9190 modifier == EXPAND_STACK_PARM,
9191 false);
9193 emit_label (op1);
9194 OK_DEFER_POP;
9195 return temp;
9197 case VEC_COND_EXPR:
9198 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9199 return target;
9201 default:
9202 gcc_unreachable ();
9205 /* Here to do an ordinary binary operator. */
9206 binop:
9207 expand_operands (treeop0, treeop1,
9208 subtarget, &op0, &op1, EXPAND_NORMAL);
9209 binop2:
9210 this_optab = optab_for_tree_code (code, type, optab_default);
9211 binop3:
9212 if (modifier == EXPAND_STACK_PARM)
9213 target = 0;
9214 temp = expand_binop (mode, this_optab, op0, op1, target,
9215 unsignedp, OPTAB_LIB_WIDEN);
9216 gcc_assert (temp);
9217 /* Bitwise operations do not need bitfield reduction as we expect their
9218 operands to be properly truncated. */
9219 if (code == BIT_XOR_EXPR
9220 || code == BIT_AND_EXPR
9221 || code == BIT_IOR_EXPR)
9222 return temp;
9223 return REDUCE_BIT_FIELD (temp);
9225 #undef REDUCE_BIT_FIELD
9228 /* Return TRUE if expression STMT is suitable for replacement.
9229 Never consider memory loads as replaceable, because those don't ever lead
9230 into constant expressions. */
9232 static bool
9233 stmt_is_replaceable_p (gimple stmt)
9235 if (ssa_is_replaceable_p (stmt))
9237 /* Don't move around loads. */
9238 if (!gimple_assign_single_p (stmt)
9239 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9240 return true;
9242 return false;
9246 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9247 enum expand_modifier modifier, rtx *alt_rtl,
9248 bool inner_reference_p)
9250 rtx op0, op1, temp, decl_rtl;
9251 tree type;
9252 int unsignedp;
9253 enum machine_mode mode;
9254 enum tree_code code = TREE_CODE (exp);
9255 rtx subtarget, original_target;
9256 int ignore;
9257 tree context;
9258 bool reduce_bit_field;
9259 location_t loc = EXPR_LOCATION (exp);
9260 struct separate_ops ops;
9261 tree treeop0, treeop1, treeop2;
9262 tree ssa_name = NULL_TREE;
9263 gimple g;
9265 type = TREE_TYPE (exp);
9266 mode = TYPE_MODE (type);
9267 unsignedp = TYPE_UNSIGNED (type);
9269 treeop0 = treeop1 = treeop2 = NULL_TREE;
9270 if (!VL_EXP_CLASS_P (exp))
9271 switch (TREE_CODE_LENGTH (code))
9273 default:
9274 case 3: treeop2 = TREE_OPERAND (exp, 2);
9275 case 2: treeop1 = TREE_OPERAND (exp, 1);
9276 case 1: treeop0 = TREE_OPERAND (exp, 0);
9277 case 0: break;
9279 ops.code = code;
9280 ops.type = type;
9281 ops.op0 = treeop0;
9282 ops.op1 = treeop1;
9283 ops.op2 = treeop2;
9284 ops.location = loc;
9286 ignore = (target == const0_rtx
9287 || ((CONVERT_EXPR_CODE_P (code)
9288 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9289 && TREE_CODE (type) == VOID_TYPE));
9291 /* An operation in what may be a bit-field type needs the
9292 result to be reduced to the precision of the bit-field type,
9293 which is narrower than that of the type's mode. */
9294 reduce_bit_field = (!ignore
9295 && INTEGRAL_TYPE_P (type)
9296 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
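  /* E.g. for an unsigned 3-bit bit-field type held in an SImode register,
     7 + 1 leaves 8 in the register; reducing to the bit-field precision
     (see reduce_to_bit_field_precision) masks that back down to 0 so the
     high bits never leak into later uses (illustrative case).  */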
9298 /* If we are going to ignore this result, we need only do something
9299 if there is a side-effect somewhere in the expression. If there
9300 is, short-circuit the most common cases here. Note that we must
9301 not call expand_expr with anything but const0_rtx in case this
9302 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9304 if (ignore)
9306 if (! TREE_SIDE_EFFECTS (exp))
9307 return const0_rtx;
9309 /* Ensure we reference a volatile object even if value is ignored, but
9310 don't do this if all we are doing is taking its address. */
9311 if (TREE_THIS_VOLATILE (exp)
9312 && TREE_CODE (exp) != FUNCTION_DECL
9313 && mode != VOIDmode && mode != BLKmode
9314 && modifier != EXPAND_CONST_ADDRESS)
9316 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9317 if (MEM_P (temp))
9318 copy_to_reg (temp);
9319 return const0_rtx;
9322 if (TREE_CODE_CLASS (code) == tcc_unary
9323 || code == BIT_FIELD_REF
9324 || code == COMPONENT_REF
9325 || code == INDIRECT_REF)
9326 return expand_expr (treeop0, const0_rtx, VOIDmode,
9327 modifier);
9329 else if (TREE_CODE_CLASS (code) == tcc_binary
9330 || TREE_CODE_CLASS (code) == tcc_comparison
9331 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9333 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9334 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9335 return const0_rtx;
9338 target = 0;
9341 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9342 target = 0;
9344 /* Use subtarget as the target for operand 0 of a binary operation. */
9345 subtarget = get_subtarget (target);
9346 original_target = target;
9348 switch (code)
9350 case LABEL_DECL:
9352 tree function = decl_function_context (exp);
9354 temp = label_rtx (exp);
9355 temp = gen_rtx_LABEL_REF (Pmode, temp);
9357 if (function != current_function_decl
9358 && function != 0)
9359 LABEL_REF_NONLOCAL_P (temp) = 1;
9361 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9362 return temp;
9365 case SSA_NAME:
9366 /* ??? ivopts calls expander, without any preparation from
9367 out-of-ssa. So fake instructions as if this was an access to the
9368 base variable. This unnecessarily allocates a pseudo, see how we can
9369 reuse it, if partition base vars have it set already. */
9370 if (!currently_expanding_to_rtl)
9372 tree var = SSA_NAME_VAR (exp);
9373 if (var && DECL_RTL_SET_P (var))
9374 return DECL_RTL (var);
9375 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9376 LAST_VIRTUAL_REGISTER + 1);
9379 g = get_gimple_for_ssa_name (exp);
9380 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9381 if (g == NULL
9382 && modifier == EXPAND_INITIALIZER
9383 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9384 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9385 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9386 g = SSA_NAME_DEF_STMT (exp);
9387 if (g)
9389 rtx r;
9390 ops.code = gimple_assign_rhs_code (g);
9391 switch (get_gimple_rhs_class (ops.code))
9393 case GIMPLE_TERNARY_RHS:
9394 ops.op2 = gimple_assign_rhs3 (g);
9395 /* Fallthru */
9396 case GIMPLE_BINARY_RHS:
9397 ops.op1 = gimple_assign_rhs2 (g);
9398 /* Fallthru */
9399 case GIMPLE_UNARY_RHS:
9400 ops.op0 = gimple_assign_rhs1 (g);
9401 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9402 ops.location = gimple_location (g);
9403 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9404 break;
9405 case GIMPLE_SINGLE_RHS:
9407 location_t saved_loc = curr_insn_location ();
9408 set_curr_insn_location (gimple_location (g));
9409 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9410 tmode, modifier, NULL, inner_reference_p);
9411 set_curr_insn_location (saved_loc);
9412 break;
9414 default:
9415 gcc_unreachable ();
9417 if (REG_P (r) && !REG_EXPR (r))
9418 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9419 return r;
9422 ssa_name = exp;
9423 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9424 exp = SSA_NAME_VAR (ssa_name);
9425 goto expand_decl_rtl;
9427 case PARM_DECL:
9428 case VAR_DECL:
9429 /* If a static var's type was incomplete when the decl was written,
9430 but the type is complete now, lay out the decl now. */
9431 if (DECL_SIZE (exp) == 0
9432 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9433 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9434 layout_decl (exp, 0);
9436 /* ... fall through ... */
9438 case FUNCTION_DECL:
9439 case RESULT_DECL:
9440 decl_rtl = DECL_RTL (exp);
9441 expand_decl_rtl:
9442 gcc_assert (decl_rtl);
9443 decl_rtl = copy_rtx (decl_rtl);
9444 /* Record writes to register variables. */
9445 if (modifier == EXPAND_WRITE
9446 && REG_P (decl_rtl)
9447 && HARD_REGISTER_P (decl_rtl))
9448 add_to_hard_reg_set (&crtl->asm_clobbers,
9449 GET_MODE (decl_rtl), REGNO (decl_rtl));
9451 /* Ensure variable marked as used even if it doesn't go through
9452 a parser. If it hasn't been used yet, write out an external
9453 definition. */
9454 TREE_USED (exp) = 1;
9456 /* Show we haven't gotten RTL for this yet. */
9457 temp = 0;
9459 /* Variables inherited from containing functions should have
9460 been lowered by this point. */
9461 context = decl_function_context (exp);
9462 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9463 || context == current_function_decl
9464 || TREE_STATIC (exp)
9465 || DECL_EXTERNAL (exp)
9466 /* ??? C++ creates functions that are not TREE_STATIC. */
9467 || TREE_CODE (exp) == FUNCTION_DECL);
9469 /* This is the case of an array whose size is to be determined
9470 from its initializer, while the initializer is still being parsed.
9471 ??? We aren't parsing while expanding anymore. */
9473 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9474 temp = validize_mem (decl_rtl);
9476 /* If DECL_RTL is memory, we are in the normal case and the
9477 address is not valid, get the address into a register. */
9479 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9481 if (alt_rtl)
9482 *alt_rtl = decl_rtl;
9483 decl_rtl = use_anchored_address (decl_rtl);
9484 if (modifier != EXPAND_CONST_ADDRESS
9485 && modifier != EXPAND_SUM
9486 && !memory_address_addr_space_p (DECL_MODE (exp),
9487 XEXP (decl_rtl, 0),
9488 MEM_ADDR_SPACE (decl_rtl)))
9489 temp = replace_equiv_address (decl_rtl,
9490 copy_rtx (XEXP (decl_rtl, 0)));
9493 /* If we got something, return it. But first, set the alignment
9494 if the address is a register. */
9495 if (temp != 0)
9497 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9498 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9500 return temp;
9503 /* If the mode of DECL_RTL does not match that of the decl,
9504 there are two cases: we are dealing with a BLKmode value
9505 that is returned in a register, or we are dealing with
9506 a promoted value. In the latter case, return a SUBREG
9507 of the wanted mode, but mark it so that we know that it
9508 was already extended. */
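      /* Illustrative case: a HImode PARM_DECL promoted to SImode by the
         target has an SImode DECL_RTL; below we return
         (subreg:HI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set so later
         code knows the value was already extended.  */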
9509 if (REG_P (decl_rtl)
9510 && DECL_MODE (exp) != BLKmode
9511 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9513 enum machine_mode pmode;
9515 /* Get the signedness to be used for this variable. Ensure we get
9516 the same mode we got when the variable was declared. */
9517 if (code == SSA_NAME
9518 && (g = SSA_NAME_DEF_STMT (ssa_name))
9519 && gimple_code (g) == GIMPLE_CALL
9520 && !gimple_call_internal_p (g))
9521 pmode = promote_function_mode (type, mode, &unsignedp,
9522 gimple_call_fntype (g),
9524 else
9525 pmode = promote_decl_mode (exp, &unsignedp);
9526 gcc_assert (GET_MODE (decl_rtl) == pmode);
9528 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9529 SUBREG_PROMOTED_VAR_P (temp) = 1;
9530 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9531 return temp;
9534 return decl_rtl;
9536 case INTEGER_CST:
9537 /* Given that TYPE_PRECISION (type) is not always equal to
9538 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9539 the former to the latter according to the signedness of the
9540 type. */
9541 temp = immed_wide_int_const (wide_int::from
9542 (exp,
9543 GET_MODE_PRECISION (TYPE_MODE (type)),
9544 TYPE_SIGN (type)),
9545 TYPE_MODE (type));
9546 return temp;
9548 case VECTOR_CST:
9550 tree tmp = NULL_TREE;
9551 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9552 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9553 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9554 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9555 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9556 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9557 return const_vector_from_tree (exp);
9558 if (GET_MODE_CLASS (mode) == MODE_INT)
9560 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9561 if (type_for_mode)
9562 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9564 if (!tmp)
9566 vec<constructor_elt, va_gc> *v;
9567 unsigned i;
9568 vec_alloc (v, VECTOR_CST_NELTS (exp));
9569 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9570 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9571 tmp = build_constructor (type, v);
9573 return expand_expr (tmp, ignore ? const0_rtx : target,
9574 tmode, modifier);
9577 case CONST_DECL:
9578 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9580 case REAL_CST:
9581 /* If optimized, generate immediate CONST_DOUBLE
9582 which will be turned into memory by reload if necessary.
9584 We used to force a register so that loop.c could see it. But
9585 this does not allow gen_* patterns to perform optimizations with
9586 the constants. It also produces two insns in cases like "x = 1.0;".
9587 On most machines, floating-point constants are not permitted in
9588 many insns, so we'd end up copying it to a register in any case.
9590 Now, we do the copying in expand_binop, if appropriate. */
9591 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9592 TYPE_MODE (TREE_TYPE (exp)));
9594 case FIXED_CST:
9595 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9596 TYPE_MODE (TREE_TYPE (exp)));
9598 case COMPLEX_CST:
9599 /* Handle evaluating a complex constant in a CONCAT target. */
9600 if (original_target && GET_CODE (original_target) == CONCAT)
9602 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9603 rtx rtarg, itarg;
9605 rtarg = XEXP (original_target, 0);
9606 itarg = XEXP (original_target, 1);
9608 /* Move the real and imaginary parts separately. */
9609 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9610 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9612 if (op0 != rtarg)
9613 emit_move_insn (rtarg, op0);
9614 if (op1 != itarg)
9615 emit_move_insn (itarg, op1);
9617 return original_target;
9620 /* ... fall through ... */
9622 case STRING_CST:
9623 temp = expand_expr_constant (exp, 1, modifier);
9625 /* temp contains a constant address.
9626 On RISC machines where a constant address isn't valid,
9627 make some insns to get that address into a register. */
9628 if (modifier != EXPAND_CONST_ADDRESS
9629 && modifier != EXPAND_INITIALIZER
9630 && modifier != EXPAND_SUM
9631 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9632 MEM_ADDR_SPACE (temp)))
9633 return replace_equiv_address (temp,
9634 copy_rtx (XEXP (temp, 0)));
9635 return temp;
9637 case SAVE_EXPR:
9639 tree val = treeop0;
9640 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9641 inner_reference_p);
9643 if (!SAVE_EXPR_RESOLVED_P (exp))
9645 /* We can indeed still hit this case, typically via builtin
9646 expanders calling save_expr immediately before expanding
9647 something. Assume this means that we only have to deal
9648 with non-BLKmode values. */
9649 gcc_assert (GET_MODE (ret) != BLKmode);
9651 val = build_decl (curr_insn_location (),
9652 VAR_DECL, NULL, TREE_TYPE (exp));
9653 DECL_ARTIFICIAL (val) = 1;
9654 DECL_IGNORED_P (val) = 1;
9655 treeop0 = val;
9656 TREE_OPERAND (exp, 0) = treeop0;
9657 SAVE_EXPR_RESOLVED_P (exp) = 1;
9659 if (!CONSTANT_P (ret))
9660 ret = copy_to_reg (ret);
9661 SET_DECL_RTL (val, ret);
9664 return ret;
9668 case CONSTRUCTOR:
9669 /* If we don't need the result, just ensure we evaluate any
9670 subexpressions. */
9671 if (ignore)
9673 unsigned HOST_WIDE_INT idx;
9674 tree value;
9676 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9677 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9679 return const0_rtx;
9682 return expand_constructor (exp, target, modifier, false);
9684 case TARGET_MEM_REF:
9686 addr_space_t as
9687 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9688 enum insn_code icode;
9689 unsigned int align;
9691 op0 = addr_for_mem_ref (exp, as, true);
9692 op0 = memory_address_addr_space (mode, op0, as);
9693 temp = gen_rtx_MEM (mode, op0);
9694 set_mem_attributes (temp, exp, 0);
9695 set_mem_addr_space (temp, as);
9696 align = get_object_alignment (exp);
9697 if (modifier != EXPAND_WRITE
9698 && modifier != EXPAND_MEMORY
9699 && mode != BLKmode
9700 && align < GET_MODE_ALIGNMENT (mode)
9701 /* If the target does not have special handling for unaligned
9702 loads of mode then it can use regular moves for them. */
9703 && ((icode = optab_handler (movmisalign_optab, mode))
9704 != CODE_FOR_nothing))
9706 struct expand_operand ops[2];
9708 /* We've already validated the memory, and we're creating a
9709 new pseudo destination. The predicates really can't fail,
9710 nor can the generator. */
9711 create_output_operand (&ops[0], NULL_RTX, mode);
9712 create_fixed_operand (&ops[1], temp);
9713 expand_insn (icode, 2, ops);
9714 temp = ops[0].value;
9716 return temp;
9719 case MEM_REF:
9721 addr_space_t as
9722 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9723 enum machine_mode address_mode;
9724 tree base = TREE_OPERAND (exp, 0);
9725 gimple def_stmt;
9726 enum insn_code icode;
9727 unsigned align;
9728 /* Handle expansion of non-aliased memory with non-BLKmode. That
9729 might end up in a register. */
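	/* Illustrative case: a type-punning access such as
	   MEM[(int *)&f, 0], where F is a non-addressable float local
	   living in a pseudo, is rewritten below as a VIEW_CONVERT_EXPR
	   (or BIT_FIELD_REF) of F itself, so no stack slot is needed.  */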
9730 if (mem_ref_refers_to_non_mem_p (exp))
9732 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9733 base = TREE_OPERAND (base, 0);
9734 if (offset == 0
9735 && tree_fits_uhwi_p (TYPE_SIZE (type))
9736 && (GET_MODE_BITSIZE (DECL_MODE (base))
9737 == tree_to_uhwi (TYPE_SIZE (type))))
9738 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9739 target, tmode, modifier);
9740 if (TYPE_MODE (type) == BLKmode)
9742 temp = assign_stack_temp (DECL_MODE (base),
9743 GET_MODE_SIZE (DECL_MODE (base)));
9744 store_expr (base, temp, 0, false);
9745 temp = adjust_address (temp, BLKmode, offset);
9746 set_mem_size (temp, int_size_in_bytes (type));
9747 return temp;
9749 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9750 bitsize_int (offset * BITS_PER_UNIT));
9751 return expand_expr (exp, target, tmode, modifier);
9753 address_mode = targetm.addr_space.address_mode (as);
9754 base = TREE_OPERAND (exp, 0);
9755 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9757 tree mask = gimple_assign_rhs2 (def_stmt);
9758 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9759 gimple_assign_rhs1 (def_stmt), mask);
9760 TREE_OPERAND (exp, 0) = base;
9762 align = get_object_alignment (exp);
9763 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9764 op0 = memory_address_addr_space (mode, op0, as);
9765 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9767 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9768 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9769 op0 = memory_address_addr_space (mode, op0, as);
9771 temp = gen_rtx_MEM (mode, op0);
9772 set_mem_attributes (temp, exp, 0);
9773 set_mem_addr_space (temp, as);
9774 if (TREE_THIS_VOLATILE (exp))
9775 MEM_VOLATILE_P (temp) = 1;
9776 if (modifier != EXPAND_WRITE
9777 && modifier != EXPAND_MEMORY
9778 && !inner_reference_p
9779 && mode != BLKmode
9780 && align < GET_MODE_ALIGNMENT (mode))
9782 if ((icode = optab_handler (movmisalign_optab, mode))
9783 != CODE_FOR_nothing)
9785 struct expand_operand ops[2];
9787 /* We've already validated the memory, and we're creating a
9788 new pseudo destination. The predicates really can't fail,
9789 nor can the generator. */
9790 create_output_operand (&ops[0], NULL_RTX, mode);
9791 create_fixed_operand (&ops[1], temp);
9792 expand_insn (icode, 2, ops);
9793 temp = ops[0].value;
9795 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9796 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9797 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9798 (modifier == EXPAND_STACK_PARM
9799 ? NULL_RTX : target),
9800 mode, mode);
9802 return temp;
9805 case ARRAY_REF:
9808 tree array = treeop0;
9809 tree index = treeop1;
9810 tree init;
9812 /* Fold an expression like: "foo"[2].
9813 This is not done in fold so it won't happen inside &.
9814 Don't fold if this is for wide characters since it's too
9815 difficult to do correctly and this is a very rare case. */
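	/* E.g. "foo"[2] expands directly to the integer constant 'o'
	   (illustrative; produced by fold_read_from_constant_string), so
	   no load from memory is emitted.  */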
9817 if (modifier != EXPAND_CONST_ADDRESS
9818 && modifier != EXPAND_INITIALIZER
9819 && modifier != EXPAND_MEMORY)
9821 tree t = fold_read_from_constant_string (exp);
9823 if (t)
9824 return expand_expr (t, target, tmode, modifier);
9827 /* If this is a constant index into a constant array,
9828 just get the value from the array. Handle both the cases when
9829 we have an explicit constructor and when our operand is a variable
9830 that was declared const. */
9832 if (modifier != EXPAND_CONST_ADDRESS
9833 && modifier != EXPAND_INITIALIZER
9834 && modifier != EXPAND_MEMORY
9835 && TREE_CODE (array) == CONSTRUCTOR
9836 && ! TREE_SIDE_EFFECTS (array)
9837 && TREE_CODE (index) == INTEGER_CST)
9839 unsigned HOST_WIDE_INT ix;
9840 tree field, value;
9842 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9843 field, value)
9844 if (tree_int_cst_equal (field, index))
9846 if (!TREE_SIDE_EFFECTS (value))
9847 return expand_expr (fold (value), target, tmode, modifier);
9848 break;
9852 else if (optimize >= 1
9853 && modifier != EXPAND_CONST_ADDRESS
9854 && modifier != EXPAND_INITIALIZER
9855 && modifier != EXPAND_MEMORY
9856 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9857 && TREE_CODE (index) == INTEGER_CST
9858 && (TREE_CODE (array) == VAR_DECL
9859 || TREE_CODE (array) == CONST_DECL)
9860 && (init = ctor_for_folding (array)) != error_mark_node)
9862 if (init == NULL_TREE)
9864 tree value = build_zero_cst (type);
9865 if (TREE_CODE (value) == CONSTRUCTOR)
9867 /* If VALUE is a CONSTRUCTOR, this optimization is only
9868 useful if this doesn't store the CONSTRUCTOR into
9869 memory. If it does, it is more efficient to just
9870 load the data from the array directly. */
9871 rtx ret = expand_constructor (value, target,
9872 modifier, true);
9873 if (ret == NULL_RTX)
9874 value = NULL_TREE;
9877 if (value)
9878 return expand_expr (value, target, tmode, modifier);
9880 else if (TREE_CODE (init) == CONSTRUCTOR)
9882 unsigned HOST_WIDE_INT ix;
9883 tree field, value;
9885 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9886 field, value)
9887 if (tree_int_cst_equal (field, index))
9889 if (TREE_SIDE_EFFECTS (value))
9890 break;
9892 if (TREE_CODE (value) == CONSTRUCTOR)
9894 /* If VALUE is a CONSTRUCTOR, this
9895 optimization is only useful if
9896 this doesn't store the CONSTRUCTOR
9897 into memory. If it does, it is more
9898 efficient to just load the data from
9899 the array directly. */
9900 rtx ret = expand_constructor (value, target,
9901 modifier, true);
9902 if (ret == NULL_RTX)
9903 break;
9906 return
9907 expand_expr (fold (value), target, tmode, modifier);
9910 else if (TREE_CODE (init) == STRING_CST)
9912 tree low_bound = array_ref_low_bound (exp);
9913 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9915 /* Optimize the special case of a zero lower bound.
9917 We convert the lower bound to sizetype to avoid problems
9918 with constant folding. E.g. suppose the lower bound is
9919 1 and its mode is QI. Without the conversion
9920 (ARRAY + (INDEX - (unsigned char)1))
9921 becomes
9922 (ARRAY + (-(unsigned char)1) + INDEX)
9923 which becomes
9924 (ARRAY + 255 + INDEX). Oops! */
9925 if (!integer_zerop (low_bound))
9926 index1 = size_diffop_loc (loc, index1,
9927 fold_convert_loc (loc, sizetype,
9928 low_bound));
9930 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9932 tree type = TREE_TYPE (TREE_TYPE (init));
9933 enum machine_mode mode = TYPE_MODE (type);
9935 if (GET_MODE_CLASS (mode) == MODE_INT
9936 && GET_MODE_SIZE (mode) == 1)
9937 return gen_int_mode (TREE_STRING_POINTER (init)
9938 [TREE_INT_CST_LOW (index1)],
9939 mode);
9944 goto normal_inner_ref;
9946 case COMPONENT_REF:
9947 /* If the operand is a CONSTRUCTOR, we can just extract the
9948 appropriate field if it is present. */
9949 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9951 unsigned HOST_WIDE_INT idx;
9952 tree field, value;
9954 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9955 idx, field, value)
9956 if (field == treeop1
9957 /* We can normally use the value of the field in the
9958 CONSTRUCTOR. However, if this is a bitfield in
9959 an integral mode that we can fit in a HOST_WIDE_INT,
9960 we must mask only the number of bits in the bitfield,
9961 since this is done implicitly by the constructor. If
9962 the bitfield does not meet either of those conditions,
9963 we can't do this optimization. */
9964 && (! DECL_BIT_FIELD (field)
9965 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9966 && (GET_MODE_PRECISION (DECL_MODE (field))
9967 <= HOST_BITS_PER_WIDE_INT))))
9969 if (DECL_BIT_FIELD (field)
9970 && modifier == EXPAND_STACK_PARM)
9971 target = 0;
9972 op0 = expand_expr (value, target, tmode, modifier);
9973 if (DECL_BIT_FIELD (field))
9975 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9976 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9978 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9980 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9981 imode);
9982 op0 = expand_and (imode, op0, op1, target);
9984 else
9986 int count = GET_MODE_PRECISION (imode) - bitsize;
9988 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9989 target, 0);
9990 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9991 target, 0);
9995 return op0;
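	    /* Illustrative effect of the bit-field handling above: for a
	       3-bit field, an unsigned value is masked with 7, while a
	       signed value is shifted left and then arithmetically right
	       by GET_MODE_PRECISION (imode) - 3 to sign-extend it.  */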
9998 goto normal_inner_ref;
10000 case BIT_FIELD_REF:
10001 case ARRAY_RANGE_REF:
10002 normal_inner_ref:
10004 enum machine_mode mode1, mode2;
10005 HOST_WIDE_INT bitsize, bitpos;
10006 tree offset;
10007 int volatilep = 0, must_force_mem;
10008 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10009 &mode1, &unsignedp, &volatilep, true);
10010 rtx orig_op0, memloc;
10011 bool mem_attrs_from_type = false;
10013 /* If we got back the original object, something is wrong. Perhaps
10014 we are evaluating an expression too early. In any event, don't
10015 infinitely recurse. */
10016 gcc_assert (tem != exp);
10018 /* If TEM's type is a union of variable size, pass TARGET to the inner
10019 computation, since it will need a temporary and TARGET is known
10020 to have to do. This occurs in unchecked conversion in Ada. */
10021 orig_op0 = op0
10022 = expand_expr_real (tem,
10023 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10024 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10025 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10026 != INTEGER_CST)
10027 && modifier != EXPAND_STACK_PARM
10028 ? target : NULL_RTX),
10029 VOIDmode,
10030 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10031 NULL, true);
10033 /* If the field has a mode, we want to access it in the
10034 field's mode, not the computed mode.
10035 If a MEM has VOIDmode (external with incomplete type),
10036 use BLKmode for it instead. */
10037 if (MEM_P (op0))
10039 if (mode1 != VOIDmode)
10040 op0 = adjust_address (op0, mode1, 0);
10041 else if (GET_MODE (op0) == VOIDmode)
10042 op0 = adjust_address (op0, BLKmode, 0);
10045 mode2
10046 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10048 /* If we have either an offset, a BLKmode result, or a reference
10049 outside the underlying object, we must force it to memory.
10050 Such a case can occur in Ada if we have unchecked conversion
10051 of an expression from a scalar type to an aggregate type or
10052 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10053 passed a partially uninitialized object or a view-conversion
10054 to a larger size. */
10055 must_force_mem = (offset
10056 || mode1 == BLKmode
10057 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10059 /* Handle CONCAT first. */
10060 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10062 if (bitpos == 0
10063 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10064 return op0;
10065 if (bitpos == 0
10066 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10067 && bitsize)
10069 op0 = XEXP (op0, 0);
10070 mode2 = GET_MODE (op0);
10072 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10073 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10074 && bitpos
10075 && bitsize)
10077 op0 = XEXP (op0, 1);
10078 bitpos = 0;
10079 mode2 = GET_MODE (op0);
10081 else
10082 /* Otherwise force into memory. */
10083 must_force_mem = 1;
10086 /* If this is a constant, put it in a register if it is a legitimate
10087 constant and we don't need a memory reference. */
10088 if (CONSTANT_P (op0)
10089 && mode2 != BLKmode
10090 && targetm.legitimate_constant_p (mode2, op0)
10091 && !must_force_mem)
10092 op0 = force_reg (mode2, op0);
10094 /* Otherwise, if this is a constant, try to force it to the constant
10095 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10096 is a legitimate constant. */
10097 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10098 op0 = validize_mem (memloc);
10100 /* Otherwise, if this is a constant or the object is not in memory
10101 and need be, put it there. */
10102 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10104 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10105 emit_move_insn (memloc, op0);
10106 op0 = memloc;
10107 mem_attrs_from_type = true;
10110 if (offset)
10112 enum machine_mode address_mode;
10113 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10114 EXPAND_SUM);
10116 gcc_assert (MEM_P (op0));
10118 address_mode = get_address_mode (op0);
10119 if (GET_MODE (offset_rtx) != address_mode)
10120 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10122 /* See the comment in expand_assignment for the rationale. */
10123 if (mode1 != VOIDmode
10124 && bitpos != 0
10125 && bitsize > 0
10126 && (bitpos % bitsize) == 0
10127 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10128 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10130 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10131 bitpos = 0;
10134 op0 = offset_address (op0, offset_rtx,
10135 highest_pow2_factor (offset));
10138 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10139 record its alignment as BIGGEST_ALIGNMENT. */
10140 if (MEM_P (op0) && bitpos == 0 && offset != 0
10141 && is_aligning_offset (offset, tem))
10142 set_mem_align (op0, BIGGEST_ALIGNMENT);
10144 /* Don't forget about volatility even if this is a bitfield. */
10145 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10147 if (op0 == orig_op0)
10148 op0 = copy_rtx (op0);
10150 MEM_VOLATILE_P (op0) = 1;
10153 /* In cases where an aligned union has an unaligned object
10154 as a field, we might be extracting a BLKmode value from
10155 an integer-mode (e.g., SImode) object. Handle this case
10156 by doing the extract into an object as wide as the field
10157 (which we know to be the width of a basic mode), then
10158 storing into memory, and changing the mode to BLKmode. */
10159 if (mode1 == VOIDmode
10160 || REG_P (op0) || GET_CODE (op0) == SUBREG
10161 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10162 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10163 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10164 && modifier != EXPAND_CONST_ADDRESS
10165 && modifier != EXPAND_INITIALIZER
10166 && modifier != EXPAND_MEMORY)
10167 /* If the bitfield is volatile and the bitsize
10168 is narrower than the access size of the bitfield,
10169 we need to extract bitfields from the access. */
10170 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10171 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10172 && mode1 != BLKmode
10173 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10174 /* If the field isn't aligned enough to fetch as a memref,
10175 fetch it as a bit field. */
10176 || (mode1 != BLKmode
10177 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10178 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10179 || (MEM_P (op0)
10180 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10181 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10182 && modifier != EXPAND_MEMORY
10183 && ((modifier == EXPAND_CONST_ADDRESS
10184 || modifier == EXPAND_INITIALIZER)
10185 ? STRICT_ALIGNMENT
10186 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10187 || (bitpos % BITS_PER_UNIT != 0)))
10188 /* If the type and the field are a constant size and the
10189 size of the type isn't the same size as the bitfield,
10190 we must use bitfield operations. */
10191 || (bitsize >= 0
10192 && TYPE_SIZE (TREE_TYPE (exp))
10193 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10194 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10195 bitsize)))
10197 enum machine_mode ext_mode = mode;
10199 if (ext_mode == BLKmode
10200 && ! (target != 0 && MEM_P (op0)
10201 && MEM_P (target)
10202 && bitpos % BITS_PER_UNIT == 0))
10203 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10205 if (ext_mode == BLKmode)
10207 if (target == 0)
10208 target = assign_temp (type, 1, 1);
10210 /* ??? Unlike the similar test a few lines below, this one is
10211 very likely obsolete. */
10212 if (bitsize == 0)
10213 return target;
10215 /* In this case, BITPOS must start at a byte boundary and
10216 TARGET, if specified, must be a MEM. */
10217 gcc_assert (MEM_P (op0)
10218 && (!target || MEM_P (target))
10219 && !(bitpos % BITS_PER_UNIT));
10221 emit_block_move (target,
10222 adjust_address (op0, VOIDmode,
10223 bitpos / BITS_PER_UNIT),
10224 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10225 / BITS_PER_UNIT),
10226 (modifier == EXPAND_STACK_PARM
10227 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10229 return target;
10232 /* If we have nothing to extract, the result will be 0 for targets
10233 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10234 return 0 for the sake of consistency, as reading a zero-sized
10235 bitfield is valid in Ada and the value is fully specified. */
10236 if (bitsize == 0)
10237 return const0_rtx;
10239 op0 = validize_mem (op0);
10241 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10242 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10244 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10245 (modifier == EXPAND_STACK_PARM
10246 ? NULL_RTX : target),
10247 ext_mode, ext_mode);
10249 /* If the result is a record type and BITSIZE is narrower than
10250 the mode of OP0, an integral mode, and this is a big endian
10251 machine, we must put the field into the high-order bits. */
10252 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10253 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10254 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10255 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10256 GET_MODE_BITSIZE (GET_MODE (op0))
10257 - bitsize, op0, 1);
10259 /* If the result type is BLKmode, store the data into a temporary
10260 of the appropriate type, but with the mode corresponding to the
10261 mode for the data we have (op0's mode). */
10262 if (mode == BLKmode)
10264 rtx new_rtx
10265 = assign_stack_temp_for_type (ext_mode,
10266 GET_MODE_BITSIZE (ext_mode),
10267 type);
10268 emit_move_insn (new_rtx, op0);
10269 op0 = copy_rtx (new_rtx);
10270 PUT_MODE (op0, BLKmode);
10273 return op0;
10276 /* If the result is BLKmode, use that to access the object
10277 now as well. */
10278 if (mode == BLKmode)
10279 mode1 = BLKmode;
10281 /* Get a reference to just this component. */
10282 if (modifier == EXPAND_CONST_ADDRESS
10283 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10284 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10285 else
10286 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10288 if (op0 == orig_op0)
10289 op0 = copy_rtx (op0);
10291 /* If op0 is a temporary because of forcing to memory, pass only the
10292 type to set_mem_attributes so that the original expression is never
10293 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10294 if (mem_attrs_from_type)
10295 set_mem_attributes (op0, type, 0);
10296 else
10297 set_mem_attributes (op0, exp, 0);
10299 if (REG_P (XEXP (op0, 0)))
10300 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10302 MEM_VOLATILE_P (op0) |= volatilep;
10303 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10304 || modifier == EXPAND_CONST_ADDRESS
10305 || modifier == EXPAND_INITIALIZER)
10306 return op0;
10308 if (target == 0)
10309 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10311 convert_move (target, op0, unsignedp);
10312 return target;
10315 case OBJ_TYPE_REF:
10316 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10318 case CALL_EXPR:
10319 /* All valid uses of __builtin_va_arg_pack () are removed during
10320 inlining. */
10321 if (CALL_EXPR_VA_ARG_PACK (exp))
10322 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10324 tree fndecl = get_callee_fndecl (exp), attr;
10326 if (fndecl
10327 && (attr = lookup_attribute ("error",
10328 DECL_ATTRIBUTES (fndecl))) != NULL)
10329 error ("%Kcall to %qs declared with attribute error: %s",
10330 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10331 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10332 if (fndecl
10333 && (attr = lookup_attribute ("warning",
10334 DECL_ATTRIBUTES (fndecl))) != NULL)
10335 warning_at (tree_nonartificial_location (exp),
10336 0, "%Kcall to %qs declared with attribute warning: %s",
10337 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10338 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10340 /* Check for a built-in function. */
10341 if (fndecl && DECL_BUILT_IN (fndecl))
10343 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10344 return expand_builtin (exp, target, subtarget, tmode, ignore);
10347 return expand_call (exp, target, ignore);
10349 case VIEW_CONVERT_EXPR:
10350 op0 = NULL_RTX;
10352 /* If we are converting to BLKmode, try to avoid an intermediate
10353 temporary by fetching an inner memory reference. */
10354 if (mode == BLKmode
10355 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10356 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10357 && handled_component_p (treeop0))
10359 enum machine_mode mode1;
10360 HOST_WIDE_INT bitsize, bitpos;
10361 tree offset;
10362 int unsignedp;
10363 int volatilep = 0;
10364 tree tem
10365 = get_inner_reference (treeop0, &bitsize, &bitpos,
10366 &offset, &mode1, &unsignedp, &volatilep,
10367 true);
10368 rtx orig_op0;
10370 /* ??? We should work harder and deal with non-zero offsets. */
10371 if (!offset
10372 && (bitpos % BITS_PER_UNIT) == 0
10373 && bitsize >= 0
10374 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10376 /* See the normal_inner_ref case for the rationale. */
10377 orig_op0
10378 = expand_expr_real (tem,
10379 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10380 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10381 != INTEGER_CST)
10382 && modifier != EXPAND_STACK_PARM
10383 ? target : NULL_RTX),
10384 VOIDmode,
10385 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10386 NULL, true);
10388 if (MEM_P (orig_op0))
10390 op0 = orig_op0;
10392 /* Get a reference to just this component. */
10393 if (modifier == EXPAND_CONST_ADDRESS
10394 || modifier == EXPAND_SUM
10395 || modifier == EXPAND_INITIALIZER)
10396 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10397 else
10398 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10400 if (op0 == orig_op0)
10401 op0 = copy_rtx (op0);
10403 set_mem_attributes (op0, treeop0, 0);
10404 if (REG_P (XEXP (op0, 0)))
10405 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10407 MEM_VOLATILE_P (op0) |= volatilep;
10412 if (!op0)
10413 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10414 NULL, inner_reference_p);
10416 /* If the input and output modes are both the same, we are done. */
10417 if (mode == GET_MODE (op0))
10419 /* If neither mode is BLKmode, and both modes are the same size
10420 then we can use gen_lowpart. */
10421 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10422 && (GET_MODE_PRECISION (mode)
10423 == GET_MODE_PRECISION (GET_MODE (op0)))
10424 && !COMPLEX_MODE_P (GET_MODE (op0)))
10426 if (GET_CODE (op0) == SUBREG)
10427 op0 = force_reg (GET_MODE (op0), op0);
10428 temp = gen_lowpart_common (mode, op0);
10429 if (temp)
10430 op0 = temp;
10431 else
10433 if (!REG_P (op0) && !MEM_P (op0))
10434 op0 = force_reg (GET_MODE (op0), op0);
10435 op0 = gen_lowpart (mode, op0);
10438 /* If both types are integral, convert from one mode to the other. */
10439 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10440 op0 = convert_modes (mode, GET_MODE (op0), op0,
10441 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10442 /* If the output type is a bit-field type, do an extraction. */
10443 else if (reduce_bit_field)
10444 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10445 TYPE_UNSIGNED (type), NULL_RTX,
10446 mode, mode);
10447 /* As a last resort, spill op0 to memory, and reload it in a
10448 different mode. */
10449 else if (!MEM_P (op0))
10451 /* If the operand is not a MEM, force it into memory. Since we
10452 are going to be changing the mode of the MEM, don't call
10453 force_const_mem for constants because we don't allow pool
10454 constants to change mode. */
10455 tree inner_type = TREE_TYPE (treeop0);
10457 gcc_assert (!TREE_ADDRESSABLE (exp));
10459 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10460 target
10461 = assign_stack_temp_for_type
10462 (TYPE_MODE (inner_type),
10463 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10465 emit_move_insn (target, op0);
10466 op0 = target;
10469 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10470 output type is such that the operand is known to be aligned, indicate
10471 that it is. Otherwise, we need only be concerned about alignment for
10472 non-BLKmode results. */
10473 if (MEM_P (op0))
10475 enum insn_code icode;
10477 if (TYPE_ALIGN_OK (type))
10479 /* ??? Copying the MEM without substantially changing it might
10480 run afoul of the code handling volatile memory references in
10481 store_expr, which assumes that TARGET is returned unmodified
10482 if it has been used. */
10483 op0 = copy_rtx (op0);
10484 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10486 else if (modifier != EXPAND_WRITE
10487 && modifier != EXPAND_MEMORY
10488 && !inner_reference_p
10489 && mode != BLKmode
10490 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10492 /* If the target does have special handling for unaligned
10493 loads of mode then use them. */
10494 if ((icode = optab_handler (movmisalign_optab, mode))
10495 != CODE_FOR_nothing)
10497 rtx reg, insn;
10499 op0 = adjust_address (op0, mode, 0);
10500 /* We've already validated the memory, and we're creating a
10501 new pseudo destination. The predicates really can't
10502 fail. */
10503 reg = gen_reg_rtx (mode);
10505 /* Nor can the insn generator. */
10506 insn = GEN_FCN (icode) (reg, op0);
10507 emit_insn (insn);
10508 return reg;
10510 else if (STRICT_ALIGNMENT)
10512 tree inner_type = TREE_TYPE (treeop0);
10513 HOST_WIDE_INT temp_size
10514 = MAX (int_size_in_bytes (inner_type),
10515 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10516 rtx new_rtx
10517 = assign_stack_temp_for_type (mode, temp_size, type);
10518 rtx new_with_op0_mode
10519 = adjust_address (new_rtx, GET_MODE (op0), 0);
10521 gcc_assert (!TREE_ADDRESSABLE (exp));
10523 if (GET_MODE (op0) == BLKmode)
10524 emit_block_move (new_with_op0_mode, op0,
10525 GEN_INT (GET_MODE_SIZE (mode)),
10526 (modifier == EXPAND_STACK_PARM
10527 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10528 else
10529 emit_move_insn (new_with_op0_mode, op0);
10531 op0 = new_rtx;
10535 op0 = adjust_address (op0, mode, 0);
10538 return op0;
10540 case MODIFY_EXPR:
10542 tree lhs = treeop0;
10543 tree rhs = treeop1;
10544 gcc_assert (ignore);
10546 /* Check for |= or &= of a bitfield of size one into another bitfield
10547 of size 1. In this case, (unless we need the result of the
10548 assignment) we can do this more efficiently with a
10549 test followed by an assignment, if necessary.
10551 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10552 things change so we do, this code should be enhanced to
10553 support it. */
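	/* Illustrative effect (field names are made up): for one-bit
	   fields, "s.a |= s.b" becomes "if (s.b) s.a = 1;" and
	   "s.a &= s.b" becomes "if (!s.b) s.a = 0;", so the store to the
	   LHS is skipped whenever the RHS bit cannot change it.  */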
10554 if (TREE_CODE (lhs) == COMPONENT_REF
10555 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10556 || TREE_CODE (rhs) == BIT_AND_EXPR)
10557 && TREE_OPERAND (rhs, 0) == lhs
10558 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10559 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10560 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10562 rtx label = gen_label_rtx ();
10563 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10564 do_jump (TREE_OPERAND (rhs, 1),
10565 value ? label : 0,
10566 value ? 0 : label, -1);
10567 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10568 false);
10569 do_pending_stack_adjust ();
10570 emit_label (label);
10571 return const0_rtx;
10574 expand_assignment (lhs, rhs, false);
10575 return const0_rtx;
10578 case ADDR_EXPR:
10579 return expand_expr_addr_expr (exp, target, tmode, modifier);
10581 case REALPART_EXPR:
10582 op0 = expand_normal (treeop0);
10583 return read_complex_part (op0, false);
10585 case IMAGPART_EXPR:
10586 op0 = expand_normal (treeop0);
10587 return read_complex_part (op0, true);
10589 case RETURN_EXPR:
10590 case LABEL_EXPR:
10591 case GOTO_EXPR:
10592 case SWITCH_EXPR:
10593 case ASM_EXPR:
10594 /* Expanded in cfgexpand.c. */
10595 gcc_unreachable ();
10597 case TRY_CATCH_EXPR:
10598 case CATCH_EXPR:
10599 case EH_FILTER_EXPR:
10600 case TRY_FINALLY_EXPR:
10601 /* Lowered by tree-eh.c. */
10602 gcc_unreachable ();
10604 case WITH_CLEANUP_EXPR:
10605 case CLEANUP_POINT_EXPR:
10606 case TARGET_EXPR:
10607 case CASE_LABEL_EXPR:
10608 case VA_ARG_EXPR:
10609 case BIND_EXPR:
10610 case INIT_EXPR:
10611 case CONJ_EXPR:
10612 case COMPOUND_EXPR:
10613 case PREINCREMENT_EXPR:
10614 case PREDECREMENT_EXPR:
10615 case POSTINCREMENT_EXPR:
10616 case POSTDECREMENT_EXPR:
10617 case LOOP_EXPR:
10618 case EXIT_EXPR:
10619 case COMPOUND_LITERAL_EXPR:
10620 /* Lowered by gimplify.c. */
10621 gcc_unreachable ();
10623 case FDESC_EXPR:
10624 /* Function descriptors are not valid except for as
10625 initialization constants, and should not be expanded. */
10626 gcc_unreachable ();
10628 case WITH_SIZE_EXPR:
10629 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10630 have pulled out the size to use in whatever context it needed. */
10631 return expand_expr_real (treeop0, original_target, tmode,
10632 modifier, alt_rtl, inner_reference_p);
10634 default:
10635 return expand_expr_real_2 (&ops, target, tmode, modifier);
10639 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10640 signedness of TYPE), possibly returning the result in TARGET. */
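/* E.g. for a 3-bit TYPE held in SImode, an unsigned value is reduced by
   ANDing with 7, while a signed value is shifted left and then
   arithmetically right by GET_MODE_PRECISION (SImode) - 3, as done in the
   branches below (illustrative case).  */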
10641 static rtx
10642 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10644 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10645 if (target && GET_MODE (target) != GET_MODE (exp))
10646 target = 0;
10647 /* For constant values, reduce using build_int_cst_type. */
10648 if (CONST_INT_P (exp))
10650 HOST_WIDE_INT value = INTVAL (exp);
10651 tree t = build_int_cst_type (type, value);
10652 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10654 else if (TYPE_UNSIGNED (type))
10656 enum machine_mode mode = GET_MODE (exp);
10657 rtx mask = immed_wide_int_const
10658 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10659 return expand_and (mode, exp, mask, target);
10661 else
10663 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10664 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10665 exp, count, target, 0);
10666 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10667 exp, count, target, 0);
10671 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10672 when applied to the address of EXP produces an address known to be
10673 aligned more than BIGGEST_ALIGNMENT. */
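/* The shape recognized below is, illustratively, an offset of the form
   (-(sizetype) &EXP) & (ALIGN - 1), where ALIGN is a power of two and
   ALIGN - 1 exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an
   offset to &EXP rounds the address up to an ALIGN-byte boundary.  */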
10675 static int
10676 is_aligning_offset (const_tree offset, const_tree exp)
10678 /* Strip off any conversions. */
10679 while (CONVERT_EXPR_P (offset))
10680 offset = TREE_OPERAND (offset, 0);
10682 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10683 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10684 if (TREE_CODE (offset) != BIT_AND_EXPR
10685 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10686 || compare_tree_int (TREE_OPERAND (offset, 1),
10687 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10688 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10689 return 0;
10691 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10692 It must be NEGATE_EXPR. Then strip any more conversions. */
10693 offset = TREE_OPERAND (offset, 0);
10694 while (CONVERT_EXPR_P (offset))
10695 offset = TREE_OPERAND (offset, 0);
10697 if (TREE_CODE (offset) != NEGATE_EXPR)
10698 return 0;
10700 offset = TREE_OPERAND (offset, 0);
10701 while (CONVERT_EXPR_P (offset))
10702 offset = TREE_OPERAND (offset, 0);
10704 /* This must now be the address of EXP. */
10705 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10708 /* Return the tree node if an ARG corresponds to a string constant or zero
10709 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10710 in bytes within the string that ARG is accessing. The type of the
10711 offset will be `sizetype'. */
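/* Illustrative cases: for ARG == &"hello"[2] this returns the STRING_CST
   "hello" and sets *PTR_OFFSET to 2; a POINTER_PLUS_EXPR such as
   "hello" + i is handled likewise with a possibly variable offset, and a
   VAR_DECL whose initializer is a string literal is looked through via
   ctor_for_folding.  */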
10713 tree
10714 string_constant (tree arg, tree *ptr_offset)
10716 tree array, offset, lower_bound;
10717 STRIP_NOPS (arg);
10719 if (TREE_CODE (arg) == ADDR_EXPR)
10721 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10723 *ptr_offset = size_zero_node;
10724 return TREE_OPERAND (arg, 0);
10726 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10728 array = TREE_OPERAND (arg, 0);
10729 offset = size_zero_node;
10731 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10733 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10734 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10735 if (TREE_CODE (array) != STRING_CST
10736 && TREE_CODE (array) != VAR_DECL)
10737 return 0;
10739 /* Check if the array has a nonzero lower bound. */
10740 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10741 if (!integer_zerop (lower_bound))
10743 /* If the offset and lower bound aren't both constants, return 0. */
10744 if (TREE_CODE (lower_bound) != INTEGER_CST)
10745 return 0;
10746 if (TREE_CODE (offset) != INTEGER_CST)
10747 return 0;
10748 /* Adjust offset by the lower bound. */
10749 offset = size_diffop (fold_convert (sizetype, offset),
10750 fold_convert (sizetype, lower_bound));
10753 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10755 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10756 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10757 if (TREE_CODE (array) != ADDR_EXPR)
10758 return 0;
10759 array = TREE_OPERAND (array, 0);
10760 if (TREE_CODE (array) != STRING_CST
10761 && TREE_CODE (array) != VAR_DECL)
10762 return 0;
10764 else
10765 return 0;
10767 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10769 tree arg0 = TREE_OPERAND (arg, 0);
10770 tree arg1 = TREE_OPERAND (arg, 1);
10772 STRIP_NOPS (arg0);
10773 STRIP_NOPS (arg1);
10775 if (TREE_CODE (arg0) == ADDR_EXPR
10776 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10777 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10779 array = TREE_OPERAND (arg0, 0);
10780 offset = arg1;
10782 else if (TREE_CODE (arg1) == ADDR_EXPR
10783 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10784 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10786 array = TREE_OPERAND (arg1, 0);
10787 offset = arg0;
10789 else
10790 return 0;
10792 else
10793 return 0;
10795 if (TREE_CODE (array) == STRING_CST)
10797 *ptr_offset = fold_convert (sizetype, offset);
10798 return array;
10800 else if (TREE_CODE (array) == VAR_DECL
10801 || TREE_CODE (array) == CONST_DECL)
10803 int length;
10804 tree init = ctor_for_folding (array);
10806 /* Variables initialized to string literals can be handled too. */
10807 if (init == error_mark_node
10808 || !init
10809 || TREE_CODE (init) != STRING_CST)
10810 return 0;
10812 /* Avoid const char foo[4] = "abcde"; */
10813 if (DECL_SIZE_UNIT (array) == NULL_TREE
10814 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10815 || (length = TREE_STRING_LENGTH (init)) <= 0
10816 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10817 return 0;
10819 /* If variable is bigger than the string literal, OFFSET must be constant
10820 and inside of the bounds of the string literal. */
10821 offset = fold_convert (sizetype, offset);
10822 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10823 && (! tree_fits_uhwi_p (offset)
10824 || compare_tree_int (offset, length) >= 0))
10825 return 0;
10827 *ptr_offset = offset;
10828 return init;
10831 return 0;
10834 /* Generate code to calculate OPS, an exploded expression,
10835 using a store-flag instruction and return an rtx for the result.
10836 OPS reflects a comparison.
10838 If TARGET is nonzero, store the result there if convenient.
10840 Return zero if there is no suitable set-flag instruction
10841 available on this machine.
10843 Once expand_expr has been called on the arguments of the comparison,
10844 we are committed to doing the store flag, since it is not safe to
10845 re-evaluate the expression. We emit the store-flag insn by calling
10846 emit_store_flag, but only expand the arguments if we have a reason
10847 to believe that emit_store_flag will be successful. If we think that
10848 it will, but it isn't, we have to simulate the store-flag with a
10849 set/jump/set sequence. */
10851 static rtx
10852 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10854 enum rtx_code code;
10855 tree arg0, arg1, type;
10856 tree tem;
10857 enum machine_mode operand_mode;
10858 int unsignedp;
10859 rtx op0, op1;
10860 rtx subtarget = target;
10861 location_t loc = ops->location;
10863 arg0 = ops->op0;
10864 arg1 = ops->op1;
10866 /* Don't crash if the comparison was erroneous. */
10867 if (arg0 == error_mark_node || arg1 == error_mark_node)
10868 return const0_rtx;
10870 type = TREE_TYPE (arg0);
10871 operand_mode = TYPE_MODE (type);
10872 unsignedp = TYPE_UNSIGNED (type);
10874 /* We won't bother with BLKmode store-flag operations because it would mean
10875 passing a lot of information to emit_store_flag. */
10876 if (operand_mode == BLKmode)
10877 return 0;
10879 /* We won't bother with store-flag operations involving function pointers
10880 when function pointers must be canonicalized before comparisons. */
10881 #ifdef HAVE_canonicalize_funcptr_for_compare
10882 if (HAVE_canonicalize_funcptr_for_compare
10883 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10884 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10885 == FUNCTION_TYPE))
10886 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10887 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10888 == FUNCTION_TYPE))))
10889 return 0;
10890 #endif
10892 STRIP_NOPS (arg0);
10893 STRIP_NOPS (arg1);
10895 /* For vector typed comparisons emit code to generate the desired
10896 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10897 expander for this. */
10898 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10900 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10901 tree if_true = constant_boolean_node (true, ops->type);
10902 tree if_false = constant_boolean_node (false, ops->type);
10903 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10906 /* Get the rtx comparison code to use. We know that EXP is a comparison
10907 operation of some type. Some comparisons against 1 and -1 can be
10908 converted to comparisons with zero. Do so here so that the tests
10909 below will be aware that we have a comparison with zero. These
10910 tests will not catch constants in the first operand, but constants
10911 are rarely passed as the first operand. */
10913 switch (ops->code)
10915 case EQ_EXPR:
10916 code = EQ;
10917 break;
10918 case NE_EXPR:
10919 code = NE;
10920 break;
10921 case LT_EXPR:
10922 if (integer_onep (arg1))
10923 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10924 else
10925 code = unsignedp ? LTU : LT;
10926 break;
10927 case LE_EXPR:
10928 if (! unsignedp && integer_all_onesp (arg1))
10929 arg1 = integer_zero_node, code = LT;
10930 else
10931 code = unsignedp ? LEU : LE;
10932 break;
10933 case GT_EXPR:
10934 if (! unsignedp && integer_all_onesp (arg1))
10935 arg1 = integer_zero_node, code = GE;
10936 else
10937 code = unsignedp ? GTU : GT;
10938 break;
10939 case GE_EXPR:
10940 if (integer_onep (arg1))
10941 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10942 else
10943 code = unsignedp ? GEU : GE;
10944 break;
10946 case UNORDERED_EXPR:
10947 code = UNORDERED;
10948 break;
10949 case ORDERED_EXPR:
10950 code = ORDERED;
10951 break;
10952 case UNLT_EXPR:
10953 code = UNLT;
10954 break;
10955 case UNLE_EXPR:
10956 code = UNLE;
10957 break;
10958 case UNGT_EXPR:
10959 code = UNGT;
10960 break;
10961 case UNGE_EXPR:
10962 code = UNGE;
10963 break;
10964 case UNEQ_EXPR:
10965 code = UNEQ;
10966 break;
10967 case LTGT_EXPR:
10968 code = LTGT;
10969 break;
10971 default:
10972 gcc_unreachable ();
10973 }
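/* Editor's illustrative sketch (plain C, not GCC code; kept under
   "#if 0"): the constant adjustments above, spelled out at the source
   level.  For a signed operand, x < 1 is the same test as x <= 0 and
   x > -1 is the same test as x >= 0, so only comparisons against zero
   need to be emitted for these cases.  */
#if 0
static int
example_lt_one (int x)
{
  return x <= 0;                /* equivalent to x < 1 */
}

static int
example_gt_minus_one (int x)
{
  return x >= 0;                /* equivalent to x > -1 */
}
#endif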
10975 /* Put a constant second. */
10976 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10977 || TREE_CODE (arg0) == FIXED_CST)
10978 {
10979 tem = arg0; arg0 = arg1; arg1 = tem;
10980 code = swap_condition (code);
10981 }
10983 /* If this is an equality or inequality test of a single bit, we can
10984 do this by shifting the bit being tested to the low-order bit and
10985 masking the result with the constant 1. If the condition was EQ,
10986 we xor it with 1. This does not require an scc insn and is faster
10987 than an scc insn even if we have it.
10989 The code to make this transformation was moved into fold_single_bit_test,
10990 so we just call into the folder and expand its result. */
10992 if ((code == NE || code == EQ)
10993 && integer_zerop (arg1)
10994 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10995 {
10996 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10997 if (srcstmt
10998 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10999 {
11000 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11001 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11002 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11003 gimple_assign_rhs1 (srcstmt),
11004 gimple_assign_rhs2 (srcstmt));
11005 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11006 if (temp)
11007 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11008 }
11009 }
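/* Editor's illustrative sketch (plain C, not GCC code; kept under
   "#if 0"): the rewrite that fold_single_bit_test performs, shown for a
   test of bit 3, so no store-flag (scc) instruction is needed.  */
#if 0
static unsigned int
example_single_bit_ne (unsigned int x)
{
  return (x >> 3) & 1;          /* same value as (x & 8) != 0 */
}

static unsigned int
example_single_bit_eq (unsigned int x)
{
  return ((x >> 3) & 1) ^ 1;    /* same value as (x & 8) == 0 */
}
#endif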
11011 if (! get_subtarget (target)
11012 || GET_MODE (subtarget) != operand_mode)
11013 subtarget = 0;
11015 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11017 if (target == 0)
11018 target = gen_reg_rtx (mode);
11020 /* Try a cstore if possible. */
11021 return emit_store_flag_force (target, code, op0, op1,
11022 operand_mode, unsignedp,
11023 (TYPE_PRECISION (ops->type) == 1
11024 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11025 }
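/* Editor's illustrative sketch (hypothetical wrapper, not GCC code; kept
   under "#if 0"): the final argument above selects how "true" is
   represented.  For a 1-bit signed result type the only nonzero value is
   -1, so true must be materialised as all-ones; for every other type it
   is 1.  */
#if 0
static rtx
example_store_flag_true_value (rtx target, rtx op0, rtx op1,
                               enum machine_mode op_mode,
                               int result_is_1bit_signed)
{
  return emit_store_flag_force (target, EQ, op0, op1, op_mode,
                                /*unsignedp=*/1,
                                result_is_1bit_signed ? -1 : 1);
}
#endif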
11028 /* Stubs in case we haven't got a casesi insn. */
11029 #ifndef HAVE_casesi
11030 # define HAVE_casesi 0
11031 # define gen_casesi(a, b, c, d, e) (0)
11032 # define CODE_FOR_casesi CODE_FOR_nothing
11033 #endif
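/* Editor's illustrative sketch (hypothetical helper, not GCC code; kept
   under "#if 0"): the stubs above let generic code test HAVE_casesi at
   run time instead of wrapping every use in #ifdef; on a target without
   a casesi pattern the guard is the constant 0 and the guarded code is
   removed as dead.  */
#if 0
static int
example_have_casesi_p (void)
{
  return HAVE_casesi != 0;
}
#endif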
11035 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11036 0 otherwise (i.e. if there is no casesi instruction).
11038 DEFAULT_PROBABILITY is the probability of jumping to the default
11039 label. */
11040 int
11041 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11042 rtx table_label, rtx default_label, rtx fallback_label,
11043 int default_probability)
11044 {
11045 struct expand_operand ops[5];
11046 enum machine_mode index_mode = SImode;
11047 rtx op1, op2, index;
11049 if (! HAVE_casesi)
11050 return 0;
11052 /* Convert the index to SImode. */
11053 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11054 {
11055 enum machine_mode omode = TYPE_MODE (index_type);
11056 rtx rangertx = expand_normal (range);
11058 /* We must handle the endpoints in the original mode. */
11059 index_expr = build2 (MINUS_EXPR, index_type,
11060 index_expr, minval);
11061 minval = integer_zero_node;
11062 index = expand_normal (index_expr);
11063 if (default_label)
11064 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11065 omode, 1, default_label,
11066 default_probability);
11067 /* Now we can safely truncate. */
11068 index = convert_to_mode (index_mode, index, 0);
11069 }
11070 else
11071 {
11072 if (TYPE_MODE (index_type) != index_mode)
11073 {
11074 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11075 index_expr = fold_convert (index_type, index_expr);
11076 }
11078 index = expand_normal (index_expr);
11079 }
11081 do_pending_stack_adjust ();
11083 op1 = expand_normal (minval);
11084 op2 = expand_normal (range);
11086 create_input_operand (&ops[0], index, index_mode);
11087 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11088 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11089 create_fixed_operand (&ops[3], table_label);
11090 create_fixed_operand (&ops[4], (default_label
11091 ? default_label
11092 : fallback_label));
11093 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11094 return 1;
11095 }
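/* Editor's illustrative sketch (plain C, not GCC code; kept under
   "#if 0"): the dispatch that a casesi pattern implements for a switch
   whose case values span MINVAL .. MINVAL + RANGE.  A single unsigned
   comparison after biasing the index checks both table bounds at once.  */
#if 0
static void
example_casesi_semantics (long index, long minval, long range,
                          void (**table) (void), void (*default_case) (void))
{
  unsigned long biased = (unsigned long) (index - minval);
  if (biased > (unsigned long) range)
    default_case ();            /* out of range: go to the default label */
  else
    table[biased] ();           /* in range: jump through the table */
}
#endif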
11097 /* Attempt to generate a tablejump instruction; same concept. */
11098 #ifndef HAVE_tablejump
11099 #define HAVE_tablejump 0
11100 #define gen_tablejump(x, y) (0)
11101 #endif
11103 /* Subroutine of the next function.
11105 INDEX is the value being switched on, with the lowest value
11106 in the table already subtracted.
11107 MODE is its expected mode (needed if INDEX is constant).
11108 RANGE is the length of the jump table.
11109 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11111 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11112 index value is out of range.
11113 DEFAULT_PROBABILITY is the probability of jumping to
11114 the default label. */
11116 static void
11117 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11118 rtx default_label, int default_probability)
11119 {
11120 rtx temp, vector;
11122 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11123 cfun->cfg->max_jumptable_ents = INTVAL (range);
11125 /* Do an unsigned comparison (in the proper mode) between the index
11126 expression and the value which represents the length of the range.
11127 Since we just finished subtracting the lower bound of the range
11128 from the index expression, this comparison allows us to simultaneously
11129 check that the original index expression value is both greater than
11130 or equal to the minimum value of the range and less than or equal to
11131 the maximum value of the range. */
11133 if (default_label)
11134 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11135 default_label, default_probability);
11138 /* If index is in range, it must fit in Pmode.
11139 Convert to Pmode so we can index with it. */
11140 if (mode != Pmode)
11141 index = convert_to_mode (Pmode, index, 1);
11143 /* Don't let a MEM slip through, because then INDEX that comes
11144 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11145 and break_out_memory_refs will go to work on it and mess it up. */
11146 #ifdef PIC_CASE_VECTOR_ADDRESS
11147 if (flag_pic && !REG_P (index))
11148 index = copy_to_mode_reg (Pmode, index);
11149 #endif
11151 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11152 GET_MODE_SIZE, because this indicates how large insns are. The other
11153 uses should all be Pmode, because they are addresses. This code
11154 could fail if addresses and insns are not the same size. */
11155 index = simplify_gen_binary (MULT, Pmode, index,
11156 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11157 Pmode));
11158 index = simplify_gen_binary (PLUS, Pmode, index,
11159 gen_rtx_LABEL_REF (Pmode, table_label));
11161 #ifdef PIC_CASE_VECTOR_ADDRESS
11162 if (flag_pic)
11163 index = PIC_CASE_VECTOR_ADDRESS (index);
11164 else
11165 #endif
11166 index = memory_address (CASE_VECTOR_MODE, index);
11167 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11168 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11169 convert_move (temp, vector, 0);
11171 emit_jump_insn (gen_tablejump (temp, table_label));
11173 /* If we are generating PIC code or if the table is PC-relative, the
11174 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11175 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11176 emit_barrier ();
11177 }
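/* Editor's illustrative sketch (plain C, not GCC code; kept under
   "#if 0"): the address arithmetic above.  Entry I of the table lives at
   TABLE_LABEL + I * GET_MODE_SIZE (CASE_VECTOR_MODE); the loaded entry
   is either an absolute address or, for PC-relative tables, an offset
   that PIC_CASE_VECTOR_ADDRESS turns into one.  */
#if 0
static const char *
example_table_entry_address (const char *table_label, unsigned long index,
                             unsigned long entry_size)
{
  return table_label + index * entry_size;
}
#endif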
11179 int
11180 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11181 rtx table_label, rtx default_label, int default_probability)
11182 {
11183 rtx index;
11185 if (! HAVE_tablejump)
11186 return 0;
11188 index_expr = fold_build2 (MINUS_EXPR, index_type,
11189 fold_convert (index_type, index_expr),
11190 fold_convert (index_type, minval));
11191 index = expand_normal (index_expr);
11192 do_pending_stack_adjust ();
11194 do_tablejump (index, TYPE_MODE (index_type),
11195 convert_modes (TYPE_MODE (index_type),
11196 TYPE_MODE (TREE_TYPE (range)),
11197 expand_normal (range),
11198 TYPE_UNSIGNED (TREE_TYPE (range))),
11199 table_label, default_label, default_probability);
11200 return 1;
11201 }
11203 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11204 static rtx
11205 const_vector_from_tree (tree exp)
11206 {
11207 rtvec v;
11208 unsigned i;
11209 int units;
11210 tree elt;
11211 enum machine_mode inner, mode;
11213 mode = TYPE_MODE (TREE_TYPE (exp));
11215 if (initializer_zerop (exp))
11216 return CONST0_RTX (mode);
11218 units = GET_MODE_NUNITS (mode);
11219 inner = GET_MODE_INNER (mode);
11221 v = rtvec_alloc (units);
11223 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11224 {
11225 elt = VECTOR_CST_ELT (exp, i);
11227 if (TREE_CODE (elt) == REAL_CST)
11228 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11229 inner);
11230 else if (TREE_CODE (elt) == FIXED_CST)
11231 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11232 inner);
11233 else
11234 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11235 }
11237 return gen_rtx_CONST_VECTOR (mode, v);
11238 }
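/* Editor's illustrative sketch (hypothetical caller, not GCC code; kept
   under "#if 0"): for a VECTOR_CST such as { 1, 2, 3, 4 } of a V4SImode
   type, the routine above yields a CONST_VECTOR whose elements are the
   corresponding CONST_INTs.  */
#if 0
static rtx
example_expand_vector_cst (tree exp)
{
  gcc_assert (TREE_CODE (exp) == VECTOR_CST);
  return const_vector_from_tree (exp);
}
#endif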
11240 /* Build a decl for a personality function given a language prefix. */
11242 tree
11243 build_personality_function (const char *lang)
11244 {
11245 const char *unwind_and_version;
11246 tree decl, type;
11247 char *name;
11249 switch (targetm_common.except_unwind_info (&global_options))
11250 {
11251 case UI_NONE:
11252 return NULL;
11253 case UI_SJLJ:
11254 unwind_and_version = "_sj0";
11255 break;
11256 case UI_DWARF2:
11257 case UI_TARGET:
11258 unwind_and_version = "_v0";
11259 break;
11260 case UI_SEH:
11261 unwind_and_version = "_seh0";
11262 break;
11263 default:
11264 gcc_unreachable ();
11265 }
11267 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11269 type = build_function_type_list (integer_type_node, integer_type_node,
11270 long_long_unsigned_type_node,
11271 ptr_type_node, ptr_type_node, NULL_TREE);
11272 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11273 get_identifier (name), type);
11274 DECL_ARTIFICIAL (decl) = 1;
11275 DECL_EXTERNAL (decl) = 1;
11276 TREE_PUBLIC (decl) = 1;
11278 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11279 are the flags assigned by targetm.encode_section_info. */
11280 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11282 return decl;
11283 }
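/* Editor's illustrative sketch (hypothetical caller, not GCC code; kept
   under "#if 0"): with DWARF2 or target unwinding the suffix is "_v0",
   so a "gxx" prefix yields the familiar __gxx_personality_v0; with SJLJ
   exceptions it would be __gxx_personality_sj0.  */
#if 0
static tree
example_cxx_personality_decl (void)
{
  return build_personality_function ("gxx");
}
#endif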
11285 /* Extracts the personality function of DECL and returns the corresponding
11286 libfunc. */
11288 rtx
11289 get_personality_function (tree decl)
11290 {
11291 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11292 enum eh_personality_kind pk;
11294 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11295 if (pk == eh_personality_none)
11296 return NULL;
11298 if (!personality
11299 && pk == eh_personality_any)
11300 personality = lang_hooks.eh_personality ();
11302 if (pk == eh_personality_lang)
11303 gcc_assert (personality != NULL_TREE);
11305 return XEXP (DECL_RTL (personality), 0);
11306 }
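/* Editor's illustrative sketch (hypothetical caller, not GCC code; kept
   under "#if 0"): fetching the personality routine's SYMBOL_REF for the
   function currently being compiled, e.g. when its exception-handling
   tables are emitted; a NULL result means no personality is needed.  */
#if 0
static rtx
example_current_personality (void)
{
  return get_personality_function (current_function_decl);
}
#endif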
11308 #include "gt-expr.h"