[Vectorizer] Make REDUC_xxx_EXPR tree codes produce a scalar result
[official-gcc.git] / gcc / expr.c
blob e9cabbe412d0c0078ffa901a39345c6c1f13792d
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "input.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "insn-attr.h"
42 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "expr.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "recog.h"
47 #include "reload.h"
48 #include "typeclass.h"
49 #include "toplev.h"
50 #include "langhooks.h"
51 #include "intl.h"
52 #include "tm_p.h"
53 #include "tree-iterator.h"
54 #include "predict.h"
55 #include "dominance.h"
56 #include "cfg.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-expr.h"
61 #include "is-a.h"
62 #include "gimple.h"
63 #include "gimple-ssa.h"
64 #include "cgraph.h"
65 #include "tree-ssanames.h"
66 #include "target.h"
67 #include "common/common-target.h"
68 #include "timevar.h"
69 #include "df.h"
70 #include "diagnostic.h"
71 #include "tree-ssa-live.h"
72 #include "tree-outof-ssa.h"
73 #include "target-globals.h"
74 #include "params.h"
75 #include "tree-ssa-address.h"
76 #include "cfgexpand.h"
77 #include "builtins.h"
79 #ifndef STACK_PUSH_CODE
80 #ifdef STACK_GROWS_DOWNWARD
81 #define STACK_PUSH_CODE PRE_DEC
82 #else
83 #define STACK_PUSH_CODE PRE_INC
84 #endif
85 #endif
88 /* If this is nonzero, we do not bother generating VOLATILE
89 around volatile memory references, and we are willing to
90 output indirect addresses. If cse is to follow, we reject
91 indirect addresses so a useful potential cse is generated;
92 if it is used only once, instruction combination will produce
93 the same indirect address eventually. */
94 int cse_not_expected;
96 /* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98 struct move_by_pieces_d
99 {
100 rtx to;
101 rtx to_addr;
102 int autinc_to;
103 int explicit_inc_to;
104 rtx from;
105 rtx from_addr;
106 int autinc_from;
107 int explicit_inc_from;
108 unsigned HOST_WIDE_INT len;
109 HOST_WIDE_INT offset;
110 int reverse;
111 };
113 /* This structure is used by store_by_pieces to describe the clear to
114 be performed. */
116 struct store_by_pieces_d
117 {
118 rtx to;
119 rtx to_addr;
120 int autinc_to;
121 int explicit_inc_to;
122 unsigned HOST_WIDE_INT len;
123 HOST_WIDE_INT offset;
124 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 void *constfundata;
126 int reverse;
127 };
129 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
130 struct move_by_pieces_d *);
131 static bool block_move_libcall_safe_for_call_parm (void);
132 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
133 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
134 unsigned HOST_WIDE_INT);
135 static tree emit_block_move_libcall_fn (int);
136 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
137 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
138 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
139 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
140 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
141 struct store_by_pieces_d *);
142 static tree clear_storage_libcall_fn (int);
143 static rtx_insn *compress_float_constant (rtx, rtx);
144 static rtx get_subtarget (rtx);
145 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
146 HOST_WIDE_INT, enum machine_mode,
147 tree, int, alias_set_type);
148 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
149 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
150 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
151 enum machine_mode, tree, alias_set_type, bool);
153 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
155 static int is_aligning_offset (const_tree, const_tree);
156 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
157 enum expand_modifier);
158 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
159 static rtx do_store_flag (sepops, rtx, enum machine_mode);
160 #ifdef PUSH_ROUNDING
161 static void emit_single_push_insn (enum machine_mode, rtx, tree);
162 #endif
163 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
164 static rtx const_vector_from_tree (tree);
165 static void write_complex_part (rtx, rtx, bool);
167 /* This macro is used to determine whether move_by_pieces should be called
168 to perform a structure copy. */
169 #ifndef MOVE_BY_PIECES_P
170 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
171 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
172 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
173 #endif
175 /* This macro is used to determine whether clear_by_pieces should be
176 called to clear storage. */
177 #ifndef CLEAR_BY_PIECES_P
178 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
179 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
180 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
181 #endif
183 /* This macro is used to determine whether store_by_pieces should be
184 called to "memset" storage with byte values other than zero. */
185 #ifndef SET_BY_PIECES_P
186 #define SET_BY_PIECES_P(SIZE, ALIGN) \
187 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
188 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
189 #endif
191 /* This macro is used to determine whether store_by_pieces should be
192 called to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
196 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
197 #endif
199 /* This is run to set up which modes can be used
200 directly in memory and to initialize the block move optab. It is run
201 at the beginning of compilation and when the target is reinitialized. */
203 void
204 init_expr_target (void)
206 rtx insn, pat;
207 enum machine_mode mode;
208 int num_clobbers;
209 rtx mem, mem1;
210 rtx reg;
212 /* Try indexing by frame ptr and try by stack ptr.
213 It is known that on the Convex the stack ptr isn't a valid index.
214 With luck, one or the other is valid on any machine. */
215 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
216 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
218 /* A scratch register we can modify in-place below to avoid
219 useless RTL allocations. */
220 reg = gen_rtx_REG (VOIDmode, -1);
222 insn = rtx_alloc (INSN);
223 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
224 PATTERN (insn) = pat;
226 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
227 mode = (enum machine_mode) ((int) mode + 1))
229 int regno;
231 direct_load[(int) mode] = direct_store[(int) mode] = 0;
232 PUT_MODE (mem, mode);
233 PUT_MODE (mem1, mode);
234 PUT_MODE (reg, mode);
236 /* See if there is some register that can be used in this mode and
237 directly loaded or stored from memory. */
239 if (mode != VOIDmode && mode != BLKmode)
240 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
241 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
242 regno++)
244 if (! HARD_REGNO_MODE_OK (regno, mode))
245 continue;
247 SET_REGNO (reg, regno);
249 SET_SRC (pat) = mem;
250 SET_DEST (pat) = reg;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_load[(int) mode] = 1;
254 SET_SRC (pat) = mem1;
255 SET_DEST (pat) = reg;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_load[(int) mode] = 1;
259 SET_SRC (pat) = reg;
260 SET_DEST (pat) = mem;
261 if (recog (pat, insn, &num_clobbers) >= 0)
262 direct_store[(int) mode] = 1;
264 SET_SRC (pat) = reg;
265 SET_DEST (pat) = mem1;
266 if (recog (pat, insn, &num_clobbers) >= 0)
267 direct_store[(int) mode] = 1;
271 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
273 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
274 mode = GET_MODE_WIDER_MODE (mode))
276 enum machine_mode srcmode;
277 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
278 srcmode = GET_MODE_WIDER_MODE (srcmode))
280 enum insn_code ic;
282 ic = can_extend_p (mode, srcmode, 0);
283 if (ic == CODE_FOR_nothing)
284 continue;
286 PUT_MODE (mem, srcmode);
288 if (insn_operand_matches (ic, 1, mem))
289 float_extend_from_mem[mode][srcmode] = true;
294 /* This is run at the start of compiling a function. */
296 void
297 init_expr (void)
299 memset (&crtl->expr, 0, sizeof (crtl->expr));
302 /* Copy data from FROM to TO, where the machine modes are not the same.
303 Both modes may be integer, or both may be floating, or both may be
304 fixed-point.
305 UNSIGNEDP should be nonzero if FROM is an unsigned type.
306 This causes zero-extension instead of sign-extension. */
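/* Editorial usage sketch, not part of the original source (DST and SRC are
   hypothetical pseudos): with DST in DImode and SRC in SImode,

       convert_move (dst, src, 1);

   emits a zero-extension of SRC into DST; passing 0 for UNSIGNEDP would
   request a sign-extension instead.  */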
308 void
309 convert_move (rtx to, rtx from, int unsignedp)
311 enum machine_mode to_mode = GET_MODE (to);
312 enum machine_mode from_mode = GET_MODE (from);
313 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
314 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
315 enum insn_code code;
316 rtx libcall;
318 /* rtx code for making an equivalent value. */
319 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
320 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
323 gcc_assert (to_real == from_real);
324 gcc_assert (to_mode != BLKmode);
325 gcc_assert (from_mode != BLKmode);
327 /* If the source and destination are already the same, then there's
328 nothing to do. */
329 if (to == from)
330 return;
332 /* If FROM is a SUBREG that indicates that we have already done at least
333 the required extension, strip it. We don't handle such SUBREGs as
334 TO here. */
336 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
337 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
338 >= GET_MODE_PRECISION (to_mode))
339 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
340 from = gen_lowpart (to_mode, from), from_mode = to_mode;
342 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
344 if (to_mode == from_mode
345 || (from_mode == VOIDmode && CONSTANT_P (from)))
347 emit_move_insn (to, from);
348 return;
351 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
353 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
355 if (VECTOR_MODE_P (to_mode))
356 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
357 else
358 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
360 emit_move_insn (to, from);
361 return;
364 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
366 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
367 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
368 return;
371 if (to_real)
373 rtx value;
374 rtx_insn *insns;
375 convert_optab tab;
377 gcc_assert ((GET_MODE_PRECISION (from_mode)
378 != GET_MODE_PRECISION (to_mode))
379 || (DECIMAL_FLOAT_MODE_P (from_mode)
380 != DECIMAL_FLOAT_MODE_P (to_mode)));
382 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
383 /* Conversion between decimal float and binary float, same size. */
384 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
385 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
386 tab = sext_optab;
387 else
388 tab = trunc_optab;
390 /* Try converting directly if the insn is supported. */
392 code = convert_optab_handler (tab, to_mode, from_mode);
393 if (code != CODE_FOR_nothing)
395 emit_unop_insn (code, to, from,
396 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
397 return;
400 /* Otherwise use a libcall. */
401 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403 /* Is this conversion implemented yet? */
404 gcc_assert (libcall);
406 start_sequence ();
407 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
408 1, from, from_mode);
409 insns = get_insns ();
410 end_sequence ();
411 emit_libcall_block (insns, to, value,
412 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
413 from)
414 : gen_rtx_FLOAT_EXTEND (to_mode, from));
415 return;
418 /* Handle pointer conversion. */ /* SPEE 900220. */
419 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
421 convert_optab ctab;
423 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
424 ctab = trunc_optab;
425 else if (unsignedp)
426 ctab = zext_optab;
427 else
428 ctab = sext_optab;
430 if (convert_optab_handler (ctab, to_mode, from_mode)
431 != CODE_FOR_nothing)
433 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
434 to, from, UNKNOWN);
435 return;
439 /* Targets are expected to provide conversion insns between PxImode and
440 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
441 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
443 enum machine_mode full_mode
444 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
446 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
447 != CODE_FOR_nothing);
449 if (full_mode != from_mode)
450 from = convert_to_mode (full_mode, from, unsignedp);
451 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
452 to, from, UNKNOWN);
453 return;
455 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
457 rtx new_from;
458 enum machine_mode full_mode
459 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
460 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
461 enum insn_code icode;
463 icode = convert_optab_handler (ctab, full_mode, from_mode);
464 gcc_assert (icode != CODE_FOR_nothing);
466 if (to_mode == full_mode)
468 emit_unop_insn (icode, to, from, UNKNOWN);
469 return;
472 new_from = gen_reg_rtx (full_mode);
473 emit_unop_insn (icode, new_from, from, UNKNOWN);
475 /* else proceed to integer conversions below. */
476 from_mode = full_mode;
477 from = new_from;
480 /* Make sure both are fixed-point modes or both are not. */
481 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
482 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
483 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
485 /* If we widen from_mode to to_mode and they are in the same class,
486 we won't saturate the result.
487 Otherwise, always saturate the result to play safe. */
488 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
489 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
490 expand_fixed_convert (to, from, 0, 0);
491 else
492 expand_fixed_convert (to, from, 0, 1);
493 return;
496 /* Now both modes are integers. */
498 /* Handle expanding beyond a word. */
499 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
500 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
502 rtx_insn *insns;
503 rtx lowpart;
504 rtx fill_value;
505 rtx lowfrom;
506 int i;
507 enum machine_mode lowpart_mode;
508 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
510 /* Try converting directly if the insn is supported. */
511 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
512 != CODE_FOR_nothing)
514 /* If FROM is a SUBREG, put it into a register. Do this
515 so that we always generate the same set of insns for
516 better cse'ing; if an intermediate assignment occurred,
517 we won't be doing the operation directly on the SUBREG. */
518 if (optimize > 0 && GET_CODE (from) == SUBREG)
519 from = force_reg (from_mode, from);
520 emit_unop_insn (code, to, from, equiv_code);
521 return;
523 /* Next, try converting via full word. */
524 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
525 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
526 != CODE_FOR_nothing))
528 rtx word_to = gen_reg_rtx (word_mode);
529 if (REG_P (to))
531 if (reg_overlap_mentioned_p (to, from))
532 from = force_reg (from_mode, from);
533 emit_clobber (to);
535 convert_move (word_to, from, unsignedp);
536 emit_unop_insn (code, to, word_to, equiv_code);
537 return;
540 /* No special multiword conversion insn; do it by hand. */
541 start_sequence ();
543 /* Since we will turn this into a no-conflict block, we must ensure that
544 the source does not overlap the target, so force it into an isolated
545 register when it might. Likewise for any MEM input, since the
546 conversion sequence might require several references to it and we
547 must ensure we're getting the same value every time. */
549 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
550 from = force_reg (from_mode, from);
552 /* Get a copy of FROM widened to a word, if necessary. */
553 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
554 lowpart_mode = word_mode;
555 else
556 lowpart_mode = from_mode;
558 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
560 lowpart = gen_lowpart (lowpart_mode, to);
561 emit_move_insn (lowpart, lowfrom);
563 /* Compute the value to put in each remaining word. */
564 if (unsignedp)
565 fill_value = const0_rtx;
566 else
567 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
568 LT, lowfrom, const0_rtx,
569 lowpart_mode, 0, -1);
571 /* Fill the remaining words. */
572 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
574 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
575 rtx subword = operand_subword (to, index, 1, to_mode);
577 gcc_assert (subword);
579 if (fill_value != subword)
580 emit_move_insn (subword, fill_value);
583 insns = get_insns ();
584 end_sequence ();
586 emit_insn (insns);
587 return;
590 /* Truncating multi-word to a word or less. */
591 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
592 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
594 if (!((MEM_P (from)
595 && ! MEM_VOLATILE_P (from)
596 && direct_load[(int) to_mode]
597 && ! mode_dependent_address_p (XEXP (from, 0),
598 MEM_ADDR_SPACE (from)))
599 || REG_P (from)
600 || GET_CODE (from) == SUBREG))
601 from = force_reg (from_mode, from);
602 convert_move (to, gen_lowpart (word_mode, from), 0);
603 return;
606 /* Now follow all the conversions between integers
607 no more than a word long. */
609 /* For truncation, usually we can just refer to FROM in a narrower mode. */
610 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
611 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
613 if (!((MEM_P (from)
614 && ! MEM_VOLATILE_P (from)
615 && direct_load[(int) to_mode]
616 && ! mode_dependent_address_p (XEXP (from, 0),
617 MEM_ADDR_SPACE (from)))
618 || REG_P (from)
619 || GET_CODE (from) == SUBREG))
620 from = force_reg (from_mode, from);
621 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
622 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
623 from = copy_to_reg (from);
624 emit_move_insn (to, gen_lowpart (to_mode, from));
625 return;
628 /* Handle extension. */
629 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
631 /* Convert directly if that works. */
632 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
633 != CODE_FOR_nothing)
635 emit_unop_insn (code, to, from, equiv_code);
636 return;
638 else
640 enum machine_mode intermediate;
641 rtx tmp;
642 int shift_amount;
644 /* Search for a mode to convert via. */
645 for (intermediate = from_mode; intermediate != VOIDmode;
646 intermediate = GET_MODE_WIDER_MODE (intermediate))
647 if (((can_extend_p (to_mode, intermediate, unsignedp)
648 != CODE_FOR_nothing)
649 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
650 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
651 && (can_extend_p (intermediate, from_mode, unsignedp)
652 != CODE_FOR_nothing))
654 convert_move (to, convert_to_mode (intermediate, from,
655 unsignedp), unsignedp);
656 return;
659 /* No suitable intermediate mode.
660 Generate what we need with shifts. */
661 shift_amount = (GET_MODE_PRECISION (to_mode)
662 - GET_MODE_PRECISION (from_mode));
663 from = gen_lowpart (to_mode, force_reg (from_mode, from));
664 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
665 to, unsignedp);
666 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
667 to, unsignedp);
668 if (tmp != to)
669 emit_move_insn (to, tmp);
670 return;
674 /* Support special truncate insns for certain modes. */
675 if (convert_optab_handler (trunc_optab, to_mode,
676 from_mode) != CODE_FOR_nothing)
678 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
679 to, from, UNKNOWN);
680 return;
683 /* Handle truncation of volatile memrefs, and so on;
684 the things that couldn't be truncated directly,
685 and for which there was no special instruction.
687 ??? Code above formerly short-circuited this, for most integer
688 mode pairs, with a force_reg in from_mode followed by a recursive
689 call to this routine. Appears always to have been wrong. */
690 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
692 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
693 emit_move_insn (to, temp);
694 return;
697 /* Mode combination is not recognized. */
698 gcc_unreachable ();
701 /* Return an rtx for a value that would result
702 from converting X to mode MODE.
703 Both X and MODE may be floating, or both integer.
704 UNSIGNEDP is nonzero if X is an unsigned value.
705 This can be done by referring to a part of X in place
706 or by copying to a new temporary with conversion. */
709 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
711 return convert_modes (mode, VOIDmode, x, unsignedp);
714 /* Return an rtx for a value that would result
715 from converting X from mode OLDMODE to mode MODE.
716 Both modes may be floating, or both integer.
717 UNSIGNEDP is nonzero if X is an unsigned value.
719 This can be done by referring to a part of X in place
720 or by copying to a new temporary with conversion.
722 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
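/* Editorial usage sketch, not part of the original source: unlike
   convert_move, this returns the converted value rather than storing it
   into an existing target, e.g.

       rtx low = convert_modes (QImode, SImode, x, 1);

   which may come back as a lowpart of X or as a freshly converted pseudo.  */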
725 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
727 rtx temp;
729 /* If FROM is a SUBREG that indicates that we have already done at least
730 the required extension, strip it. */
732 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
733 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
734 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
735 x = gen_lowpart (mode, SUBREG_REG (x));
737 if (GET_MODE (x) != VOIDmode)
738 oldmode = GET_MODE (x);
740 if (mode == oldmode)
741 return x;
743 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
745 /* If the caller did not tell us the old mode, then there is not
746 much to do with respect to canonicalization. We have to
747 assume that all the bits are significant. */
748 if (GET_MODE_CLASS (oldmode) != MODE_INT)
749 oldmode = MAX_MODE_INT;
750 wide_int w = wide_int::from (std::make_pair (x, oldmode),
751 GET_MODE_PRECISION (mode),
752 unsignedp ? UNSIGNED : SIGNED);
753 return immed_wide_int_const (w, mode);
756 /* We can do this with a gen_lowpart if both desired and current modes
757 are integer, and this is either a constant integer, a register, or a
758 non-volatile MEM. */
759 if (GET_MODE_CLASS (mode) == MODE_INT
760 && GET_MODE_CLASS (oldmode) == MODE_INT
761 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
762 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
763 || (REG_P (x)
764 && (!HARD_REGISTER_P (x)
765 || HARD_REGNO_MODE_OK (REGNO (x), mode))
766 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
768 return gen_lowpart (mode, x);
770 /* Converting from an integer constant into MODE is always equivalent to a
771 subreg operation. */
772 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
774 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
775 return simplify_gen_subreg (mode, x, oldmode, 0);
778 temp = gen_reg_rtx (mode);
779 convert_move (temp, x, unsignedp);
780 return temp;
783 /* Return the largest alignment we can use for doing a move (or store)
784 of MAX_PIECES. ALIGN is the largest alignment we could use. */
786 static unsigned int
787 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
789 enum machine_mode tmode;
791 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
792 if (align >= GET_MODE_ALIGNMENT (tmode))
793 align = GET_MODE_ALIGNMENT (tmode);
794 else
796 enum machine_mode tmode, xmode;
798 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
799 tmode != VOIDmode;
800 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
801 if (GET_MODE_SIZE (tmode) > max_pieces
802 || SLOW_UNALIGNED_ACCESS (tmode, align))
803 break;
805 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
808 return align;
811 /* Return the widest integer mode no wider than SIZE. If no such mode
812 can be found, return VOIDmode. */
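/* Editorial example, not part of the original source: with the usual
   QImode/HImode/SImode/DImode integer modes, widest_int_mode_for_size (5)
   yields SImode, since the loop keeps the widest mode strictly narrower
   than SIZE.  */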
814 static enum machine_mode
815 widest_int_mode_for_size (unsigned int size)
817 enum machine_mode tmode, mode = VOIDmode;
819 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
820 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
821 if (GET_MODE_SIZE (tmode) < size)
822 mode = tmode;
824 return mode;
827 /* STORE_MAX_PIECES is the number of bytes at a time that we can
828 store efficiently. Due to internal GCC limitations, this is
829 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
830 for an immediate constant. */
832 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
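/* Editorial note, not part of the original source: on a host with a 64-bit
   HOST_WIDE_INT this evaluates to MIN (MOVE_MAX_PIECES, 16), i.e. at most
   16 bytes per constant piece; MOVE_MAX_PIECES itself is target-dependent.  */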
834 /* Determine whether the LEN bytes can be moved by using several move
835 instructions. Return nonzero if a call to move_by_pieces should
836 succeed. */
839 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
840 unsigned int align ATTRIBUTE_UNUSED)
842 return MOVE_BY_PIECES_P (len, align);
845 /* Generate several move instructions to copy LEN bytes from block FROM to
846 block TO. (These are MEM rtx's with BLKmode).
848 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
849 used to push FROM to the stack.
851 ALIGN is the maximum stack alignment we can assume.
853 If ENDP is 0, return TO; if ENDP is 1, return memory at the end, a la
854 mempcpy; and if ENDP is 2, return memory at the end minus one byte, a la
855 stpcpy. */
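/* Editorial usage sketch, not part of the original source (TO and FROM are
   hypothetical BLKmode MEMs): for a small constant length a caller
   typically does

       if (MOVE_BY_PIECES_P (len, align))
         move_by_pieces (to, from, len, align, 0);

   where ENDP == 0 makes the function return TO itself.  */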
858 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
859 unsigned int align, int endp)
861 struct move_by_pieces_d data;
862 enum machine_mode to_addr_mode;
863 enum machine_mode from_addr_mode = get_address_mode (from);
864 rtx to_addr, from_addr = XEXP (from, 0);
865 unsigned int max_size = MOVE_MAX_PIECES + 1;
866 enum insn_code icode;
868 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
870 data.offset = 0;
871 data.from_addr = from_addr;
872 if (to)
874 to_addr_mode = get_address_mode (to);
875 to_addr = XEXP (to, 0);
876 data.to = to;
877 data.autinc_to
878 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
879 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
880 data.reverse
881 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
883 else
885 to_addr_mode = VOIDmode;
886 to_addr = NULL_RTX;
887 data.to = NULL_RTX;
888 data.autinc_to = 1;
889 #ifdef STACK_GROWS_DOWNWARD
890 data.reverse = 1;
891 #else
892 data.reverse = 0;
893 #endif
895 data.to_addr = to_addr;
896 data.from = from;
897 data.autinc_from
898 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
899 || GET_CODE (from_addr) == POST_INC
900 || GET_CODE (from_addr) == POST_DEC);
902 data.explicit_inc_from = 0;
903 data.explicit_inc_to = 0;
904 if (data.reverse) data.offset = len;
905 data.len = len;
907 /* If copying requires more than two move insns,
908 copy addresses to registers (to make displacements shorter)
909 and use post-increment if available. */
910 if (!(data.autinc_from && data.autinc_to)
911 && move_by_pieces_ninsns (len, align, max_size) > 2)
913 /* Find the mode of the largest move...
914 MODE might not be used depending on the definitions of the
915 USE_* macros below. */
916 enum machine_mode mode ATTRIBUTE_UNUSED
917 = widest_int_mode_for_size (max_size);
919 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
921 data.from_addr = copy_to_mode_reg (from_addr_mode,
922 plus_constant (from_addr_mode,
923 from_addr, len));
924 data.autinc_from = 1;
925 data.explicit_inc_from = -1;
927 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
929 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
930 data.autinc_from = 1;
931 data.explicit_inc_from = 1;
933 if (!data.autinc_from && CONSTANT_P (from_addr))
934 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
935 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
937 data.to_addr = copy_to_mode_reg (to_addr_mode,
938 plus_constant (to_addr_mode,
939 to_addr, len));
940 data.autinc_to = 1;
941 data.explicit_inc_to = -1;
943 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
945 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
946 data.autinc_to = 1;
947 data.explicit_inc_to = 1;
949 if (!data.autinc_to && CONSTANT_P (to_addr))
950 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
953 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
955 /* First move what we can in the largest integer mode, then go to
956 successively smaller modes. */
958 while (max_size > 1 && data.len > 0)
960 enum machine_mode mode = widest_int_mode_for_size (max_size);
962 if (mode == VOIDmode)
963 break;
965 icode = optab_handler (mov_optab, mode);
966 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
967 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
969 max_size = GET_MODE_SIZE (mode);
972 /* The code above should have handled everything. */
973 gcc_assert (!data.len);
975 if (endp)
977 rtx to1;
979 gcc_assert (!data.reverse);
980 if (data.autinc_to)
982 if (endp == 2)
984 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
985 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
986 else
987 data.to_addr = copy_to_mode_reg (to_addr_mode,
988 plus_constant (to_addr_mode,
989 data.to_addr,
990 -1));
992 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
993 data.offset);
995 else
997 if (endp == 2)
998 --data.offset;
999 to1 = adjust_address (data.to, QImode, data.offset);
1001 return to1;
1003 else
1004 return data.to;
1007 /* Return number of insns required to move L bytes by pieces.
1008 ALIGN (in bits) is maximum alignment we can assume. */
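/* Editorial example, not part of the original source, assuming a 32-bit
   target with MOVE_MAX_PIECES == 4 and word-aligned operands: moving 7
   bytes takes one SImode, one HImode and one QImode move, so this
   returns 3.  */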
1010 unsigned HOST_WIDE_INT
1011 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1012 unsigned int max_size)
1014 unsigned HOST_WIDE_INT n_insns = 0;
1016 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1018 while (max_size > 1 && l > 0)
1020 enum machine_mode mode;
1021 enum insn_code icode;
1023 mode = widest_int_mode_for_size (max_size);
1025 if (mode == VOIDmode)
1026 break;
1028 icode = optab_handler (mov_optab, mode);
1029 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1030 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1032 max_size = GET_MODE_SIZE (mode);
1035 gcc_assert (!l);
1036 return n_insns;
1039 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1040 with move instructions for mode MODE. GENFUN is the gen_... function
1041 to make a move insn for that mode. DATA has all the other info. */
1043 static void
1044 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1045 struct move_by_pieces_d *data)
1047 unsigned int size = GET_MODE_SIZE (mode);
1048 rtx to1 = NULL_RTX, from1;
1050 while (data->len >= size)
1052 if (data->reverse)
1053 data->offset -= size;
1055 if (data->to)
1057 if (data->autinc_to)
1058 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1059 data->offset);
1060 else
1061 to1 = adjust_address (data->to, mode, data->offset);
1064 if (data->autinc_from)
1065 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1066 data->offset);
1067 else
1068 from1 = adjust_address (data->from, mode, data->offset);
1070 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1071 emit_insn (gen_add2_insn (data->to_addr,
1072 gen_int_mode (-(HOST_WIDE_INT) size,
1073 GET_MODE (data->to_addr))));
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1075 emit_insn (gen_add2_insn (data->from_addr,
1076 gen_int_mode (-(HOST_WIDE_INT) size,
1077 GET_MODE (data->from_addr))));
1079 if (data->to)
1080 emit_insn ((*genfun) (to1, from1));
1081 else
1083 #ifdef PUSH_ROUNDING
1084 emit_single_push_insn (mode, from1, NULL);
1085 #else
1086 gcc_unreachable ();
1087 #endif
1090 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1091 emit_insn (gen_add2_insn (data->to_addr,
1092 gen_int_mode (size,
1093 GET_MODE (data->to_addr))));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr,
1096 gen_int_mode (size,
1097 GET_MODE (data->from_addr))));
1099 if (! data->reverse)
1100 data->offset += size;
1102 data->len -= size;
1106 /* Emit code to move a block Y to a block X. This may be done with
1107 string-move instructions, with multiple scalar move instructions,
1108 or with a library call.
1110 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1111 SIZE is an rtx that says how long they are.
1112 ALIGN is the maximum alignment we can assume they have.
1113 METHOD describes what kind of copy this is, and what mechanisms may be used.
1114 MIN_SIZE is the minimal size of the block to move.
1115 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1116 in unsigned HOST_WIDE_INT, it is the mask of all ones.
1118 Return the address of the new block, if memcpy is called and returns it,
1119 0 otherwise. */
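/* Editorial usage sketch, not part of the original source (operands are
   hypothetical): most callers go through the emit_block_move wrapper below,
   e.g.

       emit_block_move (dst_mem, src_mem, size_rtx, BLOCK_OP_NORMAL);

   which supplies conservative defaults for the alignment and size hints
   before calling emit_block_move_hints.  */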
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size,
1124 unsigned HOST_WIDE_INT min_size,
1125 unsigned HOST_WIDE_INT max_size,
1126 unsigned HOST_WIDE_INT probable_max_size)
1128 bool may_use_call;
1129 rtx retval = 0;
1130 unsigned int align;
1132 gcc_assert (size);
1133 if (CONST_INT_P (size)
1134 && INTVAL (size) == 0)
1135 return 0;
1137 switch (method)
1139 case BLOCK_OP_NORMAL:
1140 case BLOCK_OP_TAILCALL:
1141 may_use_call = true;
1142 break;
1144 case BLOCK_OP_CALL_PARM:
1145 may_use_call = block_move_libcall_safe_for_call_parm ();
1147 /* Make inhibit_defer_pop nonzero around the library call
1148 to force it to pop the arguments right away. */
1149 NO_DEFER_POP;
1150 break;
1152 case BLOCK_OP_NO_LIBCALL:
1153 may_use_call = false;
1154 break;
1156 default:
1157 gcc_unreachable ();
1160 gcc_assert (MEM_P (x) && MEM_P (y));
1161 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1162 gcc_assert (align >= BITS_PER_UNIT);
1164 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1165 block copy is more efficient for other large modes, e.g. DCmode. */
1166 x = adjust_address (x, BLKmode, 0);
1167 y = adjust_address (y, BLKmode, 0);
1169 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1170 can be incorrect is when it comes from __builtin_memcpy. */
1171 if (CONST_INT_P (size))
1173 x = shallow_copy_rtx (x);
1174 y = shallow_copy_rtx (y);
1175 set_mem_size (x, INTVAL (size));
1176 set_mem_size (y, INTVAL (size));
1179 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1180 move_by_pieces (x, y, INTVAL (size), align, 0);
1181 else if (emit_block_move_via_movmem (x, y, size, align,
1182 expected_align, expected_size,
1183 min_size, max_size, probable_max_size))
1185 else if (may_use_call
1186 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1187 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1189 /* Since x and y are passed to a libcall, mark the corresponding
1190 tree EXPR as addressable. */
1191 tree y_expr = MEM_EXPR (y);
1192 tree x_expr = MEM_EXPR (x);
1193 if (y_expr)
1194 mark_addressable (y_expr);
1195 if (x_expr)
1196 mark_addressable (x_expr);
1197 retval = emit_block_move_via_libcall (x, y, size,
1198 method == BLOCK_OP_TAILCALL);
1201 else
1202 emit_block_move_via_loop (x, y, size, align);
1204 if (method == BLOCK_OP_CALL_PARM)
1205 OK_DEFER_POP;
1207 return retval;
1211 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1213 unsigned HOST_WIDE_INT max, min = 0;
1214 if (GET_CODE (size) == CONST_INT)
1215 min = max = UINTVAL (size);
1216 else
1217 max = GET_MODE_MASK (GET_MODE (size));
1218 return emit_block_move_hints (x, y, size, method, 0, -1,
1219 min, max, max);
1222 /* A subroutine of emit_block_move. Returns true if calling the
1223 block move libcall will not clobber any parameters which may have
1224 already been placed on the stack. */
1226 static bool
1227 block_move_libcall_safe_for_call_parm (void)
1229 #if defined (REG_PARM_STACK_SPACE)
1230 tree fn;
1231 #endif
1233 /* If arguments are pushed on the stack, then they're safe. */
1234 if (PUSH_ARGS)
1235 return true;
1237 /* If registers go on the stack anyway, any argument is sure to clobber
1238 an outgoing argument. */
1239 #if defined (REG_PARM_STACK_SPACE)
1240 fn = emit_block_move_libcall_fn (false);
1241 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1242 depend on its argument. */
1243 (void) fn;
1244 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1245 && REG_PARM_STACK_SPACE (fn) != 0)
1246 return false;
1247 #endif
1249 /* If any argument goes in memory, then it might clobber an outgoing
1250 argument. */
1252 CUMULATIVE_ARGS args_so_far_v;
1253 cumulative_args_t args_so_far;
1254 tree fn, arg;
1256 fn = emit_block_move_libcall_fn (false);
1257 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1258 args_so_far = pack_cumulative_args (&args_so_far_v);
1260 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1261 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1263 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1264 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1265 NULL_TREE, true);
1266 if (!tmp || !REG_P (tmp))
1267 return false;
1268 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1269 return false;
1270 targetm.calls.function_arg_advance (args_so_far, mode,
1271 NULL_TREE, true);
1274 return true;
1277 /* A subroutine of emit_block_move. Expand a movmem pattern;
1278 return true if successful. */
1280 static bool
1281 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1282 unsigned int expected_align, HOST_WIDE_INT expected_size,
1283 unsigned HOST_WIDE_INT min_size,
1284 unsigned HOST_WIDE_INT max_size,
1285 unsigned HOST_WIDE_INT probable_max_size)
1287 int save_volatile_ok = volatile_ok;
1288 enum machine_mode mode;
1290 if (expected_align < align)
1291 expected_align = align;
1292 if (expected_size != -1)
1294 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1295 expected_size = probable_max_size;
1296 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1297 expected_size = min_size;
1300 /* Since this is a move insn, we don't care about volatility. */
1301 volatile_ok = 1;
1303 /* Try the most limited insn first, because there's no point
1304 including more than one in the machine description unless
1305 the more limited one has some advantage. */
1307 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1308 mode = GET_MODE_WIDER_MODE (mode))
1310 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1312 if (code != CODE_FOR_nothing
1313 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1314 here because if SIZE is less than the mode mask, as it is
1315 returned by the macro, it will definitely be less than the
1316 actual mode mask. Since SIZE is within the Pmode address
1317 space, we limit MODE to Pmode. */
1318 && ((CONST_INT_P (size)
1319 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1320 <= (GET_MODE_MASK (mode) >> 1)))
1321 || max_size <= (GET_MODE_MASK (mode) >> 1)
1322 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1324 struct expand_operand ops[9];
1325 unsigned int nops;
1327 /* ??? When called via emit_block_move_for_call, it'd be
1328 nice if there were some way to inform the backend, so
1329 that it doesn't fail the expansion because it thinks
1330 emitting the libcall would be more efficient. */
1331 nops = insn_data[(int) code].n_generator_args;
1332 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1334 create_fixed_operand (&ops[0], x);
1335 create_fixed_operand (&ops[1], y);
1336 /* The check above guarantees that this size conversion is valid. */
1337 create_convert_operand_to (&ops[2], size, mode, true);
1338 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1339 if (nops >= 6)
1341 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1342 create_integer_operand (&ops[5], expected_size);
1344 if (nops >= 8)
1346 create_integer_operand (&ops[6], min_size);
1347 /* If we cannot represent the maximal size,
1348 make the parameter NULL. */
1349 if ((HOST_WIDE_INT) max_size != -1)
1350 create_integer_operand (&ops[7], max_size);
1351 else
1352 create_fixed_operand (&ops[7], NULL);
1354 if (nops == 9)
1356 /* If we cannot represent the maximal size,
1357 make the parameter NULL. */
1358 if ((HOST_WIDE_INT) probable_max_size != -1)
1359 create_integer_operand (&ops[8], probable_max_size);
1360 else
1361 create_fixed_operand (&ops[8], NULL);
1363 if (maybe_expand_insn (code, nops, ops))
1365 volatile_ok = save_volatile_ok;
1366 return true;
1371 volatile_ok = save_volatile_ok;
1372 return false;
1375 /* A subroutine of emit_block_move. Expand a call to memcpy.
1376 Return the return value from memcpy, 0 otherwise. */
1379 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1381 rtx dst_addr, src_addr;
1382 tree call_expr, fn, src_tree, dst_tree, size_tree;
1383 enum machine_mode size_mode;
1384 rtx retval;
1386 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1387 pseudos. We can then place those new pseudos into a VAR_DECL and
1388 use them later. */
1390 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1391 src_addr = copy_addr_to_reg (XEXP (src, 0));
1393 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1394 src_addr = convert_memory_address (ptr_mode, src_addr);
1396 dst_tree = make_tree (ptr_type_node, dst_addr);
1397 src_tree = make_tree (ptr_type_node, src_addr);
1399 size_mode = TYPE_MODE (sizetype);
1401 size = convert_to_mode (size_mode, size, 1);
1402 size = copy_to_mode_reg (size_mode, size);
1404 /* It is incorrect to use the libcall calling conventions to call
1405 memcpy in this context. This could be a user call to memcpy and
1406 the user may wish to examine the return value from memcpy. For
1407 targets where libcalls and normal calls have different conventions
1408 for returning pointers, we could end up generating incorrect code. */
1410 size_tree = make_tree (sizetype, size);
1412 fn = emit_block_move_libcall_fn (true);
1413 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1414 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1416 retval = expand_normal (call_expr);
1418 return retval;
1421 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1422 for the function we use for block copies. */
1424 static GTY(()) tree block_move_fn;
1426 void
1427 init_block_move_fn (const char *asmspec)
1429 if (!block_move_fn)
1431 tree args, fn, attrs, attr_args;
1433 fn = get_identifier ("memcpy");
1434 args = build_function_type_list (ptr_type_node, ptr_type_node,
1435 const_ptr_type_node, sizetype,
1436 NULL_TREE);
1438 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1439 DECL_EXTERNAL (fn) = 1;
1440 TREE_PUBLIC (fn) = 1;
1441 DECL_ARTIFICIAL (fn) = 1;
1442 TREE_NOTHROW (fn) = 1;
1443 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1444 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1446 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1447 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1449 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1451 block_move_fn = fn;
1454 if (asmspec)
1455 set_user_assembler_name (block_move_fn, asmspec);
1458 static tree
1459 emit_block_move_libcall_fn (int for_call)
1461 static bool emitted_extern;
1463 if (!block_move_fn)
1464 init_block_move_fn (NULL);
1466 if (for_call && !emitted_extern)
1468 emitted_extern = true;
1469 make_decl_rtl (block_move_fn);
1472 return block_move_fn;
1475 /* A subroutine of emit_block_move. Copy the data via an explicit
1476 loop. This is used only when libcalls are forbidden. */
1477 /* ??? It'd be nice to copy in hunks larger than QImode. */
1479 static void
1480 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1481 unsigned int align ATTRIBUTE_UNUSED)
1483 rtx_code_label *cmp_label, *top_label;
1484 rtx iter, x_addr, y_addr, tmp;
1485 enum machine_mode x_addr_mode = get_address_mode (x);
1486 enum machine_mode y_addr_mode = get_address_mode (y);
1487 enum machine_mode iter_mode;
1489 iter_mode = GET_MODE (size);
1490 if (iter_mode == VOIDmode)
1491 iter_mode = word_mode;
1493 top_label = gen_label_rtx ();
1494 cmp_label = gen_label_rtx ();
1495 iter = gen_reg_rtx (iter_mode);
1497 emit_move_insn (iter, const0_rtx);
1499 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1500 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1501 do_pending_stack_adjust ();
1503 emit_jump (cmp_label);
1504 emit_label (top_label);
1506 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1507 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1509 if (x_addr_mode != y_addr_mode)
1510 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1511 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1513 x = change_address (x, QImode, x_addr);
1514 y = change_address (y, QImode, y_addr);
1516 emit_move_insn (x, y);
1518 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1519 true, OPTAB_LIB_WIDEN);
1520 if (tmp != iter)
1521 emit_move_insn (iter, tmp);
1523 emit_label (cmp_label);
1525 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1526 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1529 /* Copy all or part of a value X into registers starting at REGNO.
1530 The number of registers to be filled is NREGS. */
1532 void
1533 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1535 int i;
1536 #ifdef HAVE_load_multiple
1537 rtx pat;
1538 rtx_insn *last;
1539 #endif
1541 if (nregs == 0)
1542 return;
1544 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1545 x = validize_mem (force_const_mem (mode, x));
1547 /* See if the machine can do this with a load multiple insn. */
1548 #ifdef HAVE_load_multiple
1549 if (HAVE_load_multiple)
1551 last = get_last_insn ();
1552 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1553 GEN_INT (nregs));
1554 if (pat)
1556 emit_insn (pat);
1557 return;
1559 else
1560 delete_insns_since (last);
1562 #endif
1564 for (i = 0; i < nregs; i++)
1565 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1566 operand_subword_force (x, i, mode));
1569 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1570 The number of registers to be filled is NREGS. */
1572 void
1573 move_block_from_reg (int regno, rtx x, int nregs)
1575 int i;
1577 if (nregs == 0)
1578 return;
1580 /* See if the machine can do this with a store multiple insn. */
1581 #ifdef HAVE_store_multiple
1582 if (HAVE_store_multiple)
1584 rtx_insn *last = get_last_insn ();
1585 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1586 GEN_INT (nregs));
1587 if (pat)
1589 emit_insn (pat);
1590 return;
1592 else
1593 delete_insns_since (last);
1595 #endif
1597 for (i = 0; i < nregs; i++)
1599 rtx tem = operand_subword (x, i, 1, BLKmode);
1601 gcc_assert (tem);
1603 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1607 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1608 ORIG, where ORIG is a non-consecutive group of registers represented by
1609 a PARALLEL. The clone is identical to the original except that the
1610 original set of registers is replaced by a new set of pseudo registers.
1611 The new set has the same modes as the original set. */
1614 gen_group_rtx (rtx orig)
1616 int i, length;
1617 rtx *tmps;
1619 gcc_assert (GET_CODE (orig) == PARALLEL);
1621 length = XVECLEN (orig, 0);
1622 tmps = XALLOCAVEC (rtx, length);
1624 /* Skip a NULL entry in first slot. */
1625 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1627 if (i)
1628 tmps[0] = 0;
1630 for (; i < length; i++)
1632 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1633 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1635 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1638 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1641 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1642 except that values are placed in TMPS[i], and must later be moved
1643 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1645 static void
1646 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1648 rtx src;
1649 int start, i;
1650 enum machine_mode m = GET_MODE (orig_src);
1652 gcc_assert (GET_CODE (dst) == PARALLEL);
1654 if (m != VOIDmode
1655 && !SCALAR_INT_MODE_P (m)
1656 && !MEM_P (orig_src)
1657 && GET_CODE (orig_src) != CONCAT)
1659 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1660 if (imode == BLKmode)
1661 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1662 else
1663 src = gen_reg_rtx (imode);
1664 if (imode != BLKmode)
1665 src = gen_lowpart (GET_MODE (orig_src), src);
1666 emit_move_insn (src, orig_src);
1667 /* ...and back again. */
1668 if (imode != BLKmode)
1669 src = gen_lowpart (imode, src);
1670 emit_group_load_1 (tmps, dst, src, type, ssize);
1671 return;
1674 /* Check for a NULL entry, used to indicate that the parameter goes
1675 both on the stack and in registers. */
1676 if (XEXP (XVECEXP (dst, 0, 0), 0))
1677 start = 0;
1678 else
1679 start = 1;
1681 /* Process the pieces. */
1682 for (i = start; i < XVECLEN (dst, 0); i++)
1684 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1685 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1686 unsigned int bytelen = GET_MODE_SIZE (mode);
1687 int shift = 0;
1689 /* Handle trailing fragments that run over the size of the struct. */
1690 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1692 /* Arrange to shift the fragment to where it belongs.
1693 extract_bit_field loads to the lsb of the reg. */
1694 if (
1695 #ifdef BLOCK_REG_PADDING
1696 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1697 == (BYTES_BIG_ENDIAN ? upward : downward)
1698 #else
1699 BYTES_BIG_ENDIAN
1700 #endif
1702 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1703 bytelen = ssize - bytepos;
1704 gcc_assert (bytelen > 0);
1707 /* If we won't be loading directly from memory, protect the real source
1708 from strange tricks we might play; but make sure that the source can
1709 be loaded directly into the destination. */
1710 src = orig_src;
1711 if (!MEM_P (orig_src)
1712 && (!CONSTANT_P (orig_src)
1713 || (GET_MODE (orig_src) != mode
1714 && GET_MODE (orig_src) != VOIDmode)))
1716 if (GET_MODE (orig_src) == VOIDmode)
1717 src = gen_reg_rtx (mode);
1718 else
1719 src = gen_reg_rtx (GET_MODE (orig_src));
1721 emit_move_insn (src, orig_src);
1724 /* Optimize the access just a bit. */
1725 if (MEM_P (src)
1726 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1727 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1728 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1729 && bytelen == GET_MODE_SIZE (mode))
1731 tmps[i] = gen_reg_rtx (mode);
1732 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1734 else if (COMPLEX_MODE_P (mode)
1735 && GET_MODE (src) == mode
1736 && bytelen == GET_MODE_SIZE (mode))
1737 /* Let emit_move_complex do the bulk of the work. */
1738 tmps[i] = src;
1739 else if (GET_CODE (src) == CONCAT)
1741 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1742 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1744 if ((bytepos == 0 && bytelen == slen0)
1745 || (bytepos != 0 && bytepos + bytelen <= slen))
1747 /* The following assumes that the concatenated objects all
1748 have the same size. In this case, a simple calculation
1749 can be used to determine the object and the bit field
1750 to be extracted. */
1751 tmps[i] = XEXP (src, bytepos / slen0);
1752 if (! CONSTANT_P (tmps[i])
1753 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1754 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1755 (bytepos % slen0) * BITS_PER_UNIT,
1756 1, NULL_RTX, mode, mode);
1758 else
1760 rtx mem;
1762 gcc_assert (!bytepos);
1763 mem = assign_stack_temp (GET_MODE (src), slen);
1764 emit_move_insn (mem, src);
1765 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1766 0, 1, NULL_RTX, mode, mode);
1769 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1770 SIMD register, which is currently broken. Until we get GCC
1771 to emit proper RTL for these cases, let's dump to memory. */
1772 else if (VECTOR_MODE_P (GET_MODE (dst))
1773 && REG_P (src))
1775 int slen = GET_MODE_SIZE (GET_MODE (src));
1776 rtx mem;
1778 mem = assign_stack_temp (GET_MODE (src), slen);
1779 emit_move_insn (mem, src);
1780 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1782 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1783 && XVECLEN (dst, 0) > 1)
1784 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1785 else if (CONSTANT_P (src))
1787 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1789 if (len == ssize)
1790 tmps[i] = src;
1791 else
1793 rtx first, second;
1795 /* TODO: const_wide_int can have sizes other than this... */
1796 gcc_assert (2 * len == ssize);
1797 split_double (src, &first, &second);
1798 if (i)
1799 tmps[i] = second;
1800 else
1801 tmps[i] = first;
1804 else if (REG_P (src) && GET_MODE (src) == mode)
1805 tmps[i] = src;
1806 else
1807 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1808 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1809 mode, mode);
1811 if (shift)
1812 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1813 shift, tmps[i], 0);
1817 /* Emit code to move a block SRC of type TYPE to a block DST,
1818 where DST is non-consecutive registers represented by a PARALLEL.
1819 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1820 if not known. */
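/* Editorial usage sketch, not part of the original source (operands are
   hypothetical): DST is a PARALLEL such as a target describes for a value
   split across several registers, e.g.

       emit_group_load (dst_parallel, src_mem, type, int_size_in_bytes (type));

   loads the pieces of SRC described by that PARALLEL into the listed regs.  */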
1822 void
1823 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1825 rtx *tmps;
1826 int i;
1828 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1829 emit_group_load_1 (tmps, dst, src, type, ssize);
1831 /* Copy the extracted pieces into the proper (probable) hard regs. */
1832 for (i = 0; i < XVECLEN (dst, 0); i++)
1834 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1835 if (d == NULL)
1836 continue;
1837 emit_move_insn (d, tmps[i]);
1841 /* Similar, but load SRC into new pseudos in a format that looks like
1842 PARALLEL. This can later be fed to emit_group_move to get things
1843 in the right place. */
1846 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1848 rtvec vec;
1849 int i;
1851 vec = rtvec_alloc (XVECLEN (parallel, 0));
1852 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1854 /* Convert the vector to look just like the original PARALLEL, except
1855 with the computed values. */
1856 for (i = 0; i < XVECLEN (parallel, 0); i++)
1858 rtx e = XVECEXP (parallel, 0, i);
1859 rtx d = XEXP (e, 0);
1861 if (d)
1863 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1864 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1866 RTVEC_ELT (vec, i) = e;
1869 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1872 /* Emit code to move a block SRC to block DST, where SRC and DST are
1873 non-consecutive groups of registers, each represented by a PARALLEL. */
1875 void
1876 emit_group_move (rtx dst, rtx src)
1878 int i;
1880 gcc_assert (GET_CODE (src) == PARALLEL
1881 && GET_CODE (dst) == PARALLEL
1882 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1884 /* Skip first entry if NULL. */
1885 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1886 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1887 XEXP (XVECEXP (src, 0, i), 0));
1890 /* Move a group of registers represented by a PARALLEL into pseudos. */
1893 emit_group_move_into_temps (rtx src)
1895 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1896 int i;
1898 for (i = 0; i < XVECLEN (src, 0); i++)
1900 rtx e = XVECEXP (src, 0, i);
1901 rtx d = XEXP (e, 0);
1903 if (d)
1904 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1905 RTVEC_ELT (vec, i) = e;
1908 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1911 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1912 where SRC is non-consecutive registers represented by a PARALLEL.
1913 SSIZE represents the total size of block ORIG_DST, or -1 if not
1914 known. */
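/* Illustrative pairing (names are placeholders): after a call whose
   return value is described by such a PARALLEL of hard registers, the
   value can be spilled into its memory location with

     emit_group_store (target_mem, retval_parallel, type,
                       int_size_in_bytes (type));

   emit_group_load above performs the inverse direction.  */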
1916 void
1917 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1919 rtx *tmps, dst;
1920 int start, finish, i;
1921 enum machine_mode m = GET_MODE (orig_dst);
1923 gcc_assert (GET_CODE (src) == PARALLEL);
1925 if (!SCALAR_INT_MODE_P (m)
1926 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1928 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1929 if (imode == BLKmode)
1930 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1931 else
1932 dst = gen_reg_rtx (imode);
1933 emit_group_store (dst, src, type, ssize);
1934 if (imode != BLKmode)
1935 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1936 emit_move_insn (orig_dst, dst);
1937 return;
1940 /* Check for a NULL entry, used to indicate that the parameter goes
1941 both on the stack and in registers. */
1942 if (XEXP (XVECEXP (src, 0, 0), 0))
1943 start = 0;
1944 else
1945 start = 1;
1946 finish = XVECLEN (src, 0);
1948 tmps = XALLOCAVEC (rtx, finish);
1950 /* Copy the (probable) hard regs into pseudos. */
1951 for (i = start; i < finish; i++)
1953 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1954 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1956 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1957 emit_move_insn (tmps[i], reg);
1959 else
1960 tmps[i] = reg;
1963 /* If we won't be storing directly into memory, protect the real destination
1964 from strange tricks we might play. */
1965 dst = orig_dst;
1966 if (GET_CODE (dst) == PARALLEL)
1968 rtx temp;
1970 /* We can get a PARALLEL dst if there is a conditional expression in
1971 a return statement. In that case, the dst and src are the same,
1972 so no action is necessary. */
1973 if (rtx_equal_p (dst, src))
1974 return;
1976 /* It is unclear if we can ever reach here, but we may as well handle
1977 it. Allocate a temporary, and split this into a store/load to/from
1978 the temporary. */
1979 temp = assign_stack_temp (GET_MODE (dst), ssize);
1980 emit_group_store (temp, src, type, ssize);
1981 emit_group_load (dst, temp, type, ssize);
1982 return;
1984 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1986 enum machine_mode outer = GET_MODE (dst);
1987 enum machine_mode inner;
1988 HOST_WIDE_INT bytepos;
1989 bool done = false;
1990 rtx temp;
1992 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1993 dst = gen_reg_rtx (outer);
1995 /* Make life a bit easier for combine. */
1996 /* If the first element of the vector is the low part
1997 of the destination mode, use a paradoxical subreg to
1998 initialize the destination. */
1999 if (start < finish)
2001 inner = GET_MODE (tmps[start]);
2002 bytepos = subreg_lowpart_offset (inner, outer);
2003 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2005 temp = simplify_gen_subreg (outer, tmps[start],
2006 inner, 0);
2007 if (temp)
2009 emit_move_insn (dst, temp);
2010 done = true;
2011 start++;
2016 /* If the first element wasn't the low part, try the last. */
2017 if (!done
2018 && start < finish - 1)
2020 inner = GET_MODE (tmps[finish - 1]);
2021 bytepos = subreg_lowpart_offset (inner, outer);
2022 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2024 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2025 inner, 0);
2026 if (temp)
2028 emit_move_insn (dst, temp);
2029 done = true;
2030 finish--;
2035 /* Otherwise, simply initialize the result to zero. */
2036 if (!done)
2037 emit_move_insn (dst, CONST0_RTX (outer));
2040 /* Process the pieces. */
2041 for (i = start; i < finish; i++)
2043 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2044 enum machine_mode mode = GET_MODE (tmps[i]);
2045 unsigned int bytelen = GET_MODE_SIZE (mode);
2046 unsigned int adj_bytelen;
2047 rtx dest = dst;
2049 /* Handle trailing fragments that run over the size of the struct. */
2050 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2051 adj_bytelen = ssize - bytepos;
2052 else
2053 adj_bytelen = bytelen;
2055 if (GET_CODE (dst) == CONCAT)
2057 if (bytepos + adj_bytelen
2058 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2059 dest = XEXP (dst, 0);
2060 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2062 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2063 dest = XEXP (dst, 1);
2065 else
2067 enum machine_mode dest_mode = GET_MODE (dest);
2068 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2070 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2072 if (GET_MODE_ALIGNMENT (dest_mode)
2073 >= GET_MODE_ALIGNMENT (tmp_mode))
2075 dest = assign_stack_temp (dest_mode,
2076 GET_MODE_SIZE (dest_mode));
2077 emit_move_insn (adjust_address (dest,
2078 tmp_mode,
2079 bytepos),
2080 tmps[i]);
2081 dst = dest;
2083 else
2085 dest = assign_stack_temp (tmp_mode,
2086 GET_MODE_SIZE (tmp_mode));
2087 emit_move_insn (dest, tmps[i]);
2088 dst = adjust_address (dest, dest_mode, bytepos);
2090 break;
2094 /* Handle trailing fragments that run over the size of the struct. */
2095 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2097 /* store_bit_field always takes its value from the lsb.
2098 Move the fragment to the lsb if it's not already there. */
2099 if (
2100 #ifdef BLOCK_REG_PADDING
2101 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2102 == (BYTES_BIG_ENDIAN ? upward : downward)
2103 #else
2104 BYTES_BIG_ENDIAN
2105 #endif
2108 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2109 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2110 shift, tmps[i], 0);
2113 /* Make sure not to write past the end of the struct. */
2114 store_bit_field (dest,
2115 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2116 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2117 VOIDmode, tmps[i]);
2120 /* Optimize the access just a bit. */
2121 else if (MEM_P (dest)
2122 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2123 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2124 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2125 && bytelen == GET_MODE_SIZE (mode))
2126 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2128 else
2129 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2130 0, 0, mode, tmps[i]);
2133 /* Copy from the pseudo into the (probable) hard reg. */
2134 if (orig_dst != dst)
2135 emit_move_insn (orig_dst, dst);
2138 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2139 of the value stored in X. */
2142 maybe_emit_group_store (rtx x, tree type)
2144 enum machine_mode mode = TYPE_MODE (type);
2145 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2146 if (GET_CODE (x) == PARALLEL)
2148 rtx result = gen_reg_rtx (mode);
2149 emit_group_store (result, x, type, int_size_in_bytes (type));
2150 return result;
2152 return x;
2155 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2157 This is used on targets that return BLKmode values in registers. */
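/* Usage sketch (names are placeholders): a caller that received a
   BLKmode return value in hard register RETREG typically writes

     copy_blkmode_from_reg (target, retreg, TREE_TYPE (exp));

   where TARGET is the MEM (or occasionally the register) that is to
   hold the object.  */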
2159 void
2160 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2162 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2163 rtx src = NULL, dst = NULL;
2164 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2165 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2166 enum machine_mode mode = GET_MODE (srcreg);
2167 enum machine_mode tmode = GET_MODE (target);
2168 enum machine_mode copy_mode;
2170 /* BLKmode registers created in the back-end shouldn't have survived. */
2171 gcc_assert (mode != BLKmode);
2173 /* If the structure doesn't take up a whole number of words, see whether
2174 SRCREG is padded on the left or on the right. If it's on the left,
2175 set PADDING_CORRECTION to the number of bits to skip.
2177 In most ABIs, the structure will be returned at the least significant end of
2178 the register, which translates to right padding on little-endian
2179 targets and left padding on big-endian targets. The opposite
2180 holds if the structure is returned at the most significant
2181 end of the register. */
2182 if (bytes % UNITS_PER_WORD != 0
2183 && (targetm.calls.return_in_msb (type)
2184 ? !BYTES_BIG_ENDIAN
2185 : BYTES_BIG_ENDIAN))
2186 padding_correction
2187 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2189 /* We can use a single move if we have an exact mode for the size. */
2190 else if (MEM_P (target)
2191 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2192 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2193 && bytes == GET_MODE_SIZE (mode))
2195 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2196 return;
2199 /* And if we additionally have the same mode for a register. */
2200 else if (REG_P (target)
2201 && GET_MODE (target) == mode
2202 && bytes == GET_MODE_SIZE (mode))
2204 emit_move_insn (target, srcreg);
2205 return;
2208 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2209 into a new pseudo which is a full word. */
2210 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2212 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2213 mode = word_mode;
2216 /* Copy the structure BITSIZE bits at a time. If the target lives in
2217 memory, take care of not reading/writing past its end by selecting
2218 a copy mode suited to BITSIZE. This should always be possible given
2219 how it is computed.
2221 If the target lives in a register, make sure not to select a copy mode
2222 larger than the mode of the register.
2224 We could probably emit more efficient code for machines which do not use
2225 strict alignment, but it doesn't seem worth the effort at the current
2226 time. */
2228 copy_mode = word_mode;
2229 if (MEM_P (target))
2231 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2232 if (mem_mode != BLKmode)
2233 copy_mode = mem_mode;
2235 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2236 copy_mode = tmode;
2238 for (bitpos = 0, xbitpos = padding_correction;
2239 bitpos < bytes * BITS_PER_UNIT;
2240 bitpos += bitsize, xbitpos += bitsize)
2242 /* We need a new source operand each time xbitpos is on a
2243 word boundary and when xbitpos == padding_correction
2244 (the first time through). */
2245 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2246 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2248 /* We need a new destination operand each time bitpos is on
2249 a word boundary. */
2250 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2251 dst = target;
2252 else if (bitpos % BITS_PER_WORD == 0)
2253 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2255 /* Use xbitpos for the source extraction (right justified) and
2256 bitpos for the destination store (left justified). */
2257 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2258 extract_bit_field (src, bitsize,
2259 xbitpos % BITS_PER_WORD, 1,
2260 NULL_RTX, copy_mode, copy_mode));
2264 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2265 register if it contains any data, otherwise return null.
2267 This is used on targets that return BLKmode values in registers. */
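/* Usage sketch (names are placeholders): when expanding a `return' of
   a small BLKmode value on such a target, one would do roughly

     rtx val = copy_blkmode_to_reg (GET_MODE (result_reg), retval_expr);
     if (val)
       emit_move_insn (result_reg, val);

   relying on the NULL return value for empty structures.  */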
2270 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2272 int i, n_regs;
2273 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2274 unsigned int bitsize;
2275 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2276 enum machine_mode dst_mode;
2278 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2280 x = expand_normal (src);
2282 bytes = int_size_in_bytes (TREE_TYPE (src));
2283 if (bytes == 0)
2284 return NULL_RTX;
2286 /* If the structure doesn't take up a whole number of words, see
2287 whether the register value should be padded on the left or on
2288 the right. Set PADDING_CORRECTION to the number of padding
2289 bits needed on the left side.
2291 In most ABIs, the structure will be returned at the least significant end of
2292 the register, which translates to right padding on little-endian
2293 targets and left padding on big-endian targets. The opposite
2294 holds if the structure is returned at the most significant
2295 end of the register. */
2296 if (bytes % UNITS_PER_WORD != 0
2297 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2298 ? !BYTES_BIG_ENDIAN
2299 : BYTES_BIG_ENDIAN))
2300 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2301 * BITS_PER_UNIT));
2303 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2304 dst_words = XALLOCAVEC (rtx, n_regs);
2305 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2307 /* Copy the structure BITSIZE bits at a time. */
2308 for (bitpos = 0, xbitpos = padding_correction;
2309 bitpos < bytes * BITS_PER_UNIT;
2310 bitpos += bitsize, xbitpos += bitsize)
2312 /* We need a new destination pseudo each time xbitpos is
2313 on a word boundary and when xbitpos == padding_correction
2314 (the first time through). */
2315 if (xbitpos % BITS_PER_WORD == 0
2316 || xbitpos == padding_correction)
2318 /* Generate an appropriate register. */
2319 dst_word = gen_reg_rtx (word_mode);
2320 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2322 /* Clear the destination before we move anything into it. */
2323 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2326 /* We need a new source operand each time bitpos is on a word
2327 boundary. */
2328 if (bitpos % BITS_PER_WORD == 0)
2329 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2331 /* Use bitpos for the source extraction (left justified) and
2332 xbitpos for the destination store (right justified). */
2333 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2334 0, 0, word_mode,
2335 extract_bit_field (src_word, bitsize,
2336 bitpos % BITS_PER_WORD, 1,
2337 NULL_RTX, word_mode, word_mode));
2340 if (mode == BLKmode)
2342 /* Find the smallest integer mode large enough to hold the
2343 entire structure. */
2344 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2345 mode != VOIDmode;
2346 mode = GET_MODE_WIDER_MODE (mode))
2347 /* Have we found a large enough mode? */
2348 if (GET_MODE_SIZE (mode) >= bytes)
2349 break;
2351 /* A suitable mode should have been found. */
2352 gcc_assert (mode != VOIDmode);
2355 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2356 dst_mode = word_mode;
2357 else
2358 dst_mode = mode;
2359 dst = gen_reg_rtx (dst_mode);
2361 for (i = 0; i < n_regs; i++)
2362 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2364 if (mode != dst_mode)
2365 dst = gen_lowpart (mode, dst);
2367 return dst;
2370 /* Add a USE expression for REG to the (possibly empty) list pointed
2371 to by CALL_FUSAGE. REG must denote a hard register. */
2373 void
2374 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2376 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2378 *call_fusage
2379 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2382 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2383 to by CALL_FUSAGE. REG must denote a hard register. */
2385 void
2386 clobber_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2388 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2390 *call_fusage
2391 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2394 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2395 starting at REGNO. All of these registers must be hard registers. */
2397 void
2398 use_regs (rtx *call_fusage, int regno, int nregs)
2400 int i;
2402 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2404 for (i = 0; i < nregs; i++)
2405 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2408 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2409 PARALLEL REGS. This is for calls that pass values in multiple
2410 non-contiguous locations. The Irix 6 ABI has examples of this. */
2412 void
2413 use_group_regs (rtx *call_fusage, rtx regs)
2415 int i;
2417 for (i = 0; i < XVECLEN (regs, 0); i++)
2419 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2421 /* A NULL entry means the parameter goes both on the stack and in
2422 registers. This can also be a MEM for targets that pass values
2423 partially on the stack and partially in registers. */
2424 if (reg != 0 && REG_P (reg))
2425 use_reg (call_fusage, reg);
2429 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2430 assignment and the code of the expression on the RHS is CODE. Return
2431 NULL otherwise. */
2433 static gimple
2434 get_def_for_expr (tree name, enum tree_code code)
2436 gimple def_stmt;
2438 if (TREE_CODE (name) != SSA_NAME)
2439 return NULL;
2441 def_stmt = get_gimple_for_ssa_name (name);
2442 if (!def_stmt
2443 || gimple_assign_rhs_code (def_stmt) != code)
2444 return NULL;
2446 return def_stmt;
2449 #ifdef HAVE_conditional_move
2450 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2451 assignment and the class of the expression on the RHS is CLASS. Return
2452 NULL otherwise. */
2454 static gimple
2455 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2457 gimple def_stmt;
2459 if (TREE_CODE (name) != SSA_NAME)
2460 return NULL;
2462 def_stmt = get_gimple_for_ssa_name (name);
2463 if (!def_stmt
2464 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2465 return NULL;
2467 return def_stmt;
2469 #endif
2472 /* Determine whether the LEN bytes generated by CONSTFUN can be
2473 stored to memory using several move instructions. CONSTFUNDATA is
2474 a pointer which will be passed as argument in every CONSTFUN call.
2475 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2476 a memset operation and false if it's a copy of a constant string.
2477 Return nonzero if a call to store_by_pieces should succeed. */
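/* A CONSTFUN callback maps (CONSTFUNDATA, OFFSET, MODE) to the constant
   rtx for that piece; clear_by_pieces_1 further below is the simplest
   example.  Purely as a sketch (not a function in this file), a
   memset-style callback could replicate a byte across the piece:

     static rtx
     example_memset_value (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                           enum machine_mode mode)
     {
       unsigned HOST_WIDE_INT v
         = *(unsigned char *) data * ((unsigned HOST_WIDE_INT) -1 / 0xff);
       return gen_int_mode (v, mode);
     }

   Callers are expected to check can_store_by_pieces before calling
   store_by_pieces with the same arguments.  */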
2480 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2481 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2482 void *constfundata, unsigned int align, bool memsetp)
2484 unsigned HOST_WIDE_INT l;
2485 unsigned int max_size;
2486 HOST_WIDE_INT offset = 0;
2487 enum machine_mode mode;
2488 enum insn_code icode;
2489 int reverse;
2490 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2491 rtx cst ATTRIBUTE_UNUSED;
2493 if (len == 0)
2494 return 1;
2496 if (! (memsetp
2497 ? SET_BY_PIECES_P (len, align)
2498 : STORE_BY_PIECES_P (len, align)))
2499 return 0;
2501 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2503 /* We would first store what we can in the largest integer mode, then go to
2504 successively smaller modes. */
2506 for (reverse = 0;
2507 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2508 reverse++)
2510 l = len;
2511 max_size = STORE_MAX_PIECES + 1;
2512 while (max_size > 1 && l > 0)
2514 mode = widest_int_mode_for_size (max_size);
2516 if (mode == VOIDmode)
2517 break;
2519 icode = optab_handler (mov_optab, mode);
2520 if (icode != CODE_FOR_nothing
2521 && align >= GET_MODE_ALIGNMENT (mode))
2523 unsigned int size = GET_MODE_SIZE (mode);
2525 while (l >= size)
2527 if (reverse)
2528 offset -= size;
2530 cst = (*constfun) (constfundata, offset, mode);
2531 if (!targetm.legitimate_constant_p (mode, cst))
2532 return 0;
2534 if (!reverse)
2535 offset += size;
2537 l -= size;
2541 max_size = GET_MODE_SIZE (mode);
2544 /* The code above should have handled everything. */
2545 gcc_assert (!l);
2548 return 1;
2551 /* Generate several move instructions to store LEN bytes generated by
2552 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2553 pointer which will be passed as argument in every CONSTFUN call.
2554 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2555 a memset operation and false if it's a copy of a constant string.
2556 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2557 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2558 stpcpy. */
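/* Illustrative only: an expander for a mempcpy/stpcpy-style builtin
   would ask for the end address, e.g.

     rtx end = store_by_pieces (dest_mem, len, constfun, data, align,
                                false, 1);

   while a memcpy- or memset-style expansion passes ENDP == 0 and keeps
   using DEST_MEM itself.  */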
2561 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2562 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2563 void *constfundata, unsigned int align, bool memsetp, int endp)
2565 enum machine_mode to_addr_mode = get_address_mode (to);
2566 struct store_by_pieces_d data;
2568 if (len == 0)
2570 gcc_assert (endp != 2);
2571 return to;
2574 gcc_assert (memsetp
2575 ? SET_BY_PIECES_P (len, align)
2576 : STORE_BY_PIECES_P (len, align));
2577 data.constfun = constfun;
2578 data.constfundata = constfundata;
2579 data.len = len;
2580 data.to = to;
2581 store_by_pieces_1 (&data, align);
2582 if (endp)
2584 rtx to1;
2586 gcc_assert (!data.reverse);
2587 if (data.autinc_to)
2589 if (endp == 2)
2591 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2592 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2593 else
2594 data.to_addr = copy_to_mode_reg (to_addr_mode,
2595 plus_constant (to_addr_mode,
2596 data.to_addr,
2597 -1));
2599 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2600 data.offset);
2602 else
2604 if (endp == 2)
2605 --data.offset;
2606 to1 = adjust_address (data.to, QImode, data.offset);
2608 return to1;
2610 else
2611 return data.to;
2614 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2615 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2617 static void
2618 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2620 struct store_by_pieces_d data;
2622 if (len == 0)
2623 return;
2625 data.constfun = clear_by_pieces_1;
2626 data.constfundata = NULL;
2627 data.len = len;
2628 data.to = to;
2629 store_by_pieces_1 (&data, align);
2632 /* Callback routine for clear_by_pieces.
2633 Return const0_rtx unconditionally. */
2635 static rtx
2636 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2637 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2638 enum machine_mode mode ATTRIBUTE_UNUSED)
2640 return const0_rtx;
2643 /* Subroutine of clear_by_pieces and store_by_pieces.
2644 Generate several move instructions to store LEN bytes of block TO. (A MEM
2645 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2647 static void
2648 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2649 unsigned int align ATTRIBUTE_UNUSED)
2651 enum machine_mode to_addr_mode = get_address_mode (data->to);
2652 rtx to_addr = XEXP (data->to, 0);
2653 unsigned int max_size = STORE_MAX_PIECES + 1;
2654 enum insn_code icode;
2656 data->offset = 0;
2657 data->to_addr = to_addr;
2658 data->autinc_to
2659 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2660 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2662 data->explicit_inc_to = 0;
2663 data->reverse
2664 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2665 if (data->reverse)
2666 data->offset = data->len;
2668 /* If storing requires more than two move insns,
2669 copy addresses to registers (to make displacements shorter)
2670 and use post-increment if available. */
2671 if (!data->autinc_to
2672 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2674 /* Determine the main mode we'll be using.
2675 MODE might not be used depending on the definitions of the
2676 USE_* macros below. */
2677 enum machine_mode mode ATTRIBUTE_UNUSED
2678 = widest_int_mode_for_size (max_size);
2680 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2682 data->to_addr = copy_to_mode_reg (to_addr_mode,
2683 plus_constant (to_addr_mode,
2684 to_addr,
2685 data->len));
2686 data->autinc_to = 1;
2687 data->explicit_inc_to = -1;
2690 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2691 && ! data->autinc_to)
2693 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2694 data->autinc_to = 1;
2695 data->explicit_inc_to = 1;
2698 if ( !data->autinc_to && CONSTANT_P (to_addr))
2699 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2702 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2704 /* First store what we can in the largest integer mode, then go to
2705 successively smaller modes. */
2707 while (max_size > 1 && data->len > 0)
2709 enum machine_mode mode = widest_int_mode_for_size (max_size);
2711 if (mode == VOIDmode)
2712 break;
2714 icode = optab_handler (mov_optab, mode);
2715 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2716 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2718 max_size = GET_MODE_SIZE (mode);
2721 /* The code above should have handled everything. */
2722 gcc_assert (!data->len);
2725 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2726 with move instructions for mode MODE. GENFUN is the gen_... function
2727 to make a move insn for that mode. DATA has all the other info. */
2729 static void
2730 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2731 struct store_by_pieces_d *data)
2733 unsigned int size = GET_MODE_SIZE (mode);
2734 rtx to1, cst;
2736 while (data->len >= size)
2738 if (data->reverse)
2739 data->offset -= size;
2741 if (data->autinc_to)
2742 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2743 data->offset);
2744 else
2745 to1 = adjust_address (data->to, mode, data->offset);
2747 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2748 emit_insn (gen_add2_insn (data->to_addr,
2749 gen_int_mode (-(HOST_WIDE_INT) size,
2750 GET_MODE (data->to_addr))));
2752 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2753 emit_insn ((*genfun) (to1, cst));
2755 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2756 emit_insn (gen_add2_insn (data->to_addr,
2757 gen_int_mode (size,
2758 GET_MODE (data->to_addr))));
2760 if (! data->reverse)
2761 data->offset += size;
2763 data->len -= size;
2767 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2768 its length in bytes. */
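/* A minimal usage sketch: zeroing a BLKmode object of known size
   normally goes through the clear_storage wrapper below, e.g.

     clear_storage (temp_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   which forwards here with no expected-size or alignment hints.  */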
2771 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2772 unsigned int expected_align, HOST_WIDE_INT expected_size,
2773 unsigned HOST_WIDE_INT min_size,
2774 unsigned HOST_WIDE_INT max_size,
2775 unsigned HOST_WIDE_INT probable_max_size)
2777 enum machine_mode mode = GET_MODE (object);
2778 unsigned int align;
2780 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2782 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2783 just move a zero. Otherwise, do this a piece at a time. */
2784 if (mode != BLKmode
2785 && CONST_INT_P (size)
2786 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2788 rtx zero = CONST0_RTX (mode);
2789 if (zero != NULL)
2791 emit_move_insn (object, zero);
2792 return NULL;
2795 if (COMPLEX_MODE_P (mode))
2797 zero = CONST0_RTX (GET_MODE_INNER (mode));
2798 if (zero != NULL)
2800 write_complex_part (object, zero, 0);
2801 write_complex_part (object, zero, 1);
2802 return NULL;
2807 if (size == const0_rtx)
2808 return NULL;
2810 align = MEM_ALIGN (object);
2812 if (CONST_INT_P (size)
2813 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2814 clear_by_pieces (object, INTVAL (size), align);
2815 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2816 expected_align, expected_size,
2817 min_size, max_size, probable_max_size))
2819 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2820 return set_storage_via_libcall (object, size, const0_rtx,
2821 method == BLOCK_OP_TAILCALL);
2822 else
2823 gcc_unreachable ();
2825 return NULL;
2829 clear_storage (rtx object, rtx size, enum block_op_methods method)
2831 unsigned HOST_WIDE_INT max, min = 0;
2832 if (GET_CODE (size) == CONST_INT)
2833 min = max = UINTVAL (size);
2834 else
2835 max = GET_MODE_MASK (GET_MODE (size));
2836 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2840 /* A subroutine of clear_storage. Expand a call to memset.
2841 Return the return value of memset, 0 otherwise. */
2844 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2846 tree call_expr, fn, object_tree, size_tree, val_tree;
2847 enum machine_mode size_mode;
2848 rtx retval;
2850 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2851 place those new pseudos into a VAR_DECL and use them later. */
2853 object = copy_addr_to_reg (XEXP (object, 0));
2855 size_mode = TYPE_MODE (sizetype);
2856 size = convert_to_mode (size_mode, size, 1);
2857 size = copy_to_mode_reg (size_mode, size);
2859 /* It is incorrect to use the libcall calling conventions to call
2860 memset in this context. This could be a user call to memset and
2861 the user may wish to examine the return value from memset. For
2862 targets where libcalls and normal calls have different conventions
2863 for returning pointers, we could end up generating incorrect code. */
2865 object_tree = make_tree (ptr_type_node, object);
2866 if (!CONST_INT_P (val))
2867 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2868 size_tree = make_tree (sizetype, size);
2869 val_tree = make_tree (integer_type_node, val);
2871 fn = clear_storage_libcall_fn (true);
2872 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2873 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2875 retval = expand_normal (call_expr);
2877 return retval;
2880 /* A subroutine of set_storage_via_libcall. Create the tree node
2881 for the function we use for block clears. */
2883 tree block_clear_fn;
2885 void
2886 init_block_clear_fn (const char *asmspec)
2888 if (!block_clear_fn)
2890 tree fn, args;
2892 fn = get_identifier ("memset");
2893 args = build_function_type_list (ptr_type_node, ptr_type_node,
2894 integer_type_node, sizetype,
2895 NULL_TREE);
2897 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2898 DECL_EXTERNAL (fn) = 1;
2899 TREE_PUBLIC (fn) = 1;
2900 DECL_ARTIFICIAL (fn) = 1;
2901 TREE_NOTHROW (fn) = 1;
2902 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2903 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2905 block_clear_fn = fn;
2908 if (asmspec)
2909 set_user_assembler_name (block_clear_fn, asmspec);
2912 static tree
2913 clear_storage_libcall_fn (int for_call)
2915 static bool emitted_extern;
2917 if (!block_clear_fn)
2918 init_block_clear_fn (NULL);
2920 if (for_call && !emitted_extern)
2922 emitted_extern = true;
2923 make_decl_rtl (block_clear_fn);
2926 return block_clear_fn;
2929 /* Expand a setmem pattern; return true if successful. */
2931 bool
2932 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2933 unsigned int expected_align, HOST_WIDE_INT expected_size,
2934 unsigned HOST_WIDE_INT min_size,
2935 unsigned HOST_WIDE_INT max_size,
2936 unsigned HOST_WIDE_INT probable_max_size)
2938 /* Try the most limited insn first, because there's no point
2939 including more than one in the machine description unless
2940 the more limited one has some advantage. */
2942 enum machine_mode mode;
2944 if (expected_align < align)
2945 expected_align = align;
2946 if (expected_size != -1)
2948 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2949 expected_size = max_size;
2950 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2951 expected_size = min_size;
2954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2955 mode = GET_MODE_WIDER_MODE (mode))
2957 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2959 if (code != CODE_FOR_nothing
2960 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2961 here because if SIZE is less than the mode mask, as it is
2962 returned by the macro, it will definitely be less than the
2963 actual mode mask. Since SIZE is within the Pmode address
2964 space, we limit MODE to Pmode. */
2965 && ((CONST_INT_P (size)
2966 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2967 <= (GET_MODE_MASK (mode) >> 1)))
2968 || max_size <= (GET_MODE_MASK (mode) >> 1)
2969 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2971 struct expand_operand ops[9];
2972 unsigned int nops;
2974 nops = insn_data[(int) code].n_generator_args;
2975 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2977 create_fixed_operand (&ops[0], object);
2978 /* The check above guarantees that this size conversion is valid. */
2979 create_convert_operand_to (&ops[1], size, mode, true);
2980 create_convert_operand_from (&ops[2], val, byte_mode, true);
2981 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2982 if (nops >= 6)
2984 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2985 create_integer_operand (&ops[5], expected_size);
2987 if (nops >= 8)
2989 create_integer_operand (&ops[6], min_size);
2990 /* If we cannot represent the maximal size,
2991 make parameter NULL. */
2992 if ((HOST_WIDE_INT) max_size != -1)
2993 create_integer_operand (&ops[7], max_size);
2994 else
2995 create_fixed_operand (&ops[7], NULL);
2997 if (nops == 9)
2999 /* If we cannot represent the maximal size,
3000 make parameter NULL. */
3001 if ((HOST_WIDE_INT) probable_max_size != -1)
3002 create_integer_operand (&ops[8], probable_max_size);
3003 else
3004 create_fixed_operand (&ops[8], NULL);
3006 if (maybe_expand_insn (code, nops, ops))
3007 return true;
3011 return false;
3015 /* Write to one of the components of the complex value CPLX. Write VAL to
3016 the real part if IMAG_P is false, and the imaginary part if it's true. */
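/* For example, the COMPLEX_MODE_P branch of clear_storage_hints above
   zeroes a complex value by writing both components:

     write_complex_part (object, CONST0_RTX (GET_MODE_INNER (mode)), false);
     write_complex_part (object, CONST0_RTX (GET_MODE_INNER (mode)), true);  */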
3018 static void
3019 write_complex_part (rtx cplx, rtx val, bool imag_p)
3021 enum machine_mode cmode;
3022 enum machine_mode imode;
3023 unsigned ibitsize;
3025 if (GET_CODE (cplx) == CONCAT)
3027 emit_move_insn (XEXP (cplx, imag_p), val);
3028 return;
3031 cmode = GET_MODE (cplx);
3032 imode = GET_MODE_INNER (cmode);
3033 ibitsize = GET_MODE_BITSIZE (imode);
3035 /* For MEMs simplify_gen_subreg may generate an invalid new address
3036 because, e.g., the original address is considered mode-dependent
3037 by the target, which restricts simplify_subreg from invoking
3038 adjust_address_nv. Instead of preparing fallback support for an
3039 invalid address, we call adjust_address_nv directly. */
3040 if (MEM_P (cplx))
3042 emit_move_insn (adjust_address_nv (cplx, imode,
3043 imag_p ? GET_MODE_SIZE (imode) : 0),
3044 val);
3045 return;
3048 /* If the sub-object is at least word sized, then we know that subregging
3049 will work. This special case is important, since store_bit_field
3050 wants to operate on integer modes, and there's rarely an OImode to
3051 correspond to TCmode. */
3052 if (ibitsize >= BITS_PER_WORD
3053 /* For hard regs we have exact predicates. Assume we can split
3054 the original object if it spans an even number of hard regs.
3055 This special case is important for SCmode on 64-bit platforms
3056 where the natural size of floating-point regs is 32-bit. */
3057 || (REG_P (cplx)
3058 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3059 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3061 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3062 imag_p ? GET_MODE_SIZE (imode) : 0);
3063 if (part)
3065 emit_move_insn (part, val);
3066 return;
3068 else
3069 /* simplify_gen_subreg may fail for sub-word MEMs. */
3070 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3073 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3076 /* Extract one of the components of the complex value CPLX. Extract the
3077 real part if IMAG_P is false, and the imaginary part if it's true. */
3079 static rtx
3080 read_complex_part (rtx cplx, bool imag_p)
3082 enum machine_mode cmode, imode;
3083 unsigned ibitsize;
3085 if (GET_CODE (cplx) == CONCAT)
3086 return XEXP (cplx, imag_p);
3088 cmode = GET_MODE (cplx);
3089 imode = GET_MODE_INNER (cmode);
3090 ibitsize = GET_MODE_BITSIZE (imode);
3092 /* Special case reads from complex constants that got spilled to memory. */
3093 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3095 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3096 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3098 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3099 if (CONSTANT_CLASS_P (part))
3100 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3104 /* For MEMs simplify_gen_subreg may generate an invalid new address
3105 because, e.g., the original address is considered mode-dependent
3106 by the target, which restricts simplify_subreg from invoking
3107 adjust_address_nv. Instead of preparing fallback support for an
3108 invalid address, we call adjust_address_nv directly. */
3109 if (MEM_P (cplx))
3110 return adjust_address_nv (cplx, imode,
3111 imag_p ? GET_MODE_SIZE (imode) : 0);
3113 /* If the sub-object is at least word sized, then we know that subregging
3114 will work. This special case is important, since extract_bit_field
3115 wants to operate on integer modes, and there's rarely an OImode to
3116 correspond to TCmode. */
3117 if (ibitsize >= BITS_PER_WORD
3118 /* For hard regs we have exact predicates. Assume we can split
3119 the original object if it spans an even number of hard regs.
3120 This special case is important for SCmode on 64-bit platforms
3121 where the natural size of floating-point regs is 32-bit. */
3122 || (REG_P (cplx)
3123 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3124 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3126 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3127 imag_p ? GET_MODE_SIZE (imode) : 0);
3128 if (ret)
3129 return ret;
3130 else
3131 /* simplify_gen_subreg may fail for sub-word MEMs. */
3132 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3135 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3136 true, NULL_RTX, imode, imode);
3139 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3140 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3141 represented in NEW_MODE. If FORCE is true, this will never happen, as
3142 we'll force-create a SUBREG if needed. */
3144 static rtx
3145 emit_move_change_mode (enum machine_mode new_mode,
3146 enum machine_mode old_mode, rtx x, bool force)
3148 rtx ret;
3150 if (push_operand (x, GET_MODE (x)))
3152 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3153 MEM_COPY_ATTRIBUTES (ret, x);
3155 else if (MEM_P (x))
3157 /* We don't have to worry about changing the address since the
3158 size in bytes is supposed to be the same. */
3159 if (reload_in_progress)
3161 /* Copy the MEM to change the mode and move any
3162 substitutions from the old MEM to the new one. */
3163 ret = adjust_address_nv (x, new_mode, 0);
3164 copy_replacements (x, ret);
3166 else
3167 ret = adjust_address (x, new_mode, 0);
3169 else
3171 /* Note that we do want simplify_subreg's behavior of validating
3172 that the new mode is ok for a hard register. If we were to use
3173 simplify_gen_subreg, we would create the subreg, but would
3174 probably run into the target not being able to implement it. */
3175 /* Except, of course, when FORCE is true, when this is exactly what
3176 we want. Which is needed for CCmodes on some targets. */
3177 if (force)
3178 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3179 else
3180 ret = simplify_subreg (new_mode, x, old_mode, 0);
3183 return ret;
3186 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3187 an integer mode of the same size as MODE. Returns the instruction
3188 emitted, or NULL if such a move could not be generated. */
3190 static rtx_insn *
3191 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3193 enum machine_mode imode;
3194 enum insn_code code;
3196 /* There must exist a mode of the exact size we require. */
3197 imode = int_mode_for_mode (mode);
3198 if (imode == BLKmode)
3199 return NULL;
3201 /* The target must support moves in this mode. */
3202 code = optab_handler (mov_optab, imode);
3203 if (code == CODE_FOR_nothing)
3204 return NULL;
3206 x = emit_move_change_mode (imode, mode, x, force);
3207 if (x == NULL_RTX)
3208 return NULL;
3209 y = emit_move_change_mode (imode, mode, y, force);
3210 if (y == NULL_RTX)
3211 return NULL;
3212 return emit_insn (GEN_FCN (code) (x, y));
3215 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3216 Return an equivalent MEM that does not use an auto-increment. */
3219 emit_move_resolve_push (enum machine_mode mode, rtx x)
3221 enum rtx_code code = GET_CODE (XEXP (x, 0));
3222 HOST_WIDE_INT adjust;
3223 rtx temp;
3225 adjust = GET_MODE_SIZE (mode);
3226 #ifdef PUSH_ROUNDING
3227 adjust = PUSH_ROUNDING (adjust);
3228 #endif
3229 if (code == PRE_DEC || code == POST_DEC)
3230 adjust = -adjust;
3231 else if (code == PRE_MODIFY || code == POST_MODIFY)
3233 rtx expr = XEXP (XEXP (x, 0), 1);
3234 HOST_WIDE_INT val;
3236 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3237 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3238 val = INTVAL (XEXP (expr, 1));
3239 if (GET_CODE (expr) == MINUS)
3240 val = -val;
3241 gcc_assert (adjust == val || adjust == -val);
3242 adjust = val;
3245 /* Do not use anti_adjust_stack, since we don't want to update
3246 stack_pointer_delta. */
3247 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3248 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3249 0, OPTAB_LIB_WIDEN);
3250 if (temp != stack_pointer_rtx)
3251 emit_move_insn (stack_pointer_rtx, temp);
3253 switch (code)
3255 case PRE_INC:
3256 case PRE_DEC:
3257 case PRE_MODIFY:
3258 temp = stack_pointer_rtx;
3259 break;
3260 case POST_INC:
3261 case POST_DEC:
3262 case POST_MODIFY:
3263 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3264 break;
3265 default:
3266 gcc_unreachable ();
3269 return replace_equiv_address (x, temp);
3272 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3273 X is known to satisfy push_operand, and MODE is known to be complex.
3274 Returns the last instruction emitted. */
3276 rtx_insn *
3277 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3279 enum machine_mode submode = GET_MODE_INNER (mode);
3280 bool imag_first;
3282 #ifdef PUSH_ROUNDING
3283 unsigned int submodesize = GET_MODE_SIZE (submode);
3285 /* In case we output to the stack, but the size is smaller than the
3286 machine can push exactly, we need to use move instructions. */
3287 if (PUSH_ROUNDING (submodesize) != submodesize)
3289 x = emit_move_resolve_push (mode, x);
3290 return emit_move_insn (x, y);
3292 #endif
3294 /* Note that the real part always precedes the imag part in memory
3295 regardless of the machine's endianness. */
3296 switch (GET_CODE (XEXP (x, 0)))
3298 case PRE_DEC:
3299 case POST_DEC:
3300 imag_first = true;
3301 break;
3302 case PRE_INC:
3303 case POST_INC:
3304 imag_first = false;
3305 break;
3306 default:
3307 gcc_unreachable ();
3310 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3311 read_complex_part (y, imag_first));
3312 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3313 read_complex_part (y, !imag_first));
3316 /* A subroutine of emit_move_complex. Perform the move from Y to X
3317 via two moves of the parts. Returns the last instruction emitted. */
3319 rtx_insn *
3320 emit_move_complex_parts (rtx x, rtx y)
3322 /* Show the output dies here. This is necessary for SUBREGs
3323 of pseudos since we cannot track their lifetimes correctly;
3324 hard regs shouldn't appear here except as return values. */
3325 if (!reload_completed && !reload_in_progress
3326 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3327 emit_clobber (x);
3329 write_complex_part (x, read_complex_part (y, false), false);
3330 write_complex_part (x, read_complex_part (y, true), true);
3332 return get_last_insn ();
3335 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3336 MODE is known to be complex. Returns the last instruction emitted. */
3338 static rtx_insn *
3339 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3341 bool try_int;
3343 /* Need to take special care for pushes, to maintain proper ordering
3344 of the data, and possibly extra padding. */
3345 if (push_operand (x, mode))
3346 return emit_move_complex_push (mode, x, y);
3348 /* See if we can coerce the target into moving both values at once, except
3349 for floating point where we favor moving as parts if this is easy. */
3350 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3351 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3352 && !(REG_P (x)
3353 && HARD_REGISTER_P (x)
3354 && hard_regno_nregs[REGNO (x)][mode] == 1)
3355 && !(REG_P (y)
3356 && HARD_REGISTER_P (y)
3357 && hard_regno_nregs[REGNO (y)][mode] == 1))
3358 try_int = false;
3359 /* Not possible if the values are inherently not adjacent. */
3360 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3361 try_int = false;
3362 /* Is possible if both are registers (or subregs of registers). */
3363 else if (register_operand (x, mode) && register_operand (y, mode))
3364 try_int = true;
3365 /* If one of the operands is a memory, and alignment constraints
3366 are friendly enough, we may be able to do combined memory operations.
3367 We do not attempt this if Y is a constant because that combination is
3368 usually better with the by-parts thing below. */
3369 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3370 && (!STRICT_ALIGNMENT
3371 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3372 try_int = true;
3373 else
3374 try_int = false;
3376 if (try_int)
3378 rtx_insn *ret;
3380 /* For memory to memory moves, optimal behavior can be had with the
3381 existing block move logic. */
3382 if (MEM_P (x) && MEM_P (y))
3384 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3385 BLOCK_OP_NO_LIBCALL);
3386 return get_last_insn ();
3389 ret = emit_move_via_integer (mode, x, y, true);
3390 if (ret)
3391 return ret;
3394 return emit_move_complex_parts (x, y);
3397 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3398 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3400 static rtx_insn *
3401 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3403 rtx_insn *ret;
3405 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3406 if (mode != CCmode)
3408 enum insn_code code = optab_handler (mov_optab, CCmode);
3409 if (code != CODE_FOR_nothing)
3411 x = emit_move_change_mode (CCmode, mode, x, true);
3412 y = emit_move_change_mode (CCmode, mode, y, true);
3413 return emit_insn (GEN_FCN (code) (x, y));
3417 /* Otherwise, find the MODE_INT mode of the same width. */
3418 ret = emit_move_via_integer (mode, x, y, false);
3419 gcc_assert (ret != NULL);
3420 return ret;
3423 /* Return true if word I of OP lies entirely in the
3424 undefined bits of a paradoxical subreg. */
3426 static bool
3427 undefined_operand_subword_p (const_rtx op, int i)
3429 enum machine_mode innermode, innermostmode;
3430 int offset;
3431 if (GET_CODE (op) != SUBREG)
3432 return false;
3433 innermode = GET_MODE (op);
3434 innermostmode = GET_MODE (SUBREG_REG (op));
3435 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3436 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3437 memory, except for a paradoxical subreg where we define
3438 SUBREG_BYTE to be 0; undo this exception as in
3439 simplify_subreg. */
3440 if (SUBREG_BYTE (op) == 0
3441 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3443 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3444 if (WORDS_BIG_ENDIAN)
3445 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3446 if (BYTES_BIG_ENDIAN)
3447 offset += difference % UNITS_PER_WORD;
3449 if (offset >= GET_MODE_SIZE (innermostmode)
3450 || offset <= -GET_MODE_SIZE (word_mode))
3451 return true;
3452 return false;
3455 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3456 MODE is any multi-word or full-word mode that lacks a move_insn
3457 pattern. Note that you will get better code if you define such
3458 patterns, even if they must turn into multiple assembler instructions. */
3460 static rtx_insn *
3461 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3463 rtx_insn *last_insn = 0;
3464 rtx_insn *seq;
3465 rtx inner;
3466 bool need_clobber;
3467 int i;
3469 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3471 /* If X is a push on the stack, do the push now and replace
3472 X with a reference to the stack pointer. */
3473 if (push_operand (x, mode))
3474 x = emit_move_resolve_push (mode, x);
3476 /* If we are in reload, see if either operand is a MEM whose address
3477 is scheduled for replacement. */
3478 if (reload_in_progress && MEM_P (x)
3479 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3480 x = replace_equiv_address_nv (x, inner);
3481 if (reload_in_progress && MEM_P (y)
3482 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3483 y = replace_equiv_address_nv (y, inner);
3485 start_sequence ();
3487 need_clobber = false;
3488 for (i = 0;
3489 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3490 i++)
3492 rtx xpart = operand_subword (x, i, 1, mode);
3493 rtx ypart;
3495 /* Do not generate code for a move if it would come entirely
3496 from the undefined bits of a paradoxical subreg. */
3497 if (undefined_operand_subword_p (y, i))
3498 continue;
3500 ypart = operand_subword (y, i, 1, mode);
3502 /* If we can't get a part of Y, put Y into memory if it is a
3503 constant. Otherwise, force it into a register. Then we must
3504 be able to get a part of Y. */
3505 if (ypart == 0 && CONSTANT_P (y))
3507 y = use_anchored_address (force_const_mem (mode, y));
3508 ypart = operand_subword (y, i, 1, mode);
3510 else if (ypart == 0)
3511 ypart = operand_subword_force (y, i, mode);
3513 gcc_assert (xpart && ypart);
3515 need_clobber |= (GET_CODE (xpart) == SUBREG);
3517 last_insn = emit_move_insn (xpart, ypart);
3520 seq = get_insns ();
3521 end_sequence ();
3523 /* Show the output dies here. This is necessary for SUBREGs
3524 of pseudos since we cannot track their lifetimes correctly;
3525 hard regs shouldn't appear here except as return values.
3526 We never want to emit such a clobber after reload. */
3527 if (x != y
3528 && ! (reload_in_progress || reload_completed)
3529 && need_clobber != 0)
3530 emit_clobber (x);
3532 emit_insn (seq);
3534 return last_insn;
3537 /* Low level part of emit_move_insn.
3538 Called just like emit_move_insn, but assumes X and Y
3539 are basically valid. */
3541 rtx_insn *
3542 emit_move_insn_1 (rtx x, rtx y)
3544 enum machine_mode mode = GET_MODE (x);
3545 enum insn_code code;
3547 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3549 code = optab_handler (mov_optab, mode);
3550 if (code != CODE_FOR_nothing)
3551 return emit_insn (GEN_FCN (code) (x, y));
3553 /* Expand complex moves by moving real part and imag part. */
3554 if (COMPLEX_MODE_P (mode))
3555 return emit_move_complex (mode, x, y);
3557 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3558 || ALL_FIXED_POINT_MODE_P (mode))
3560 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3562 /* If we can't find an integer mode, use multi words. */
3563 if (result)
3564 return result;
3565 else
3566 return emit_move_multi_word (mode, x, y);
3569 if (GET_MODE_CLASS (mode) == MODE_CC)
3570 return emit_move_ccmode (mode, x, y);
3572 /* Try using a move pattern for the corresponding integer mode. This is
3573 only safe when simplify_subreg can convert MODE constants into integer
3574 constants. At present, it can only do this reliably if the value
3575 fits within a HOST_WIDE_INT. */
3576 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3578 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3580 if (ret)
3582 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3583 return ret;
3587 return emit_move_multi_word (mode, x, y);
3590 /* Generate code to copy Y into X.
3591 Both Y and X must have the same mode, except that
3592 Y can be a constant with VOIDmode.
3593 This mode cannot be BLKmode; use emit_block_move for that.
3595 Return the last instruction emitted. */
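/* A minimal usage sketch: most expanders simply copy a value into a
   fresh pseudo with

     rtx tmp = gen_reg_rtx (mode);
     emit_move_insn (tmp, src);

   and let emit_move_insn_1 above pick the strategy: a single move
   pattern, an integer-mode move, a by-parts complex move, or a
   multi-word move.  */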
3597 rtx_insn *
3598 emit_move_insn (rtx x, rtx y)
3600 enum machine_mode mode = GET_MODE (x);
3601 rtx y_cst = NULL_RTX;
3602 rtx_insn *last_insn;
3603 rtx set;
3605 gcc_assert (mode != BLKmode
3606 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3608 if (CONSTANT_P (y))
3610 if (optimize
3611 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3612 && (last_insn = compress_float_constant (x, y)))
3613 return last_insn;
3615 y_cst = y;
3617 if (!targetm.legitimate_constant_p (mode, y))
3619 y = force_const_mem (mode, y);
3621 /* If the target's cannot_force_const_mem prevented the spill,
3622 assume that the target's move expanders will also take care
3623 of the non-legitimate constant. */
3624 if (!y)
3625 y = y_cst;
3626 else
3627 y = use_anchored_address (y);
3631 /* If X or Y are memory references, verify that their addresses are valid
3632 for the machine. */
3633 if (MEM_P (x)
3634 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3635 MEM_ADDR_SPACE (x))
3636 && ! push_operand (x, GET_MODE (x))))
3637 x = validize_mem (x);
3639 if (MEM_P (y)
3640 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3641 MEM_ADDR_SPACE (y)))
3642 y = validize_mem (y);
3644 gcc_assert (mode != BLKmode);
3646 last_insn = emit_move_insn_1 (x, y);
3648 if (y_cst && REG_P (x)
3649 && (set = single_set (last_insn)) != NULL_RTX
3650 && SET_DEST (set) == x
3651 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3652 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3654 return last_insn;
3657 /* If Y is representable exactly in a narrower mode, and the target can
3658 perform the extension directly from constant or memory, then emit the
3659 move as an extension. */
3661 static rtx_insn *
3662 compress_float_constant (rtx x, rtx y)
3664 enum machine_mode dstmode = GET_MODE (x);
3665 enum machine_mode orig_srcmode = GET_MODE (y);
3666 enum machine_mode srcmode;
3667 REAL_VALUE_TYPE r;
3668 int oldcost, newcost;
3669 bool speed = optimize_insn_for_speed_p ();
3671 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3673 if (targetm.legitimate_constant_p (dstmode, y))
3674 oldcost = set_src_cost (y, speed);
3675 else
3676 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3678 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3679 srcmode != orig_srcmode;
3680 srcmode = GET_MODE_WIDER_MODE (srcmode))
3682 enum insn_code ic;
3683 rtx trunc_y;
3684 rtx_insn *last_insn;
3686 /* Skip if the target can't extend this way. */
3687 ic = can_extend_p (dstmode, srcmode, 0);
3688 if (ic == CODE_FOR_nothing)
3689 continue;
3691 /* Skip if the narrowed value isn't exact. */
3692 if (! exact_real_truncate (srcmode, &r))
3693 continue;
3695 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3697 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3699 /* Skip if the target needs extra instructions to perform
3700 the extension. */
3701 if (!insn_operand_matches (ic, 1, trunc_y))
3702 continue;
3703 /* This is valid, but may not be cheaper than the original. */
3704 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3705 speed);
3706 if (oldcost < newcost)
3707 continue;
3709 else if (float_extend_from_mem[dstmode][srcmode])
3711 trunc_y = force_const_mem (srcmode, trunc_y);
3712 /* This is valid, but may not be cheaper than the original. */
3713 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3714 speed);
3715 if (oldcost < newcost)
3716 continue;
3717 trunc_y = validize_mem (trunc_y);
3719 else
3720 continue;
3722 /* For CSE's benefit, force the compressed constant pool entry
3723 into a new pseudo. This constant may be used in different modes,
3724 and if not, combine will put things back together for us. */
3725 trunc_y = force_reg (srcmode, trunc_y);
3727 /* If x is a hard register, perform the extension into a pseudo,
3728 so that e.g. stack realignment code is aware of it. */
3729 rtx target = x;
3730 if (REG_P (x) && HARD_REGISTER_P (x))
3731 target = gen_reg_rtx (dstmode);
3733 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3734 last_insn = get_last_insn ();
3736 if (REG_P (target))
3737 set_unique_reg_note (last_insn, REG_EQUAL, y);
3739 if (target != x)
3740 return emit_move_insn (x, target);
3741 return last_insn;
3744 return NULL;
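/* For illustration only (not from the GCC sources; the register number and
   constant-pool reference are made up): on a target with a cheap
   SFmode-to-DFmode extension, a DFmode constant such as 0.25, which is
   exactly representable in SFmode, may be emitted by the routine above
   roughly as

       (set (reg:DF 100)
            (float_extend:DF (mem:SF (symbol_ref [constant pool]))))

   instead of loading a full DFmode constant-pool entry.  A constant like
   0.1 is not exact in SFmode, so exact_real_truncate rejects it and the
   move stays in the original mode.  */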
3747 /* Pushing data onto the stack. */
3749 /* Push a block of length SIZE (perhaps variable)
3750 and return an rtx to address the beginning of the block.
3751 The value may be virtual_outgoing_args_rtx.
3753 EXTRA is the number of bytes of padding to push in addition to SIZE.
3754 BELOW nonzero means this padding comes at low addresses;
3755 otherwise, the padding comes at high addresses. */
3758 push_block (rtx size, int extra, int below)
3760 rtx temp;
3762 size = convert_modes (Pmode, ptr_mode, size, 1);
3763 if (CONSTANT_P (size))
3764 anti_adjust_stack (plus_constant (Pmode, size, extra));
3765 else if (REG_P (size) && extra == 0)
3766 anti_adjust_stack (size);
3767 else
3769 temp = copy_to_mode_reg (Pmode, size);
3770 if (extra != 0)
3771 temp = expand_binop (Pmode, add_optab, temp,
3772 gen_int_mode (extra, Pmode),
3773 temp, 0, OPTAB_LIB_WIDEN);
3774 anti_adjust_stack (temp);
3777 #ifndef STACK_GROWS_DOWNWARD
3778 if (0)
3779 #else
3780 if (1)
3781 #endif
3783 temp = virtual_outgoing_args_rtx;
3784 if (extra != 0 && below)
3785 temp = plus_constant (Pmode, temp, extra);
3787 else
3789 if (CONST_INT_P (size))
3790 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3791 -INTVAL (size) - (below ? 0 : extra));
3792 else if (extra != 0 && !below)
3793 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3794 negate_rtx (Pmode, plus_constant (Pmode, size,
3795 extra)));
3796 else
3797 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3798 negate_rtx (Pmode, size));
3801 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3804 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3806 static rtx
3807 mem_autoinc_base (rtx mem)
3809 if (MEM_P (mem))
3811 rtx addr = XEXP (mem, 0);
3812 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3813 return XEXP (addr, 0);
3815 return NULL;
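/* For illustration only (not from the GCC sources): given a push
   destination such as

       (mem:SI (pre_dec:SI (reg:SI sp)))

   mem_autoinc_base returns the stack pointer register; for a plain
   (mem:SI (reg:SI sp)) or for a non-MEM operand it returns NULL.  */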
3818 /* A utility routine used here, in reload, and in try_split. The insns
3819 after PREV up to and including LAST are known to adjust the stack,
3820 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3821 placing notes as appropriate. PREV may be NULL, indicating the
3822 entire insn sequence prior to LAST should be scanned.
3824 The set of allowed stack pointer modifications is small:
3825 (1) One or more auto-inc style memory references (aka pushes),
3826 (2) One or more addition/subtraction with the SP as destination,
3827 (3) A single move insn with the SP as destination,
3828 (4) A call_pop insn,
3829 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3831 Insns in the sequence that do not modify the SP are ignored,
3832 except for noreturn calls.
3834 The return value is the amount of adjustment that can be trivially
3835 verified, via immediate operand or auto-inc. If the adjustment
3836 cannot be trivially extracted, the return value is INT_MIN. */
3838 HOST_WIDE_INT
3839 find_args_size_adjust (rtx_insn *insn)
3841 rtx dest, set, pat;
3842 int i;
3844 pat = PATTERN (insn);
3845 set = NULL;
3847 /* Look for a call_pop pattern. */
3848 if (CALL_P (insn))
3850 /* We have to allow non-call_pop patterns for the case
3851 of emit_single_push_insn of a TLS address. */
3852 if (GET_CODE (pat) != PARALLEL)
3853 return 0;
3855 /* All call_pop have a stack pointer adjust in the parallel.
3856 The call itself is always first, and the stack adjust is
3857 usually last, so search from the end. */
3858 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3860 set = XVECEXP (pat, 0, i);
3861 if (GET_CODE (set) != SET)
3862 continue;
3863 dest = SET_DEST (set);
3864 if (dest == stack_pointer_rtx)
3865 break;
3867 /* We'd better have found the stack pointer adjust. */
3868 if (i == 0)
3869 return 0;
3870 /* Fall through to process the extracted SET and DEST
3871 as if it were a standalone insn. */

3873 else if (GET_CODE (pat) == SET)
3874 set = pat;
3875 else if ((set = single_set (insn)) != NULL)
3877 else if (GET_CODE (pat) == PARALLEL)
3879 /* ??? Some older ports use a parallel with a stack adjust
3880 and a store for a PUSH_ROUNDING pattern, rather than a
3881 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3882 /* ??? See h8300 and m68k, pushqi1. */
3883 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3885 set = XVECEXP (pat, 0, i);
3886 if (GET_CODE (set) != SET)
3887 continue;
3888 dest = SET_DEST (set);
3889 if (dest == stack_pointer_rtx)
3890 break;
3892 /* We do not expect an auto-inc of the sp in the parallel. */
3893 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3894 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3895 != stack_pointer_rtx);
3897 if (i < 0)
3898 return 0;
3900 else
3901 return 0;
3903 dest = SET_DEST (set);
3905 /* Look for direct modifications of the stack pointer. */
3906 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3908 /* Look for a trivial adjustment, otherwise assume nothing. */
3909 /* Note that the SPU restore_stack_block pattern refers to
3910 the stack pointer in V4SImode. Consider that non-trivial. */
3911 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3912 && GET_CODE (SET_SRC (set)) == PLUS
3913 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3914 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3915 return INTVAL (XEXP (SET_SRC (set), 1));
3916 /* ??? Reload can generate no-op moves, which will be cleaned
3917 up later. Recognize it and continue searching. */
3918 else if (rtx_equal_p (dest, SET_SRC (set)))
3919 return 0;
3920 else
3921 return HOST_WIDE_INT_MIN;
3923 else
3925 rtx mem, addr;
3927 /* Otherwise only think about autoinc patterns. */
3928 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3930 mem = dest;
3931 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3932 != stack_pointer_rtx);
3934 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3935 mem = SET_SRC (set);
3936 else
3937 return 0;
3939 addr = XEXP (mem, 0);
3940 switch (GET_CODE (addr))
3942 case PRE_INC:
3943 case POST_INC:
3944 return GET_MODE_SIZE (GET_MODE (mem));
3945 case PRE_DEC:
3946 case POST_DEC:
3947 return -GET_MODE_SIZE (GET_MODE (mem));
3948 case PRE_MODIFY:
3949 case POST_MODIFY:
3950 addr = XEXP (addr, 1);
3951 gcc_assert (GET_CODE (addr) == PLUS);
3952 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3953 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3954 return INTVAL (XEXP (addr, 1));
3955 default:
3956 gcc_unreachable ();
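/* For illustration only (not from the GCC sources), assuming a 32-bit
   target: a push such as

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   makes find_args_size_adjust return -4 (the PRE_DEC case above), and an
   explicit adjustment

       (set (reg:SI sp) (plus:SI (reg:SI sp) (const_int -16)))

   returns -16.  A stack pointer modification whose addend is not a
   CONST_INT yields HOST_WIDE_INT_MIN, i.e. "cannot be trivially
   extracted".  */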
3962 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3964 int args_size = end_args_size;
3965 bool saw_unknown = false;
3966 rtx_insn *insn;
3968 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3970 HOST_WIDE_INT this_delta;
3972 if (!NONDEBUG_INSN_P (insn))
3973 continue;
3975 this_delta = find_args_size_adjust (insn);
3976 if (this_delta == 0)
3978 if (!CALL_P (insn)
3979 || ACCUMULATE_OUTGOING_ARGS
3980 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3981 continue;
3984 gcc_assert (!saw_unknown);
3985 if (this_delta == HOST_WIDE_INT_MIN)
3986 saw_unknown = true;
3988 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3989 #ifdef STACK_GROWS_DOWNWARD
3990 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3991 #endif
3992 args_size -= this_delta;
3995 return saw_unknown ? INT_MIN : args_size;
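/* For illustration only (not from the GCC sources): if three 4-byte
   pushes are emitted for a call on a 32-bit, downward-growing stack and
   END_ARGS_SIZE is 12, walking backwards from the last push attaches
   REG_ARGS_SIZE notes of 12, 8 and 4 to the three pushes respectively,
   so every point in the insn stream records how many bytes of outgoing
   arguments are currently on the stack.  */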
3998 #ifdef PUSH_ROUNDING
3999 /* Emit single push insn. */
4001 static void
4002 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
4004 rtx dest_addr;
4005 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4006 rtx dest;
4007 enum insn_code icode;
4009 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4010 /* If there is a push pattern, use it. Otherwise try the old way of throwing
4011 a MEM representing the push operation to the move expander. */
4012 icode = optab_handler (push_optab, mode);
4013 if (icode != CODE_FOR_nothing)
4015 struct expand_operand ops[1];
4017 create_input_operand (&ops[0], x, mode);
4018 if (maybe_expand_insn (icode, 1, ops))
4019 return;
4021 if (GET_MODE_SIZE (mode) == rounded_size)
4022 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4023 /* If we are to pad downward, adjust the stack pointer first and
4024 then store X into the stack location using an offset. This is
4025 because emit_move_insn does not know how to pad; it does not have
4026 access to type. */
4027 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4029 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4030 HOST_WIDE_INT offset;
4032 emit_move_insn (stack_pointer_rtx,
4033 expand_binop (Pmode,
4034 #ifdef STACK_GROWS_DOWNWARD
4035 sub_optab,
4036 #else
4037 add_optab,
4038 #endif
4039 stack_pointer_rtx,
4040 gen_int_mode (rounded_size, Pmode),
4041 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4043 offset = (HOST_WIDE_INT) padding_size;
4044 #ifdef STACK_GROWS_DOWNWARD
4045 if (STACK_PUSH_CODE == POST_DEC)
4046 /* We have already decremented the stack pointer, so get the
4047 previous value. */
4048 offset += (HOST_WIDE_INT) rounded_size;
4049 #else
4050 if (STACK_PUSH_CODE == POST_INC)
4051 /* We have already incremented the stack pointer, so get the
4052 previous value. */
4053 offset -= (HOST_WIDE_INT) rounded_size;
4054 #endif
4055 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4056 gen_int_mode (offset, Pmode));
4058 else
4060 #ifdef STACK_GROWS_DOWNWARD
4061 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4062 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4063 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4064 Pmode));
4065 #else
4066 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4067 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4068 gen_int_mode (rounded_size, Pmode));
4069 #endif
4070 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4073 dest = gen_rtx_MEM (mode, dest_addr);
4075 if (type != 0)
4077 set_mem_attributes (dest, type, 1);
4079 if (cfun->tail_call_marked)
4080 /* Function incoming arguments may overlap with sibling call
4081 outgoing arguments and we cannot allow reordering of reads
4082 from function arguments with stores to outgoing arguments
4083 of sibling calls. */
4084 set_mem_alias_set (dest, 0);
4086 emit_move_insn (dest, x);
4089 /* Emit and annotate a single push insn. */
4091 static void
4092 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
4094 int delta, old_delta = stack_pointer_delta;
4095 rtx_insn *prev = get_last_insn ();
4096 rtx_insn *last;
4098 emit_single_push_insn_1 (mode, x, type);
4100 last = get_last_insn ();
4102 /* Notice the common case where we emitted exactly one insn. */
4103 if (PREV_INSN (last) == prev)
4105 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4106 return;
4109 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4110 gcc_assert (delta == INT_MIN || delta == old_delta);
4112 #endif
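/* For illustration only (not from the GCC sources; the register number is
   made up), assuming a 32-bit target where the stack grows downward and
   STACK_PUSH_CODE is PRE_DEC: pushing a word-sized argument typically
   emits something like

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 123))

   and emit_single_push_insn then attaches a REG_ARGS_SIZE note carrying
   the cumulative stack_pointer_delta, so later passes know how much
   outgoing-argument space is live after this insn.  */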
4114 /* Generate code to push X onto the stack, assuming it has mode MODE and
4115 type TYPE.
4116 MODE is redundant except when X is a CONST_INT (since they don't
4117 carry mode info).
4118 SIZE is an rtx for the size of data to be copied (in bytes),
4119 needed only if X is BLKmode.
4121 ALIGN (in bits) is maximum alignment we can assume.
4123 If PARTIAL and REG are both nonzero, then copy that many of the first
4124 bytes of X into registers starting with REG, and push the rest of X.
4125 The amount of space pushed is decreased by PARTIAL bytes.
4126 REG must be a hard register in this case.
4127 If REG is zero but PARTIAL is not, take all other actions for an
4128 argument partially in registers, but do not actually load any
4129 registers.
4131 EXTRA is the amount in bytes of extra space to leave next to this arg.
4132 This is ignored if an argument block has already been allocated.
4134 On a machine that lacks real push insns, ARGS_ADDR is the address of
4135 the bottom of the argument block for this call. We use indexing off there
4136 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4137 argument block has not been preallocated.
4139 ARGS_SO_FAR is the size of args previously pushed for this call.
4141 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4142 for arguments passed in registers. If nonzero, it will be the number
4143 of bytes required. */
4145 void
4146 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4147 unsigned int align, int partial, rtx reg, int extra,
4148 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4149 rtx alignment_pad)
4151 rtx xinner;
4152 enum direction stack_direction
4153 #ifdef STACK_GROWS_DOWNWARD
4154 = downward;
4155 #else
4156 = upward;
4157 #endif
4159 /* Decide where to pad the argument: `downward' for below,
4160 `upward' for above, or `none' for don't pad it.
4161 Default is below for small data on big-endian machines; else above. */
4162 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4164 /* Invert direction if stack is post-decrement.
4165 FIXME: why? */
4166 if (STACK_PUSH_CODE == POST_DEC)
4167 if (where_pad != none)
4168 where_pad = (where_pad == downward ? upward : downward);
4170 xinner = x;
4172 if (mode == BLKmode
4173 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4175 /* Copy a block into the stack, entirely or partially. */
4177 rtx temp;
4178 int used;
4179 int offset;
4180 int skip;
4182 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4183 used = partial - offset;
4185 if (mode != BLKmode)
4187 /* A value is to be stored in an insufficiently aligned
4188 stack slot; copy via a suitably aligned slot if
4189 necessary. */
4190 size = GEN_INT (GET_MODE_SIZE (mode));
4191 if (!MEM_P (xinner))
4193 temp = assign_temp (type, 1, 1);
4194 emit_move_insn (temp, xinner);
4195 xinner = temp;
4199 gcc_assert (size);
4201 /* USED is now the # of bytes we need not copy to the stack
4202 because registers will take care of them. */
4204 if (partial != 0)
4205 xinner = adjust_address (xinner, BLKmode, used);
4207 /* If the partial register-part of the arg counts in its stack size,
4208 skip the part of stack space corresponding to the registers.
4209 Otherwise, start copying to the beginning of the stack space,
4210 by setting SKIP to 0. */
4211 skip = (reg_parm_stack_space == 0) ? 0 : used;
4213 #ifdef PUSH_ROUNDING
4214 /* Do it with several push insns if that doesn't take lots of insns
4215 and if there is no difficulty with push insns that skip bytes
4216 on the stack for alignment purposes. */
4217 if (args_addr == 0
4218 && PUSH_ARGS
4219 && CONST_INT_P (size)
4220 && skip == 0
4221 && MEM_ALIGN (xinner) >= align
4222 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4223 /* Here we avoid the case of a structure whose weak alignment
4224 forces many pushes of a small amount of data,
4225 and such small pushes do rounding that causes trouble. */
4226 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4227 || align >= BIGGEST_ALIGNMENT
4228 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4229 == (align / BITS_PER_UNIT)))
4230 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4232 /* Push padding now if padding above and stack grows down,
4233 or if padding below and stack grows up.
4234 But if space already allocated, this has already been done. */
4235 if (extra && args_addr == 0
4236 && where_pad != none && where_pad != stack_direction)
4237 anti_adjust_stack (GEN_INT (extra));
4239 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4241 else
4242 #endif /* PUSH_ROUNDING */
4244 rtx target;
4246 /* Otherwise make space on the stack and copy the data
4247 to the address of that space. */
4249 /* Deduct words put into registers from the size we must copy. */
4250 if (partial != 0)
4252 if (CONST_INT_P (size))
4253 size = GEN_INT (INTVAL (size) - used);
4254 else
4255 size = expand_binop (GET_MODE (size), sub_optab, size,
4256 gen_int_mode (used, GET_MODE (size)),
4257 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4260 /* Get the address of the stack space.
4261 In this case, we do not deal with EXTRA separately.
4262 A single stack adjust will do. */
4263 if (! args_addr)
4265 temp = push_block (size, extra, where_pad == downward);
4266 extra = 0;
4268 else if (CONST_INT_P (args_so_far))
4269 temp = memory_address (BLKmode,
4270 plus_constant (Pmode, args_addr,
4271 skip + INTVAL (args_so_far)));
4272 else
4273 temp = memory_address (BLKmode,
4274 plus_constant (Pmode,
4275 gen_rtx_PLUS (Pmode,
4276 args_addr,
4277 args_so_far),
4278 skip));
4280 if (!ACCUMULATE_OUTGOING_ARGS)
4282 /* If the source is referenced relative to the stack pointer,
4283 copy it to another register to stabilize it. We do not need
4284 to do this if we know that we won't be changing sp. */
4286 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4287 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4288 temp = copy_to_reg (temp);
4291 target = gen_rtx_MEM (BLKmode, temp);
4293 /* We do *not* set_mem_attributes here, because incoming arguments
4294 may overlap with sibling call outgoing arguments and we cannot
4295 allow reordering of reads from function arguments with stores
4296 to outgoing arguments of sibling calls. We do, however, want
4297 to record the alignment of the stack slot. */
4298 /* ALIGN may well be better aligned than TYPE, e.g. due to
4299 PARM_BOUNDARY. Assume the caller isn't lying. */
4300 set_mem_align (target, align);
4302 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4305 else if (partial > 0)
4307 /* Scalar partly in registers. */
4309 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4310 int i;
4311 int not_stack;
4312 /* # bytes of start of argument
4313 that we must make space for but need not store. */
4314 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4315 int args_offset = INTVAL (args_so_far);
4316 int skip;
4318 /* Push padding now if padding above and stack grows down,
4319 or if padding below and stack grows up.
4320 But if space already allocated, this has already been done. */
4321 if (extra && args_addr == 0
4322 && where_pad != none && where_pad != stack_direction)
4323 anti_adjust_stack (GEN_INT (extra));
4325 /* If we make space by pushing it, we might as well push
4326 the real data. Otherwise, we can leave OFFSET nonzero
4327 and leave the space uninitialized. */
4328 if (args_addr == 0)
4329 offset = 0;
4331 /* Now NOT_STACK gets the number of words that we don't need to
4332 allocate on the stack. Convert OFFSET to words too. */
4333 not_stack = (partial - offset) / UNITS_PER_WORD;
4334 offset /= UNITS_PER_WORD;
4336 /* If the partial register-part of the arg counts in its stack size,
4337 skip the part of stack space corresponding to the registers.
4338 Otherwise, start copying to the beginning of the stack space,
4339 by setting SKIP to 0. */
4340 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4342 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4343 x = validize_mem (force_const_mem (mode, x));
4345 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4346 SUBREGs of such registers are not allowed. */
4347 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4348 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4349 x = copy_to_reg (x);
4351 /* Loop over all the words allocated on the stack for this arg. */
4352 /* We can do it by words, because any scalar bigger than a word
4353 has a size a multiple of a word. */
4354 for (i = size - 1; i >= not_stack; i--)
4355 if (i >= not_stack + offset)
4356 emit_push_insn (operand_subword_force (x, i, mode),
4357 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4358 0, args_addr,
4359 GEN_INT (args_offset + ((i - not_stack + skip)
4360 * UNITS_PER_WORD)),
4361 reg_parm_stack_space, alignment_pad);
4363 else
4365 rtx addr;
4366 rtx dest;
4368 /* Push padding now if padding above and stack grows down,
4369 or if padding below and stack grows up.
4370 But if space already allocated, this has already been done. */
4371 if (extra && args_addr == 0
4372 && where_pad != none && where_pad != stack_direction)
4373 anti_adjust_stack (GEN_INT (extra));
4375 #ifdef PUSH_ROUNDING
4376 if (args_addr == 0 && PUSH_ARGS)
4377 emit_single_push_insn (mode, x, type);
4378 else
4379 #endif
4381 if (CONST_INT_P (args_so_far))
4382 addr
4383 = memory_address (mode,
4384 plus_constant (Pmode, args_addr,
4385 INTVAL (args_so_far)));
4386 else
4387 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4388 args_so_far));
4389 dest = gen_rtx_MEM (mode, addr);
4391 /* We do *not* set_mem_attributes here, because incoming arguments
4392 may overlap with sibling call outgoing arguments and we cannot
4393 allow reordering of reads from function arguments with stores
4394 to outgoing arguments of sibling calls. We do, however, want
4395 to record the alignment of the stack slot. */
4396 /* ALIGN may well be better aligned than TYPE, e.g. due to
4397 PARM_BOUNDARY. Assume the caller isn't lying. */
4398 set_mem_align (dest, align);
4400 emit_move_insn (dest, x);
4404 /* If part should go in registers, copy that part
4405 into the appropriate registers. Do this now, at the end,
4406 since mem-to-mem copies above may do function calls. */
4407 if (partial > 0 && reg != 0)
4409 /* Handle calls that pass values in multiple non-contiguous locations.
4410 The Irix 6 ABI has examples of this. */
4411 if (GET_CODE (reg) == PARALLEL)
4412 emit_group_load (reg, x, type, -1);
4413 else
4415 gcc_assert (partial % UNITS_PER_WORD == 0);
4416 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4420 if (extra && args_addr == 0 && where_pad == stack_direction)
4421 anti_adjust_stack (GEN_INT (extra));
4423 if (alignment_pad && args_addr == 0)
4424 anti_adjust_stack (alignment_pad);
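/* For illustration only (not from the GCC sources; the type and function
   names are made up): pushing a small struct argument by value, e.g.

       struct S { char c[6]; };
       void g (struct S);
       void f (struct S *p) { g (*p); }

   takes the BLKmode branch of emit_push_insn above; if the target has
   push insns and the move-by-pieces heuristics allow it, the six bytes
   are pushed with move_by_pieces, otherwise push_block reserves the slot
   and emit_block_move copies the data with BLOCK_OP_CALL_PARM.  */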
4427 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4428 operations. */
4430 static rtx
4431 get_subtarget (rtx x)
4433 return (optimize
4434 || x == 0
4435 /* Only registers can be subtargets. */
4436 || !REG_P (x)
4437 /* Don't use hard regs to avoid extending their life. */
4438 || REGNO (x) < FIRST_PSEUDO_REGISTER
4439 ? 0 : x);
4442 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4443 FIELD is a bitfield. Returns true if the optimization was successful,
4444 and there's nothing else to do. */
4446 static bool
4447 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4448 unsigned HOST_WIDE_INT bitpos,
4449 unsigned HOST_WIDE_INT bitregion_start,
4450 unsigned HOST_WIDE_INT bitregion_end,
4451 enum machine_mode mode1, rtx str_rtx,
4452 tree to, tree src)
4454 enum machine_mode str_mode = GET_MODE (str_rtx);
4455 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4456 tree op0, op1;
4457 rtx value, result;
4458 optab binop;
4459 gimple srcstmt;
4460 enum tree_code code;
4462 if (mode1 != VOIDmode
4463 || bitsize >= BITS_PER_WORD
4464 || str_bitsize > BITS_PER_WORD
4465 || TREE_SIDE_EFFECTS (to)
4466 || TREE_THIS_VOLATILE (to))
4467 return false;
4469 STRIP_NOPS (src);
4470 if (TREE_CODE (src) != SSA_NAME)
4471 return false;
4472 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4473 return false;
4475 srcstmt = get_gimple_for_ssa_name (src);
4476 if (!srcstmt
4477 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4478 return false;
4480 code = gimple_assign_rhs_code (srcstmt);
4482 op0 = gimple_assign_rhs1 (srcstmt);
4484 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4485 to find its initialization. Hopefully the initialization will
4486 be from a bitfield load. */
4487 if (TREE_CODE (op0) == SSA_NAME)
4489 gimple op0stmt = get_gimple_for_ssa_name (op0);
4491 /* We want to eventually have OP0 be the same as TO, which
4492 should be a bitfield. */
4493 if (!op0stmt
4494 || !is_gimple_assign (op0stmt)
4495 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4496 return false;
4497 op0 = gimple_assign_rhs1 (op0stmt);
4500 op1 = gimple_assign_rhs2 (srcstmt);
4502 if (!operand_equal_p (to, op0, 0))
4503 return false;
4505 if (MEM_P (str_rtx))
4507 unsigned HOST_WIDE_INT offset1;
4509 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4510 str_mode = word_mode;
4511 str_mode = get_best_mode (bitsize, bitpos,
4512 bitregion_start, bitregion_end,
4513 MEM_ALIGN (str_rtx), str_mode, 0);
4514 if (str_mode == VOIDmode)
4515 return false;
4516 str_bitsize = GET_MODE_BITSIZE (str_mode);
4518 offset1 = bitpos;
4519 bitpos %= str_bitsize;
4520 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4521 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4523 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4524 return false;
4526 /* If the bit field covers the whole REG/MEM, store_field
4527 will likely generate better code. */
4528 if (bitsize >= str_bitsize)
4529 return false;
4531 /* We can't handle fields split across multiple entities. */
4532 if (bitpos + bitsize > str_bitsize)
4533 return false;
4535 if (BYTES_BIG_ENDIAN)
4536 bitpos = str_bitsize - bitpos - bitsize;
4538 switch (code)
4540 case PLUS_EXPR:
4541 case MINUS_EXPR:
4542 /* For now, just optimize the case of the topmost bitfield
4543 where we don't need to do any masking and also
4544 1 bit bitfields where xor can be used.
4545 We might win by one instruction for the other bitfields
4546 too if insv/extv instructions aren't used, so that
4547 can be added later. */
4548 if (bitpos + bitsize != str_bitsize
4549 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4550 break;
4552 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4553 value = convert_modes (str_mode,
4554 TYPE_MODE (TREE_TYPE (op1)), value,
4555 TYPE_UNSIGNED (TREE_TYPE (op1)));
4557 /* We may be accessing data outside the field, which means
4558 we can alias adjacent data. */
4559 if (MEM_P (str_rtx))
4561 str_rtx = shallow_copy_rtx (str_rtx);
4562 set_mem_alias_set (str_rtx, 0);
4563 set_mem_expr (str_rtx, 0);
4566 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4567 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4569 value = expand_and (str_mode, value, const1_rtx, NULL);
4570 binop = xor_optab;
4572 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4573 result = expand_binop (str_mode, binop, str_rtx,
4574 value, str_rtx, 1, OPTAB_WIDEN);
4575 if (result != str_rtx)
4576 emit_move_insn (str_rtx, result);
4577 return true;
4579 case BIT_IOR_EXPR:
4580 case BIT_XOR_EXPR:
4581 if (TREE_CODE (op1) != INTEGER_CST)
4582 break;
4583 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4584 value = convert_modes (str_mode,
4585 TYPE_MODE (TREE_TYPE (op1)), value,
4586 TYPE_UNSIGNED (TREE_TYPE (op1)));
4588 /* We may be accessing data outside the field, which means
4589 we can alias adjacent data. */
4590 if (MEM_P (str_rtx))
4592 str_rtx = shallow_copy_rtx (str_rtx);
4593 set_mem_alias_set (str_rtx, 0);
4594 set_mem_expr (str_rtx, 0);
4597 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4598 if (bitpos + bitsize != str_bitsize)
4600 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4601 str_mode);
4602 value = expand_and (str_mode, value, mask, NULL_RTX);
4604 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4605 result = expand_binop (str_mode, binop, str_rtx,
4606 value, str_rtx, 1, OPTAB_WIDEN);
4607 if (result != str_rtx)
4608 emit_move_insn (str_rtx, result);
4609 return true;
4611 default:
4612 break;
4615 return false;
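/* For illustration only (not from the GCC sources; names are made up):
   for a single-bit field update such as

       struct S { unsigned a : 1; unsigned b : 31; } s;
       void f (void) { s.a ^= 1; }

   the routine above flips the bit with one XOR on the word containing
   the bit field instead of an extract/modify/insert sequence; the
   PLUS_EXPR/MINUS_EXPR cases likewise handle increments of the topmost
   field and of 1-bit fields.  */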
4618 /* In the C++ memory model, consecutive bit fields in a structure are
4619 considered one memory location.
4621 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4622 returns the bit range of consecutive bits in which this COMPONENT_REF
4623 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4624 and *OFFSET may be adjusted in the process.
4626 If the access does not need to be restricted, 0 is returned in both
4627 *BITSTART and *BITEND. */
4629 static void
4630 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4631 unsigned HOST_WIDE_INT *bitend,
4632 tree exp,
4633 HOST_WIDE_INT *bitpos,
4634 tree *offset)
4636 HOST_WIDE_INT bitoffset;
4637 tree field, repr;
4639 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4641 field = TREE_OPERAND (exp, 1);
4642 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4643 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4644 need to limit the range we can access. */
4645 if (!repr)
4647 *bitstart = *bitend = 0;
4648 return;
4651 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4652 part of a larger bit field, then the representative does not serve any
4653 useful purpose. This can occur in Ada. */
4654 if (handled_component_p (TREE_OPERAND (exp, 0)))
4656 enum machine_mode rmode;
4657 HOST_WIDE_INT rbitsize, rbitpos;
4658 tree roffset;
4659 int unsignedp;
4660 int volatilep = 0;
4661 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4662 &roffset, &rmode, &unsignedp, &volatilep, false);
4663 if ((rbitpos % BITS_PER_UNIT) != 0)
4665 *bitstart = *bitend = 0;
4666 return;
4670 /* Compute the adjustment to bitpos from the offset of the field
4671 relative to the representative. DECL_FIELD_OFFSET of field and
4672 repr are the same by construction if they are not constants,
4673 see finish_bitfield_layout. */
4674 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4675 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4676 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4677 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4678 else
4679 bitoffset = 0;
4680 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4681 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4683 /* If the adjustment is larger than bitpos, we would have a negative bit
4684 position for the lower bound and this may wreak havoc later. Adjust
4685 offset and bitpos to make the lower bound non-negative in that case. */
4686 if (bitoffset > *bitpos)
4688 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4689 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4691 *bitpos += adjust;
4692 if (*offset == NULL_TREE)
4693 *offset = size_int (-adjust / BITS_PER_UNIT);
4694 else
4695 *offset
4696 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4697 *bitstart = 0;
4699 else
4700 *bitstart = *bitpos - bitoffset;
4702 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
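/* For illustration only (not from the GCC sources; names are made up):
   in

       struct S { char c; int a : 3; int b : 5; char d; } s;

   the adjacent bit fields a and b share one DECL_BIT_FIELD_REPRESENTATIVE,
   so a store to s.a may be implemented as a read-modify-write of the bits
   covering a and b, but under the C++11 memory model it must not touch
   the bytes holding c or d; *BITSTART and *BITEND delimit exactly that
   permitted region.  */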
4705 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4706 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4707 DECL_RTL was not set yet, return NORTL. */
4709 static inline bool
4710 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4712 if (TREE_CODE (addr) != ADDR_EXPR)
4713 return false;
4715 tree base = TREE_OPERAND (addr, 0);
4717 if (!DECL_P (base)
4718 || TREE_ADDRESSABLE (base)
4719 || DECL_MODE (base) == BLKmode)
4720 return false;
4722 if (!DECL_RTL_SET_P (base))
4723 return nortl;
4725 return (!MEM_P (DECL_RTL (base)));
4728 /* Returns true if the MEM_REF REF refers to an object that does not
4729 reside in memory and has non-BLKmode. */
4731 static inline bool
4732 mem_ref_refers_to_non_mem_p (tree ref)
4734 tree base = TREE_OPERAND (ref, 0);
4735 return addr_expr_of_non_mem_decl_p_1 (base, false);
4738 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4739 is true, try generating a nontemporal store. */
4741 void
4742 expand_assignment (tree to, tree from, bool nontemporal)
4744 rtx to_rtx = 0;
4745 rtx result;
4746 enum machine_mode mode;
4747 unsigned int align;
4748 enum insn_code icode;
4750 /* Don't crash if the lhs of the assignment was erroneous. */
4751 if (TREE_CODE (to) == ERROR_MARK)
4753 expand_normal (from);
4754 return;
4757 /* Optimize away no-op moves without side-effects. */
4758 if (operand_equal_p (to, from, 0))
4759 return;
4761 /* Handle misaligned stores. */
4762 mode = TYPE_MODE (TREE_TYPE (to));
4763 if ((TREE_CODE (to) == MEM_REF
4764 || TREE_CODE (to) == TARGET_MEM_REF)
4765 && mode != BLKmode
4766 && !mem_ref_refers_to_non_mem_p (to)
4767 && ((align = get_object_alignment (to))
4768 < GET_MODE_ALIGNMENT (mode))
4769 && (((icode = optab_handler (movmisalign_optab, mode))
4770 != CODE_FOR_nothing)
4771 || SLOW_UNALIGNED_ACCESS (mode, align)))
4773 rtx reg, mem;
4775 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4776 reg = force_not_mem (reg);
4777 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4779 if (icode != CODE_FOR_nothing)
4781 struct expand_operand ops[2];
4783 create_fixed_operand (&ops[0], mem);
4784 create_input_operand (&ops[1], reg, mode);
4785 /* The movmisalign<mode> pattern cannot fail, else the assignment
4786 would silently be omitted. */
4787 expand_insn (icode, 2, ops);
4789 else
4790 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4791 return;
4794 /* Assignment of a structure component needs special treatment
4795 if the structure component's rtx is not simply a MEM.
4796 Assignment of an array element at a constant index, and assignment of
4797 an array element in an unaligned packed structure field, have the same
4798 problem. Same for (partially) storing into a non-memory object. */
4799 if (handled_component_p (to)
4800 || (TREE_CODE (to) == MEM_REF
4801 && mem_ref_refers_to_non_mem_p (to))
4802 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4804 enum machine_mode mode1;
4805 HOST_WIDE_INT bitsize, bitpos;
4806 unsigned HOST_WIDE_INT bitregion_start = 0;
4807 unsigned HOST_WIDE_INT bitregion_end = 0;
4808 tree offset;
4809 int unsignedp;
4810 int volatilep = 0;
4811 tree tem;
4813 push_temp_slots ();
4814 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4815 &unsignedp, &volatilep, true);
4817 /* Make sure bitpos is not negative, it can wreak havoc later. */
4818 if (bitpos < 0)
4820 gcc_assert (offset == NULL_TREE);
4821 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4822 ? 3 : exact_log2 (BITS_PER_UNIT)));
4823 bitpos &= BITS_PER_UNIT - 1;
4826 if (TREE_CODE (to) == COMPONENT_REF
4827 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4828 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4829 /* The C++ memory model naturally applies to byte-aligned fields.
4830 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4831 BITSIZE are not byte-aligned, there is no need to limit the range
4832 we can access. This can occur with packed structures in Ada. */
4833 else if (bitsize > 0
4834 && bitsize % BITS_PER_UNIT == 0
4835 && bitpos % BITS_PER_UNIT == 0)
4837 bitregion_start = bitpos;
4838 bitregion_end = bitpos + bitsize - 1;
4841 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4843 /* If the field has a mode, we want to access it in the
4844 field's mode, not the computed mode.
4845 If a MEM has VOIDmode (external with incomplete type),
4846 use BLKmode for it instead. */
4847 if (MEM_P (to_rtx))
4849 if (mode1 != VOIDmode)
4850 to_rtx = adjust_address (to_rtx, mode1, 0);
4851 else if (GET_MODE (to_rtx) == VOIDmode)
4852 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4855 if (offset != 0)
4857 enum machine_mode address_mode;
4858 rtx offset_rtx;
4860 if (!MEM_P (to_rtx))
4862 /* We can get constant negative offsets into arrays with broken
4863 user code. Translate this to a trap instead of ICEing. */
4864 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4865 expand_builtin_trap ();
4866 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4869 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4870 address_mode = get_address_mode (to_rtx);
4871 if (GET_MODE (offset_rtx) != address_mode)
4872 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4874 /* If we have an expression in OFFSET_RTX and a non-zero
4875 byte offset in BITPOS, adding the byte offset before the
4876 OFFSET_RTX results in better intermediate code, which makes
4877 later rtl optimization passes perform better.
4879 We prefer intermediate code like this:
4881 r124:DI=r123:DI+0x18
4882 [r124:DI]=r121:DI
4884 ... instead of ...
4886 r124:DI=r123:DI+0x10
4887 [r124:DI+0x8]=r121:DI
4889 This is only done for aligned data values, as these can
4890 be expected to result in single move instructions. */
4891 if (mode1 != VOIDmode
4892 && bitpos != 0
4893 && bitsize > 0
4894 && (bitpos % bitsize) == 0
4895 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4896 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4898 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4899 bitregion_start = 0;
4900 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4901 bitregion_end -= bitpos;
4902 bitpos = 0;
4905 to_rtx = offset_address (to_rtx, offset_rtx,
4906 highest_pow2_factor_for_target (to,
4907 offset));
4910 /* No action is needed if the target is not a memory and the field
4911 lies completely outside that target. This can occur if the source
4912 code contains an out-of-bounds access to a small array. */
4913 if (!MEM_P (to_rtx)
4914 && GET_MODE (to_rtx) != BLKmode
4915 && (unsigned HOST_WIDE_INT) bitpos
4916 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4918 expand_normal (from);
4919 result = NULL;
4921 /* Handle expand_expr of a complex value returning a CONCAT. */
4922 else if (GET_CODE (to_rtx) == CONCAT)
4924 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4925 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4926 && bitpos == 0
4927 && bitsize == mode_bitsize)
4928 result = store_expr (from, to_rtx, false, nontemporal);
4929 else if (bitsize == mode_bitsize / 2
4930 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4931 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4932 nontemporal);
4933 else if (bitpos + bitsize <= mode_bitsize / 2)
4934 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4935 bitregion_start, bitregion_end,
4936 mode1, from,
4937 get_alias_set (to), nontemporal);
4938 else if (bitpos >= mode_bitsize / 2)
4939 result = store_field (XEXP (to_rtx, 1), bitsize,
4940 bitpos - mode_bitsize / 2,
4941 bitregion_start, bitregion_end,
4942 mode1, from,
4943 get_alias_set (to), nontemporal);
4944 else if (bitpos == 0 && bitsize == mode_bitsize)
4946 rtx from_rtx;
4947 result = expand_normal (from);
4948 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4949 TYPE_MODE (TREE_TYPE (from)), 0);
4950 emit_move_insn (XEXP (to_rtx, 0),
4951 read_complex_part (from_rtx, false));
4952 emit_move_insn (XEXP (to_rtx, 1),
4953 read_complex_part (from_rtx, true));
4955 else
4957 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4958 GET_MODE_SIZE (GET_MODE (to_rtx)));
4959 write_complex_part (temp, XEXP (to_rtx, 0), false);
4960 write_complex_part (temp, XEXP (to_rtx, 1), true);
4961 result = store_field (temp, bitsize, bitpos,
4962 bitregion_start, bitregion_end,
4963 mode1, from,
4964 get_alias_set (to), nontemporal);
4965 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4966 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4969 else
4971 if (MEM_P (to_rtx))
4973 /* If the field is at offset zero, we could have been given the
4974 DECL_RTX of the parent struct. Don't munge it. */
4975 to_rtx = shallow_copy_rtx (to_rtx);
4976 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4977 if (volatilep)
4978 MEM_VOLATILE_P (to_rtx) = 1;
4981 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4982 bitregion_start, bitregion_end,
4983 mode1,
4984 to_rtx, to, from))
4985 result = NULL;
4986 else
4987 result = store_field (to_rtx, bitsize, bitpos,
4988 bitregion_start, bitregion_end,
4989 mode1, from,
4990 get_alias_set (to), nontemporal);
4993 if (result)
4994 preserve_temp_slots (result);
4995 pop_temp_slots ();
4996 return;
4999 /* If the rhs is a function call and its value is not an aggregate,
5000 call the function before we start to compute the lhs.
5001 This is needed for correct code for cases such as
5002 val = setjmp (buf) on machines where reference to val
5003 requires loading up part of an address in a separate insn.
5005 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5006 since it might be a promoted variable where the zero- or sign- extension
5007 needs to be done. Handling this in the normal way is safe because no
5008 computation is done before the call. The same is true for SSA names. */
5009 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5010 && COMPLETE_TYPE_P (TREE_TYPE (from))
5011 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5012 && ! (((TREE_CODE (to) == VAR_DECL
5013 || TREE_CODE (to) == PARM_DECL
5014 || TREE_CODE (to) == RESULT_DECL)
5015 && REG_P (DECL_RTL (to)))
5016 || TREE_CODE (to) == SSA_NAME))
5018 rtx value;
5020 push_temp_slots ();
5021 value = expand_normal (from);
5022 if (to_rtx == 0)
5023 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5025 /* Handle calls that return values in multiple non-contiguous locations.
5026 The Irix 6 ABI has examples of this. */
5027 if (GET_CODE (to_rtx) == PARALLEL)
5029 if (GET_CODE (value) == PARALLEL)
5030 emit_group_move (to_rtx, value);
5031 else
5032 emit_group_load (to_rtx, value, TREE_TYPE (from),
5033 int_size_in_bytes (TREE_TYPE (from)));
5035 else if (GET_CODE (value) == PARALLEL)
5036 emit_group_store (to_rtx, value, TREE_TYPE (from),
5037 int_size_in_bytes (TREE_TYPE (from)));
5038 else if (GET_MODE (to_rtx) == BLKmode)
5040 /* Handle calls that return BLKmode values in registers. */
5041 if (REG_P (value))
5042 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5043 else
5044 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5046 else
5048 if (POINTER_TYPE_P (TREE_TYPE (to)))
5049 value = convert_memory_address_addr_space
5050 (GET_MODE (to_rtx), value,
5051 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5053 emit_move_insn (to_rtx, value);
5055 preserve_temp_slots (to_rtx);
5056 pop_temp_slots ();
5057 return;
5060 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5061 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5063 /* Don't move directly into a return register. */
5064 if (TREE_CODE (to) == RESULT_DECL
5065 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5067 rtx temp;
5069 push_temp_slots ();
5071 /* If the source is itself a return value, it still is in a pseudo at
5072 this point so we can move it back to the return register directly. */
5073 if (REG_P (to_rtx)
5074 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5075 && TREE_CODE (from) != CALL_EXPR)
5076 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5077 else
5078 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5080 /* Handle calls that return values in multiple non-contiguous locations.
5081 The Irix 6 ABI has examples of this. */
5082 if (GET_CODE (to_rtx) == PARALLEL)
5084 if (GET_CODE (temp) == PARALLEL)
5085 emit_group_move (to_rtx, temp);
5086 else
5087 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5088 int_size_in_bytes (TREE_TYPE (from)));
5090 else if (temp)
5091 emit_move_insn (to_rtx, temp);
5093 preserve_temp_slots (to_rtx);
5094 pop_temp_slots ();
5095 return;
5098 /* In case we are returning the contents of an object which overlaps
5099 the place the value is being stored, use a safe function when copying
5100 a value through a pointer into a structure value return block. */
5101 if (TREE_CODE (to) == RESULT_DECL
5102 && TREE_CODE (from) == INDIRECT_REF
5103 && ADDR_SPACE_GENERIC_P
5104 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5105 && refs_may_alias_p (to, from)
5106 && cfun->returns_struct
5107 && !cfun->returns_pcc_struct)
5109 rtx from_rtx, size;
5111 push_temp_slots ();
5112 size = expr_size (from);
5113 from_rtx = expand_normal (from);
5115 emit_library_call (memmove_libfunc, LCT_NORMAL,
5116 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5117 XEXP (from_rtx, 0), Pmode,
5118 convert_to_mode (TYPE_MODE (sizetype),
5119 size, TYPE_UNSIGNED (sizetype)),
5120 TYPE_MODE (sizetype));
5122 preserve_temp_slots (to_rtx);
5123 pop_temp_slots ();
5124 return;
5127 /* Compute FROM and store the value in the rtx we got. */
5129 push_temp_slots ();
5130 result = store_expr (from, to_rtx, 0, nontemporal);
5131 preserve_temp_slots (result);
5132 pop_temp_slots ();
5133 return;
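/* For illustration only (not from the GCC sources; the typedef name is
   made up): on a strict-alignment target, a store through a pointer to
   an under-aligned type, e.g.

       typedef int myint __attribute__ ((aligned (1)));
       void f (myint *p) { *p = 42; }

   reaches the misaligned-store path near the top of expand_assignment:
   it uses the target's movmisalign<mode> pattern when one exists and
   otherwise falls back to store_bit_field.  */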
5136 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5137 succeeded, false otherwise. */
5139 bool
5140 emit_storent_insn (rtx to, rtx from)
5142 struct expand_operand ops[2];
5143 enum machine_mode mode = GET_MODE (to);
5144 enum insn_code code = optab_handler (storent_optab, mode);
5146 if (code == CODE_FOR_nothing)
5147 return false;
5149 create_fixed_operand (&ops[0], to);
5150 create_input_operand (&ops[1], from, mode);
5151 return maybe_expand_insn (code, 2, ops);
5154 /* Generate code for computing expression EXP,
5155 and storing the value into TARGET.
5157 If the mode is BLKmode then we may return TARGET itself.
5158 It turns out that in BLKmode it doesn't cause a problem,
5159 because C has no operators that could combine two different
5160 assignments into the same BLKmode object with different values
5161 with no sequence point. Will other languages need this to
5162 be more thorough?
5164 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5165 stack, and block moves may need to be treated specially.
5167 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5170 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5172 rtx temp;
5173 rtx alt_rtl = NULL_RTX;
5174 location_t loc = curr_insn_location ();
5176 if (VOID_TYPE_P (TREE_TYPE (exp)))
5178 /* C++ can generate ?: expressions with a throw expression in one
5179 branch and an rvalue in the other. Here, we resolve attempts to
5180 store the throw expression's nonexistent result. */
5181 gcc_assert (!call_param_p);
5182 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5183 return NULL_RTX;
5185 if (TREE_CODE (exp) == COMPOUND_EXPR)
5187 /* Perform first part of compound expression, then assign from second
5188 part. */
5189 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5190 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5191 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5192 nontemporal);
5194 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5196 /* For conditional expression, get safe form of the target. Then
5197 test the condition, doing the appropriate assignment on either
5198 side. This avoids the creation of unnecessary temporaries.
5199 For non-BLKmode, it is more efficient not to do this. */
5201 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5203 do_pending_stack_adjust ();
5204 NO_DEFER_POP;
5205 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5206 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5207 nontemporal);
5208 emit_jump_insn (gen_jump (lab2));
5209 emit_barrier ();
5210 emit_label (lab1);
5211 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5212 nontemporal);
5213 emit_label (lab2);
5214 OK_DEFER_POP;
5216 return NULL_RTX;
5218 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5219 /* If this is a scalar in a register that is stored in a wider mode
5220 than the declared mode, compute the result into its declared mode
5221 and then convert to the wider mode. Our value is the computed
5222 expression. */
5224 rtx inner_target = 0;
5226 /* We can do the conversion inside EXP, which will often result
5227 in some optimizations. Do the conversion in two steps: first
5228 change the signedness, if needed, then the extend. But don't
5229 do this if the type of EXP is a subtype of something else
5230 since then the conversion might involve more than just
5231 converting modes. */
5232 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5233 && TREE_TYPE (TREE_TYPE (exp)) == 0
5234 && GET_MODE_PRECISION (GET_MODE (target))
5235 == TYPE_PRECISION (TREE_TYPE (exp)))
5237 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5238 TYPE_UNSIGNED (TREE_TYPE (exp))))
5240 /* Some types, e.g. Fortran's logical*4, won't have a signed
5241 version, so use the mode instead. */
5242 tree ntype
5243 = (signed_or_unsigned_type_for
5244 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5245 if (ntype == NULL)
5246 ntype = lang_hooks.types.type_for_mode
5247 (TYPE_MODE (TREE_TYPE (exp)),
5248 SUBREG_PROMOTED_SIGN (target));
5250 exp = fold_convert_loc (loc, ntype, exp);
5253 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5254 (GET_MODE (SUBREG_REG (target)),
5255 SUBREG_PROMOTED_SIGN (target)),
5256 exp);
5258 inner_target = SUBREG_REG (target);
5261 temp = expand_expr (exp, inner_target, VOIDmode,
5262 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5264 /* If TEMP is a VOIDmode constant, use convert_modes to make
5265 sure that we properly convert it. */
5266 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5268 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5269 temp, SUBREG_PROMOTED_SIGN (target));
5270 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5271 GET_MODE (target), temp,
5272 SUBREG_PROMOTED_SIGN (target));
5275 convert_move (SUBREG_REG (target), temp,
5276 SUBREG_PROMOTED_SIGN (target));
5278 return NULL_RTX;
5280 else if ((TREE_CODE (exp) == STRING_CST
5281 || (TREE_CODE (exp) == MEM_REF
5282 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5283 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5284 == STRING_CST
5285 && integer_zerop (TREE_OPERAND (exp, 1))))
5286 && !nontemporal && !call_param_p
5287 && MEM_P (target))
5289 /* Optimize initialization of an array with a STRING_CST. */
5290 HOST_WIDE_INT exp_len, str_copy_len;
5291 rtx dest_mem;
5292 tree str = TREE_CODE (exp) == STRING_CST
5293 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5295 exp_len = int_expr_size (exp);
5296 if (exp_len <= 0)
5297 goto normal_expr;
5299 if (TREE_STRING_LENGTH (str) <= 0)
5300 goto normal_expr;
5302 str_copy_len = strlen (TREE_STRING_POINTER (str));
5303 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5304 goto normal_expr;
5306 str_copy_len = TREE_STRING_LENGTH (str);
5307 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5308 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5310 str_copy_len += STORE_MAX_PIECES - 1;
5311 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5313 str_copy_len = MIN (str_copy_len, exp_len);
5314 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5315 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5316 MEM_ALIGN (target), false))
5317 goto normal_expr;
5319 dest_mem = target;
5321 dest_mem = store_by_pieces (dest_mem,
5322 str_copy_len, builtin_strncpy_read_str,
5323 CONST_CAST (char *,
5324 TREE_STRING_POINTER (str)),
5325 MEM_ALIGN (target), false,
5326 exp_len > str_copy_len ? 1 : 0);
5327 if (exp_len > str_copy_len)
5328 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5329 GEN_INT (exp_len - str_copy_len),
5330 BLOCK_OP_NORMAL);
5331 return NULL_RTX;
5333 else
5335 rtx tmp_target;
5337 normal_expr:
5338 /* If we want to use a nontemporal store, force the value to
5339 register first. */
5340 tmp_target = nontemporal ? NULL_RTX : target;
5341 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5342 (call_param_p
5343 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5344 &alt_rtl, false);
5347 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5348 the same as that of TARGET, adjust the constant. This is needed, for
5349 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5350 only a word-sized value. */
5351 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5352 && TREE_CODE (exp) != ERROR_MARK
5353 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5354 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5355 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5357 /* If value was not generated in the target, store it there.
5358 Convert the value to TARGET's type first if necessary and emit the
5359 pending incrementations that have been queued when expanding EXP.
5360 Note that we cannot emit the whole queue blindly because this will
5361 effectively disable the POST_INC optimization later.
5363 If TEMP and TARGET compare equal according to rtx_equal_p, but
5364 one or both of them are volatile memory refs, we have to distinguish
5365 two cases:
5366 - expand_expr has used TARGET. In this case, we must not generate
5367 another copy. This can be detected by TARGET being equal according
5368 to == .
5369 - expand_expr has not used TARGET - that means that the source just
5370 happens to have the same RTX form. Since temp will have been created
5371 by expand_expr, it will compare unequal according to == .
5372 We must generate a copy in this case, to reach the correct number
5373 of volatile memory references. */
5375 if ((! rtx_equal_p (temp, target)
5376 || (temp != target && (side_effects_p (temp)
5377 || side_effects_p (target))))
5378 && TREE_CODE (exp) != ERROR_MARK
5379 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5380 but TARGET is not a valid memory reference, TEMP will differ
5381 from TARGET although it is really the same location. */
5382 && !(alt_rtl
5383 && rtx_equal_p (alt_rtl, target)
5384 && !side_effects_p (alt_rtl)
5385 && !side_effects_p (target))
5386 /* If there's nothing to copy, don't bother. Don't call
5387 expr_size unless necessary, because some front ends' (C++)
5388 expr_size hook must not be given objects that are not
5389 supposed to be bit-copied or bit-initialized. */
5390 && expr_size (exp) != const0_rtx)
5392 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5394 if (GET_MODE (target) == BLKmode)
5396 /* Handle calls that return BLKmode values in registers. */
5397 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5398 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5399 else
5400 store_bit_field (target,
5401 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5402 0, 0, 0, GET_MODE (temp), temp);
5404 else
5405 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5408 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5410 /* Handle copying a string constant into an array. The string
5411 constant may be shorter than the array. So copy just the string's
5412 actual length, and clear the rest. First get the size of the data
5413 type of the string, which is actually the size of the target. */
5414 rtx size = expr_size (exp);
5416 if (CONST_INT_P (size)
5417 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5418 emit_block_move (target, temp, size,
5419 (call_param_p
5420 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5421 else
5423 enum machine_mode pointer_mode
5424 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5425 enum machine_mode address_mode = get_address_mode (target);
5427 /* Compute the size of the data to copy from the string. */
5428 tree copy_size
5429 = size_binop_loc (loc, MIN_EXPR,
5430 make_tree (sizetype, size),
5431 size_int (TREE_STRING_LENGTH (exp)));
5432 rtx copy_size_rtx
5433 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5434 (call_param_p
5435 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5436 rtx_code_label *label = 0;
5438 /* Copy that much. */
5439 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5440 TYPE_UNSIGNED (sizetype));
5441 emit_block_move (target, temp, copy_size_rtx,
5442 (call_param_p
5443 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5445 /* Figure out how much is left in TARGET that we have to clear.
5446 Do all calculations in pointer_mode. */
5447 if (CONST_INT_P (copy_size_rtx))
5449 size = plus_constant (address_mode, size,
5450 -INTVAL (copy_size_rtx));
5451 target = adjust_address (target, BLKmode,
5452 INTVAL (copy_size_rtx));
5454 else
5456 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5457 copy_size_rtx, NULL_RTX, 0,
5458 OPTAB_LIB_WIDEN);
5460 if (GET_MODE (copy_size_rtx) != address_mode)
5461 copy_size_rtx = convert_to_mode (address_mode,
5462 copy_size_rtx,
5463 TYPE_UNSIGNED (sizetype));
5465 target = offset_address (target, copy_size_rtx,
5466 highest_pow2_factor (copy_size));
5467 label = gen_label_rtx ();
5468 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5469 GET_MODE (size), 0, label);
5472 if (size != const0_rtx)
5473 clear_storage (target, size, BLOCK_OP_NORMAL);
5475 if (label)
5476 emit_label (label);
5479 /* Handle calls that return values in multiple non-contiguous locations.
5480 The Irix 6 ABI has examples of this. */
5481 else if (GET_CODE (target) == PARALLEL)
5483 if (GET_CODE (temp) == PARALLEL)
5484 emit_group_move (target, temp);
5485 else
5486 emit_group_load (target, temp, TREE_TYPE (exp),
5487 int_size_in_bytes (TREE_TYPE (exp)));
5489 else if (GET_CODE (temp) == PARALLEL)
5490 emit_group_store (target, temp, TREE_TYPE (exp),
5491 int_size_in_bytes (TREE_TYPE (exp)));
5492 else if (GET_MODE (temp) == BLKmode)
5493 emit_block_move (target, temp, expr_size (exp),
5494 (call_param_p
5495 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5496 /* If we emit a nontemporal store, there is nothing else to do. */
5497 else if (nontemporal && emit_storent_insn (target, temp))
5499 else
5501 temp = force_operand (temp, target);
5502 if (temp != target)
5503 emit_move_insn (target, temp);
5507 return NULL_RTX;
5510 /* Return true if field F of structure TYPE is a flexible array. */
5512 static bool
5513 flexible_array_member_p (const_tree f, const_tree type)
5515 const_tree tf;
5517 tf = TREE_TYPE (f);
5518 return (DECL_CHAIN (f) == NULL
5519 && TREE_CODE (tf) == ARRAY_TYPE
5520 && TYPE_DOMAIN (tf)
5521 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5522 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5523 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5524 && int_size_in_bytes (type) >= 0);
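/* For example, with a hypothetical C declaration

	struct packet
	{
	  int len;
	  char payload[];
	};

   the trailing member PAYLOAD has an array domain with a zero lower
   bound and no TYPE_MAX_VALUE, so it satisfies the test above; a
   member declared as "char payload[1]" has a TYPE_MAX_VALUE and is
   therefore not treated as a flexible array member.  */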
5527 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5528 must have in order for it to completely initialize a value of type TYPE.
5529 Return -1 if the number isn't known.
5531 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5533 static HOST_WIDE_INT
5534 count_type_elements (const_tree type, bool for_ctor_p)
5536 switch (TREE_CODE (type))
5538 case ARRAY_TYPE:
5540 tree nelts;
5542 nelts = array_type_nelts (type);
5543 if (nelts && tree_fits_uhwi_p (nelts))
5545 unsigned HOST_WIDE_INT n;
5547 n = tree_to_uhwi (nelts) + 1;
5548 if (n == 0 || for_ctor_p)
5549 return n;
5550 else
5551 return n * count_type_elements (TREE_TYPE (type), false);
5553 return for_ctor_p ? -1 : 1;
5556 case RECORD_TYPE:
5558 unsigned HOST_WIDE_INT n;
5559 tree f;
5561 n = 0;
5562 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5563 if (TREE_CODE (f) == FIELD_DECL)
5565 if (!for_ctor_p)
5566 n += count_type_elements (TREE_TYPE (f), false);
5567 else if (!flexible_array_member_p (f, type))
5568 /* Don't count flexible arrays, which are not supposed
5569 to be initialized. */
5570 n += 1;
5573 return n;
5576 case UNION_TYPE:
5577 case QUAL_UNION_TYPE:
5579 tree f;
5580 HOST_WIDE_INT n, m;
5582 gcc_assert (!for_ctor_p);
5583 /* Estimate the number of scalars in each field and pick the
5584 maximum. Other estimates would do instead; the idea is simply
5585 to make sure that the estimate is not sensitive to the ordering
5586 of the fields. */
5587 n = 1;
5588 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5589 if (TREE_CODE (f) == FIELD_DECL)
5591 m = count_type_elements (TREE_TYPE (f), false);
5592 /* If the field doesn't span the whole union, add an extra
5593 scalar for the rest. */
5594 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5595 TYPE_SIZE (type)) != 1)
5596 m++;
5597 if (n < m)
5598 n = m;
5600 return n;
5603 case COMPLEX_TYPE:
5604 return 2;
5606 case VECTOR_TYPE:
5607 return TYPE_VECTOR_SUBPARTS (type);
5609 case INTEGER_TYPE:
5610 case REAL_TYPE:
5611 case FIXED_POINT_TYPE:
5612 case ENUMERAL_TYPE:
5613 case BOOLEAN_TYPE:
5614 case POINTER_TYPE:
5615 case OFFSET_TYPE:
5616 case REFERENCE_TYPE:
5617 case NULLPTR_TYPE:
5618 return 1;
5620 case ERROR_MARK:
5621 return 0;
5623 case VOID_TYPE:
5624 case METHOD_TYPE:
5625 case FUNCTION_TYPE:
5626 case LANG_TYPE:
5627 default:
5628 gcc_unreachable ();
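/* As an illustration of the counting above, for a hypothetical type

	struct s { int a; int b[3]; _Complex double c; };

   the !FOR_CTOR_P estimate is 1 + 3 + 2 = 6 scalars, while with
   FOR_CTOR_P the result is 3, the number of top-level elements a
   constructor must supply to initialize the whole structure.  */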
5632 /* Helper for categorize_ctor_elements. Identical interface. */
5634 static bool
5635 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5636 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5638 unsigned HOST_WIDE_INT idx;
5639 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5640 tree value, purpose, elt_type;
5642 /* Whether CTOR is a valid constant initializer, in accordance with what
5643 initializer_constant_valid_p does. If inferred from the constructor
5644 elements, true until proven otherwise. */
5645 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5646 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5648 nz_elts = 0;
5649 init_elts = 0;
5650 num_fields = 0;
5651 elt_type = NULL_TREE;
5653 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5655 HOST_WIDE_INT mult = 1;
5657 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5659 tree lo_index = TREE_OPERAND (purpose, 0);
5660 tree hi_index = TREE_OPERAND (purpose, 1);
5662 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5663 mult = (tree_to_uhwi (hi_index)
5664 - tree_to_uhwi (lo_index) + 1);
5666 num_fields += mult;
5667 elt_type = TREE_TYPE (value);
5669 switch (TREE_CODE (value))
5671 case CONSTRUCTOR:
5673 HOST_WIDE_INT nz = 0, ic = 0;
5675 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5676 p_complete);
5678 nz_elts += mult * nz;
5679 init_elts += mult * ic;
5681 if (const_from_elts_p && const_p)
5682 const_p = const_elt_p;
5684 break;
5686 case INTEGER_CST:
5687 case REAL_CST:
5688 case FIXED_CST:
5689 if (!initializer_zerop (value))
5690 nz_elts += mult;
5691 init_elts += mult;
5692 break;
5694 case STRING_CST:
5695 nz_elts += mult * TREE_STRING_LENGTH (value);
5696 init_elts += mult * TREE_STRING_LENGTH (value);
5697 break;
5699 case COMPLEX_CST:
5700 if (!initializer_zerop (TREE_REALPART (value)))
5701 nz_elts += mult;
5702 if (!initializer_zerop (TREE_IMAGPART (value)))
5703 nz_elts += mult;
5704 init_elts += mult;
5705 break;
5707 case VECTOR_CST:
5709 unsigned i;
5710 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5712 tree v = VECTOR_CST_ELT (value, i);
5713 if (!initializer_zerop (v))
5714 nz_elts += mult;
5715 init_elts += mult;
5718 break;
5720 default:
5722 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5723 nz_elts += mult * tc;
5724 init_elts += mult * tc;
5726 if (const_from_elts_p && const_p)
5727 const_p = initializer_constant_valid_p (value, elt_type)
5728 != NULL_TREE;
5730 break;
5734 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5735 num_fields, elt_type))
5736 *p_complete = false;
5738 *p_nz_elts += nz_elts;
5739 *p_init_elts += init_elts;
5741 return const_p;
5744 /* Examine CTOR to discover:
5745 * how many scalar fields are set to nonzero values,
5746 and place it in *P_NZ_ELTS;
5747 * how many scalar fields in total are in CTOR,
5748 and place it in *P_INIT_ELTS.
5749 * whether the constructor is complete -- in the sense that every
5750 meaningful byte is explicitly given a value --
5751 and place it in *P_COMPLETE.
5753 Return whether or not CTOR is a valid static constant initializer, the same
5754 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5756 bool
5757 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5758 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5760 *p_nz_elts = 0;
5761 *p_init_elts = 0;
5762 *p_complete = true;
5764 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
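/* A small worked example with a hypothetical initializer: for

	struct s { int a; int b; int c; } x = { 0, 5 };

   the constructor has two elements, so *P_NZ_ELTS ends up as 1 (only
   the value 5 is nonzero), *P_INIT_ELTS as 2, and *P_COMPLETE is
   cleared because two elements do not cover the three fields
   required by complete_ctor_at_level_p below.  */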
5767 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5768 of which had type LAST_TYPE. Each element was itself a complete
5769 initializer, in the sense that every meaningful byte was explicitly
5770 given a value. Return true if the same is true for the constructor
5771 as a whole. */
5773 bool
5774 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5775 const_tree last_type)
5777 if (TREE_CODE (type) == UNION_TYPE
5778 || TREE_CODE (type) == QUAL_UNION_TYPE)
5780 if (num_elts == 0)
5781 return false;
5783 gcc_assert (num_elts == 1 && last_type);
5785 /* ??? We could look at each element of the union, and find the
5786 largest element. Which would avoid comparing the size of the
5787 initialized element against any tail padding in the union.
5788 Doesn't seem worth the effort... */
5789 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5792 return count_type_elements (type, true) == num_elts;
5795 /* Return 1 if EXP contains mostly (3/4) zeros. */
5797 static int
5798 mostly_zeros_p (const_tree exp)
5800 if (TREE_CODE (exp) == CONSTRUCTOR)
5802 HOST_WIDE_INT nz_elts, init_elts;
5803 bool complete_p;
5805 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5806 return !complete_p || nz_elts < init_elts / 4;
5809 return initializer_zerop (exp);
5812 /* Return 1 if EXP contains all zeros. */
5814 static int
5815 all_zeros_p (const_tree exp)
5817 if (TREE_CODE (exp) == CONSTRUCTOR)
5819 HOST_WIDE_INT nz_elts, init_elts;
5820 bool complete_p;
5822 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5823 return nz_elts == 0;
5826 return initializer_zerop (exp);
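/* For instance, with hypothetical counts: a constructor that
   initializes 16 scalars of which 3 are nonzero satisfies
   mostly_zeros_p, since 3 < 16 / 4, but not all_zeros_p; with 4
   nonzero scalars it satisfies neither.  */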
5829 /* Helper function for store_constructor.
5830 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5831 CLEARED is as for store_constructor.
5832 ALIAS_SET is the alias set to use for any stores.
5834 This provides a recursive shortcut back to store_constructor when it isn't
5835 necessary to go through store_field. This is so that we can pass through
5836 the cleared field to let store_constructor know that we may not have to
5837 clear a substructure if the outer structure has already been cleared. */
5839 static void
5840 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5841 HOST_WIDE_INT bitpos, enum machine_mode mode,
5842 tree exp, int cleared, alias_set_type alias_set)
5844 if (TREE_CODE (exp) == CONSTRUCTOR
5845 /* We can only call store_constructor recursively if the size and
5846 bit position are on a byte boundary. */
5847 && bitpos % BITS_PER_UNIT == 0
5848 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5849 /* If we have a nonzero bitpos for a register target, then we just
5850 let store_field do the bitfield handling. This is unlikely to
5851 generate unnecessary clear instructions anyway. */
5852 && (bitpos == 0 || MEM_P (target)))
5854 if (MEM_P (target))
5855 target
5856 = adjust_address (target,
5857 GET_MODE (target) == BLKmode
5858 || 0 != (bitpos
5859 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5860 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5863 /* Update the alias set, if required. */
5864 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5865 && MEM_ALIAS_SET (target) != 0)
5867 target = copy_rtx (target);
5868 set_mem_alias_set (target, alias_set);
5871 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5873 else
5874 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5878 /* Returns the number of FIELD_DECLs in TYPE. */
5880 static int
5881 fields_length (const_tree type)
5883 tree t = TYPE_FIELDS (type);
5884 int count = 0;
5886 for (; t; t = DECL_CHAIN (t))
5887 if (TREE_CODE (t) == FIELD_DECL)
5888 ++count;
5890 return count;
5894 /* Store the value of constructor EXP into the rtx TARGET.
5895 TARGET is either a REG or a MEM; we know it cannot conflict, since
5896 safe_from_p has been called.
5897 CLEARED is true if TARGET is known to have been zero'd.
5898 SIZE is the number of bytes of TARGET we are allowed to modify: this
5899 may not be the same as the size of EXP if we are assigning to a field
5900 which has been packed to exclude padding bits. */
5902 static void
5903 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5905 tree type = TREE_TYPE (exp);
5906 #ifdef WORD_REGISTER_OPERATIONS
5907 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5908 #endif
5910 switch (TREE_CODE (type))
5912 case RECORD_TYPE:
5913 case UNION_TYPE:
5914 case QUAL_UNION_TYPE:
5916 unsigned HOST_WIDE_INT idx;
5917 tree field, value;
5919 /* If size is zero or the target is already cleared, do nothing. */
5920 if (size == 0 || cleared)
5921 cleared = 1;
5922 /* We either clear the aggregate or indicate the value is dead. */
5923 else if ((TREE_CODE (type) == UNION_TYPE
5924 || TREE_CODE (type) == QUAL_UNION_TYPE)
5925 && ! CONSTRUCTOR_ELTS (exp))
5926 /* If the constructor is empty, clear the union. */
5928 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5929 cleared = 1;
5932 /* If we are building a static constructor into a register,
5933 set the initial value as zero so we can fold the value into
5934 a constant. But if more than one register is involved,
5935 this probably loses. */
5936 else if (REG_P (target) && TREE_STATIC (exp)
5937 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5939 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5940 cleared = 1;
5943 /* If the constructor has fewer fields than the structure or
5944 if we are initializing the structure to mostly zeros, clear
5945 the whole structure first. Don't do this if TARGET is a
5946 register whose mode size isn't equal to SIZE since
5947 clear_storage can't handle this case. */
5948 else if (size > 0
5949 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5950 != fields_length (type))
5951 || mostly_zeros_p (exp))
5952 && (!REG_P (target)
5953 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5954 == size)))
5956 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5957 cleared = 1;
5960 if (REG_P (target) && !cleared)
5961 emit_clobber (target);
5963 /* Store each element of the constructor into the
5964 corresponding field of TARGET. */
5965 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5967 enum machine_mode mode;
5968 HOST_WIDE_INT bitsize;
5969 HOST_WIDE_INT bitpos = 0;
5970 tree offset;
5971 rtx to_rtx = target;
5973 /* Just ignore missing fields. We cleared the whole
5974 structure, above, if any fields are missing. */
5975 if (field == 0)
5976 continue;
5978 if (cleared && initializer_zerop (value))
5979 continue;
5981 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5982 bitsize = tree_to_uhwi (DECL_SIZE (field));
5983 else
5984 bitsize = -1;
5986 mode = DECL_MODE (field);
5987 if (DECL_BIT_FIELD (field))
5988 mode = VOIDmode;
5990 offset = DECL_FIELD_OFFSET (field);
5991 if (tree_fits_shwi_p (offset)
5992 && tree_fits_shwi_p (bit_position (field)))
5994 bitpos = int_bit_position (field);
5995 offset = 0;
5997 else
5998 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6000 if (offset)
6002 enum machine_mode address_mode;
6003 rtx offset_rtx;
6005 offset
6006 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6007 make_tree (TREE_TYPE (exp),
6008 target));
6010 offset_rtx = expand_normal (offset);
6011 gcc_assert (MEM_P (to_rtx));
6013 address_mode = get_address_mode (to_rtx);
6014 if (GET_MODE (offset_rtx) != address_mode)
6015 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6017 to_rtx = offset_address (to_rtx, offset_rtx,
6018 highest_pow2_factor (offset));
6021 #ifdef WORD_REGISTER_OPERATIONS
6022 /* If this initializes a field that is smaller than a
6023 word, at the start of a word, try to widen it to a full
6024 word. This special case allows us to output C++ member
6025 function initializations in a form that the optimizers
6026 can understand. */
6027 if (REG_P (target)
6028 && bitsize < BITS_PER_WORD
6029 && bitpos % BITS_PER_WORD == 0
6030 && GET_MODE_CLASS (mode) == MODE_INT
6031 && TREE_CODE (value) == INTEGER_CST
6032 && exp_size >= 0
6033 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6035 tree type = TREE_TYPE (value);
6037 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6039 type = lang_hooks.types.type_for_mode
6040 (word_mode, TYPE_UNSIGNED (type));
6041 value = fold_convert (type, value);
6044 if (BYTES_BIG_ENDIAN)
6045 value
6046 = fold_build2 (LSHIFT_EXPR, type, value,
6047 build_int_cst (type,
6048 BITS_PER_WORD - bitsize));
6049 bitsize = BITS_PER_WORD;
6050 mode = word_mode;
6052 #endif
6054 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6055 && DECL_NONADDRESSABLE_P (field))
6057 to_rtx = copy_rtx (to_rtx);
6058 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6061 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6062 value, cleared,
6063 get_alias_set (TREE_TYPE (field)));
6065 break;
6067 case ARRAY_TYPE:
6069 tree value, index;
6070 unsigned HOST_WIDE_INT i;
6071 int need_to_clear;
6072 tree domain;
6073 tree elttype = TREE_TYPE (type);
6074 int const_bounds_p;
6075 HOST_WIDE_INT minelt = 0;
6076 HOST_WIDE_INT maxelt = 0;
6078 domain = TYPE_DOMAIN (type);
6079 const_bounds_p = (TYPE_MIN_VALUE (domain)
6080 && TYPE_MAX_VALUE (domain)
6081 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6082 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6084 /* If we have constant bounds for the range of the type, get them. */
6085 if (const_bounds_p)
6087 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6088 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6091 /* If the constructor has fewer elements than the array, clear
6092 the whole array first. Similarly if this is a static
6093 constructor of a non-BLKmode object. */
6094 if (cleared)
6095 need_to_clear = 0;
6096 else if (REG_P (target) && TREE_STATIC (exp))
6097 need_to_clear = 1;
6098 else
6100 unsigned HOST_WIDE_INT idx;
6101 tree index, value;
6102 HOST_WIDE_INT count = 0, zero_count = 0;
6103 need_to_clear = ! const_bounds_p;
6105 /* This loop is a more accurate version of the loop in
6106 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6107 is also needed to check for missing elements. */
6108 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6110 HOST_WIDE_INT this_node_count;
6112 if (need_to_clear)
6113 break;
6115 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6117 tree lo_index = TREE_OPERAND (index, 0);
6118 tree hi_index = TREE_OPERAND (index, 1);
6120 if (! tree_fits_uhwi_p (lo_index)
6121 || ! tree_fits_uhwi_p (hi_index))
6123 need_to_clear = 1;
6124 break;
6127 this_node_count = (tree_to_uhwi (hi_index)
6128 - tree_to_uhwi (lo_index) + 1);
6130 else
6131 this_node_count = 1;
6133 count += this_node_count;
6134 if (mostly_zeros_p (value))
6135 zero_count += this_node_count;
6138 /* Clear the entire array first if there are any missing
6139 elements, or if the incidence of zero elements is >=
6140 75%. */
6141 if (! need_to_clear
6142 && (count < maxelt - minelt + 1
6143 || 4 * zero_count >= 3 * count))
6144 need_to_clear = 1;
6147 if (need_to_clear && size > 0)
6149 if (REG_P (target))
6150 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6151 else
6152 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6153 cleared = 1;
6156 if (!cleared && REG_P (target))
6157 /* Inform later passes that the old value is dead. */
6158 emit_clobber (target);
6160 /* Store each element of the constructor into the
6161 corresponding element of TARGET, determined by counting the
6162 elements. */
6163 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6165 enum machine_mode mode;
6166 HOST_WIDE_INT bitsize;
6167 HOST_WIDE_INT bitpos;
6168 rtx xtarget = target;
6170 if (cleared && initializer_zerop (value))
6171 continue;
6173 mode = TYPE_MODE (elttype);
6174 if (mode == BLKmode)
6175 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6176 ? tree_to_uhwi (TYPE_SIZE (elttype))
6177 : -1);
6178 else
6179 bitsize = GET_MODE_BITSIZE (mode);
6181 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6183 tree lo_index = TREE_OPERAND (index, 0);
6184 tree hi_index = TREE_OPERAND (index, 1);
6185 rtx index_r, pos_rtx;
6186 HOST_WIDE_INT lo, hi, count;
6187 tree position;
6189 /* If the range is constant and "small", unroll the loop. */
6190 if (const_bounds_p
6191 && tree_fits_shwi_p (lo_index)
6192 && tree_fits_shwi_p (hi_index)
6193 && (lo = tree_to_shwi (lo_index),
6194 hi = tree_to_shwi (hi_index),
6195 count = hi - lo + 1,
6196 (!MEM_P (target)
6197 || count <= 2
6198 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6199 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6200 <= 40 * 8)))))
6202 lo -= minelt; hi -= minelt;
6203 for (; lo <= hi; lo++)
6205 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6207 if (MEM_P (target)
6208 && !MEM_KEEP_ALIAS_SET_P (target)
6209 && TREE_CODE (type) == ARRAY_TYPE
6210 && TYPE_NONALIASED_COMPONENT (type))
6212 target = copy_rtx (target);
6213 MEM_KEEP_ALIAS_SET_P (target) = 1;
6216 store_constructor_field
6217 (target, bitsize, bitpos, mode, value, cleared,
6218 get_alias_set (elttype));
6221 else
6223 rtx_code_label *loop_start = gen_label_rtx ();
6224 rtx_code_label *loop_end = gen_label_rtx ();
6225 tree exit_cond;
6227 expand_normal (hi_index);
6229 index = build_decl (EXPR_LOCATION (exp),
6230 VAR_DECL, NULL_TREE, domain);
6231 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6232 SET_DECL_RTL (index, index_r);
6233 store_expr (lo_index, index_r, 0, false);
6235 /* Build the head of the loop. */
6236 do_pending_stack_adjust ();
6237 emit_label (loop_start);
6239 /* Assign value to element index. */
6240 position =
6241 fold_convert (ssizetype,
6242 fold_build2 (MINUS_EXPR,
6243 TREE_TYPE (index),
6244 index,
6245 TYPE_MIN_VALUE (domain)));
6247 position =
6248 size_binop (MULT_EXPR, position,
6249 fold_convert (ssizetype,
6250 TYPE_SIZE_UNIT (elttype)));
6252 pos_rtx = expand_normal (position);
6253 xtarget = offset_address (target, pos_rtx,
6254 highest_pow2_factor (position));
6255 xtarget = adjust_address (xtarget, mode, 0);
6256 if (TREE_CODE (value) == CONSTRUCTOR)
6257 store_constructor (value, xtarget, cleared,
6258 bitsize / BITS_PER_UNIT);
6259 else
6260 store_expr (value, xtarget, 0, false);
6262 /* Generate a conditional jump to exit the loop. */
6263 exit_cond = build2 (LT_EXPR, integer_type_node,
6264 index, hi_index);
6265 jumpif (exit_cond, loop_end, -1);
6267 /* Update the loop counter, and jump to the head of
6268 the loop. */
6269 expand_assignment (index,
6270 build2 (PLUS_EXPR, TREE_TYPE (index),
6271 index, integer_one_node),
6272 false);
6274 emit_jump (loop_start);
6276 /* Build the end of the loop. */
6277 emit_label (loop_end);
6280 else if ((index != 0 && ! tree_fits_shwi_p (index))
6281 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6283 tree position;
6285 if (index == 0)
6286 index = ssize_int (1);
6288 if (minelt)
6289 index = fold_convert (ssizetype,
6290 fold_build2 (MINUS_EXPR,
6291 TREE_TYPE (index),
6292 index,
6293 TYPE_MIN_VALUE (domain)));
6295 position =
6296 size_binop (MULT_EXPR, index,
6297 fold_convert (ssizetype,
6298 TYPE_SIZE_UNIT (elttype)));
6299 xtarget = offset_address (target,
6300 expand_normal (position),
6301 highest_pow2_factor (position));
6302 xtarget = adjust_address (xtarget, mode, 0);
6303 store_expr (value, xtarget, 0, false);
6305 else
6307 if (index != 0)
6308 bitpos = ((tree_to_shwi (index) - minelt)
6309 * tree_to_uhwi (TYPE_SIZE (elttype)));
6310 else
6311 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6313 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6314 && TREE_CODE (type) == ARRAY_TYPE
6315 && TYPE_NONALIASED_COMPONENT (type))
6317 target = copy_rtx (target);
6318 MEM_KEEP_ALIAS_SET_P (target) = 1;
6320 store_constructor_field (target, bitsize, bitpos, mode, value,
6321 cleared, get_alias_set (elttype));
6324 break;
6327 case VECTOR_TYPE:
6329 unsigned HOST_WIDE_INT idx;
6330 constructor_elt *ce;
6331 int i;
6332 int need_to_clear;
6333 int icode = CODE_FOR_nothing;
6334 tree elttype = TREE_TYPE (type);
6335 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6336 enum machine_mode eltmode = TYPE_MODE (elttype);
6337 HOST_WIDE_INT bitsize;
6338 HOST_WIDE_INT bitpos;
6339 rtvec vector = NULL;
6340 unsigned n_elts;
6341 alias_set_type alias;
6343 gcc_assert (eltmode != BLKmode);
6345 n_elts = TYPE_VECTOR_SUBPARTS (type);
6346 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6348 enum machine_mode mode = GET_MODE (target);
6350 icode = (int) optab_handler (vec_init_optab, mode);
6351 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6352 if (icode != CODE_FOR_nothing)
6354 tree value;
6356 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6357 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6359 icode = CODE_FOR_nothing;
6360 break;
6363 if (icode != CODE_FOR_nothing)
6365 unsigned int i;
6367 vector = rtvec_alloc (n_elts);
6368 for (i = 0; i < n_elts; i++)
6369 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6373 /* If the constructor has fewer elements than the vector,
6374 clear the whole vector first. Similarly if this is a static
6375 constructor of a non-BLKmode object. */
6376 if (cleared)
6377 need_to_clear = 0;
6378 else if (REG_P (target) && TREE_STATIC (exp))
6379 need_to_clear = 1;
6380 else
6382 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6383 tree value;
6385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6387 int n_elts_here = tree_to_uhwi
6388 (int_const_binop (TRUNC_DIV_EXPR,
6389 TYPE_SIZE (TREE_TYPE (value)),
6390 TYPE_SIZE (elttype)));
6392 count += n_elts_here;
6393 if (mostly_zeros_p (value))
6394 zero_count += n_elts_here;
6397 /* Clear the entire vector first if there are any missing elements,
6398 or if the incidence of zero elements is >= 75%. */
6399 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6402 if (need_to_clear && size > 0 && !vector)
6404 if (REG_P (target))
6405 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6406 else
6407 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6408 cleared = 1;
6411 /* Inform later passes that the old value is dead. */
6412 if (!cleared && !vector && REG_P (target))
6413 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6415 if (MEM_P (target))
6416 alias = MEM_ALIAS_SET (target);
6417 else
6418 alias = get_alias_set (elttype);
6420 /* Store each element of the constructor into the corresponding
6421 element of TARGET, determined by counting the elements. */
6422 for (idx = 0, i = 0;
6423 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6424 idx++, i += bitsize / elt_size)
6426 HOST_WIDE_INT eltpos;
6427 tree value = ce->value;
6429 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6430 if (cleared && initializer_zerop (value))
6431 continue;
6433 if (ce->index)
6434 eltpos = tree_to_uhwi (ce->index);
6435 else
6436 eltpos = i;
6438 if (vector)
6440 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6441 elements. */
6442 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6443 RTVEC_ELT (vector, eltpos)
6444 = expand_normal (value);
6446 else
6448 enum machine_mode value_mode =
6449 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6450 ? TYPE_MODE (TREE_TYPE (value))
6451 : eltmode;
6452 bitpos = eltpos * elt_size;
6453 store_constructor_field (target, bitsize, bitpos, value_mode,
6454 value, cleared, alias);
6458 if (vector)
6459 emit_insn (GEN_FCN (icode)
6460 (target,
6461 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6462 break;
6465 default:
6466 gcc_unreachable ();
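/* To illustrate the clearing heuristics with a hypothetical
   initializer:

	int v[100] = { [2] = 1, [7] = 3 };

   the ARRAY_TYPE case above sees only 2 of the 100 elements
   initialized, so the whole object is cleared with clear_storage
   first and just the two nonzero elements are stored individually;
   any explicit zero elements would then be skipped because CLEARED
   is set.  */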
6470 /* Store the value of EXP (an expression tree)
6471 into a subfield of TARGET which has mode MODE and occupies
6472 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6473 If MODE is VOIDmode, it means that we are storing into a bit-field.
6475 BITREGION_START is the bitpos of the first bitfield in this region.
6476 BITREGION_END is the bitpos of the ending bitfield in this region.
6477 These two fields are 0 if the C++ memory model does not apply,
6478 or we are not interested in keeping track of bitfield regions.
6480 Always return const0_rtx unless we have something particular to
6481 return.
6483 ALIAS_SET is the alias set for the destination. This value will
6484 (in general) be different from that for TARGET, since TARGET is a
6485 reference to the containing structure.
6487 If NONTEMPORAL is true, try generating a nontemporal store. */
6489 static rtx
6490 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6491 unsigned HOST_WIDE_INT bitregion_start,
6492 unsigned HOST_WIDE_INT bitregion_end,
6493 enum machine_mode mode, tree exp,
6494 alias_set_type alias_set, bool nontemporal)
6496 if (TREE_CODE (exp) == ERROR_MARK)
6497 return const0_rtx;
6499 /* If we have nothing to store, do nothing unless the expression has
6500 side-effects. */
6501 if (bitsize == 0)
6502 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6504 if (GET_CODE (target) == CONCAT)
6506 /* We're storing into a struct containing a single __complex. */
6508 gcc_assert (!bitpos);
6509 return store_expr (exp, target, 0, nontemporal);
6512 /* If the structure is in a register or if the component
6513 is a bit field, we cannot use addressing to access it.
6514 Use bit-field techniques or SUBREG to store in it. */
6516 if (mode == VOIDmode
6517 || (mode != BLKmode && ! direct_store[(int) mode]
6518 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6519 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6520 || REG_P (target)
6521 || GET_CODE (target) == SUBREG
6522 /* If the field isn't aligned enough to store as an ordinary memref,
6523 store it as a bit field. */
6524 || (mode != BLKmode
6525 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6526 || bitpos % GET_MODE_ALIGNMENT (mode))
6527 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6528 || (bitpos % BITS_PER_UNIT != 0)))
6529 || (bitsize >= 0 && mode != BLKmode
6530 && GET_MODE_BITSIZE (mode) > bitsize)
6531 /* If the RHS and field are a constant size and the size of the
6532 RHS isn't the same size as the bitfield, we must use bitfield
6533 operations. */
6534 || (bitsize >= 0
6535 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6536 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6537 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6538 decl we must use bitfield operations. */
6539 || (bitsize >= 0
6540 && TREE_CODE (exp) == MEM_REF
6541 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6542 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6543 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6544 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6546 rtx temp;
6547 gimple nop_def;
6549 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6550 implies a mask operation. If the precision is the same size as
6551 the field we're storing into, that mask is redundant. This is
6552 particularly common with bit field assignments generated by the
6553 C front end. */
6554 nop_def = get_def_for_expr (exp, NOP_EXPR);
6555 if (nop_def)
6557 tree type = TREE_TYPE (exp);
6558 if (INTEGRAL_TYPE_P (type)
6559 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6560 && bitsize == TYPE_PRECISION (type))
6562 tree op = gimple_assign_rhs1 (nop_def);
6563 type = TREE_TYPE (op);
6564 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6565 exp = op;
6569 temp = expand_normal (exp);
6571 /* If BITSIZE is narrower than the size of the type of EXP
6572 we will be narrowing TEMP. Normally, what's wanted are the
6573 low-order bits. However, if EXP's type is a record and this is a
6574 big-endian machine, we want the upper BITSIZE bits. */
6575 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6576 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6577 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6578 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6579 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6580 NULL_RTX, 1);
6582 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6583 if (mode != VOIDmode && mode != BLKmode
6584 && mode != TYPE_MODE (TREE_TYPE (exp)))
6585 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6587 /* If the modes of TEMP and TARGET are both BLKmode, both
6588 must be in memory and BITPOS must be aligned on a byte
6589 boundary. If so, we simply do a block copy. Likewise
6590 for a BLKmode-like TARGET. */
6591 if (GET_MODE (temp) == BLKmode
6592 && (GET_MODE (target) == BLKmode
6593 || (MEM_P (target)
6594 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6595 && (bitpos % BITS_PER_UNIT) == 0
6596 && (bitsize % BITS_PER_UNIT) == 0)))
6598 gcc_assert (MEM_P (target) && MEM_P (temp)
6599 && (bitpos % BITS_PER_UNIT) == 0);
6601 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6602 emit_block_move (target, temp,
6603 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6604 / BITS_PER_UNIT),
6605 BLOCK_OP_NORMAL);
6607 return const0_rtx;
6610 /* Handle calls that return values in multiple non-contiguous locations.
6611 The Irix 6 ABI has examples of this. */
6612 if (GET_CODE (temp) == PARALLEL)
6614 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6615 rtx temp_target;
6616 if (mode == BLKmode || mode == VOIDmode)
6617 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6618 temp_target = gen_reg_rtx (mode);
6619 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6620 temp = temp_target;
6622 else if (mode == BLKmode)
6624 /* Handle calls that return BLKmode values in registers. */
6625 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6627 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6628 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6629 temp = temp_target;
6631 else
6633 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6634 rtx temp_target;
6635 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6636 temp_target = gen_reg_rtx (mode);
6637 temp_target
6638 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6639 temp_target, mode, mode);
6640 temp = temp_target;
6644 /* Store the value in the bitfield. */
6645 store_bit_field (target, bitsize, bitpos,
6646 bitregion_start, bitregion_end,
6647 mode, temp);
6649 return const0_rtx;
6651 else
6653 /* Now build a reference to just the desired component. */
6654 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6656 if (to_rtx == target)
6657 to_rtx = copy_rtx (to_rtx);
6659 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6660 set_mem_alias_set (to_rtx, alias_set);
6662 return store_expr (exp, to_rtx, 0, nontemporal);
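/* As a rough illustration (the exact path is target-dependent):
   storing to the 3-bit member M of a hypothetical

	struct s { unsigned m : 3; unsigned n : 5; };

   reaches the store_bit_field call above with BITSIZE == 3,
   BITPOS == 0 and MODE == VOIDmode, whereas a byte-aligned int
   member of a structure in memory falls through to the final branch
   and is stored with store_expr on the adjusted address.  */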
6666 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6667 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6668 codes and find the ultimate containing object, which we return.
6670 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6671 bit position, and *PUNSIGNEDP to the signedness of the field.
6672 If the position of the field is variable, we store a tree
6673 giving the variable offset (in units) in *POFFSET.
6674 This offset is in addition to the bit position.
6675 If the position is not variable, we store 0 in *POFFSET.
6677 If any of the extraction expressions is volatile,
6678 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6680 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6681 Otherwise, it is a mode that can be used to access the field.
6683 If the field describes a variable-sized object, *PMODE is set to
6684 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6685 this case, but the address of the object can be found.
6687 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6688 look through nodes that serve as markers of a greater alignment than
6689 the one that can be deduced from the expression. These nodes make it
6690 possible for front-ends to prevent temporaries from being created by
6691 the middle-end on alignment considerations. For that purpose, the
6692 normal operating mode at high-level is to always pass FALSE so that
6693 the ultimate containing object is really returned; moreover, the
6694 associated predicate handled_component_p will always return TRUE
6695 on these nodes, thus indicating that they are essentially handled
6696 by get_inner_reference. TRUE should only be passed when the caller
6697 is scanning the expression in order to build another representation
6698 and specifically knows how to handle these nodes; as such, this is
6699 the normal operating mode in the RTL expanders. */
6701 tree
6702 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6703 HOST_WIDE_INT *pbitpos, tree *poffset,
6704 enum machine_mode *pmode, int *punsignedp,
6705 int *pvolatilep, bool keep_aligning)
6707 tree size_tree = 0;
6708 enum machine_mode mode = VOIDmode;
6709 bool blkmode_bitfield = false;
6710 tree offset = size_zero_node;
6711 offset_int bit_offset = 0;
6713 /* First get the mode, signedness, and size. We do this from just the
6714 outermost expression. */
6715 *pbitsize = -1;
6716 if (TREE_CODE (exp) == COMPONENT_REF)
6718 tree field = TREE_OPERAND (exp, 1);
6719 size_tree = DECL_SIZE (field);
6720 if (flag_strict_volatile_bitfields > 0
6721 && TREE_THIS_VOLATILE (exp)
6722 && DECL_BIT_FIELD_TYPE (field)
6723 && DECL_MODE (field) != BLKmode)
6724 /* Volatile bitfields should be accessed in the mode of the
6725 field's type, not the mode computed based on the bit
6726 size. */
6727 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6728 else if (!DECL_BIT_FIELD (field))
6729 mode = DECL_MODE (field);
6730 else if (DECL_MODE (field) == BLKmode)
6731 blkmode_bitfield = true;
6733 *punsignedp = DECL_UNSIGNED (field);
6735 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6737 size_tree = TREE_OPERAND (exp, 1);
6738 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6739 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6741 /* For vector types, with the correct size of access, use the mode of
6742 the inner type. */
6743 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6744 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6745 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6746 mode = TYPE_MODE (TREE_TYPE (exp));
6748 else
6750 mode = TYPE_MODE (TREE_TYPE (exp));
6751 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6753 if (mode == BLKmode)
6754 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6755 else
6756 *pbitsize = GET_MODE_BITSIZE (mode);
6759 if (size_tree != 0)
6761 if (! tree_fits_uhwi_p (size_tree))
6762 mode = BLKmode, *pbitsize = -1;
6763 else
6764 *pbitsize = tree_to_uhwi (size_tree);
6767 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6768 and find the ultimate containing object. */
6769 while (1)
6771 switch (TREE_CODE (exp))
6773 case BIT_FIELD_REF:
6774 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6775 break;
6777 case COMPONENT_REF:
6779 tree field = TREE_OPERAND (exp, 1);
6780 tree this_offset = component_ref_field_offset (exp);
6782 /* If this field hasn't been filled in yet, don't go past it.
6783 This should only happen when folding expressions made during
6784 type construction. */
6785 if (this_offset == 0)
6786 break;
6788 offset = size_binop (PLUS_EXPR, offset, this_offset);
6789 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6791 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6793 break;
6795 case ARRAY_REF:
6796 case ARRAY_RANGE_REF:
6798 tree index = TREE_OPERAND (exp, 1);
6799 tree low_bound = array_ref_low_bound (exp);
6800 tree unit_size = array_ref_element_size (exp);
6802 /* We assume all arrays have sizes that are a multiple of a byte.
6803 First subtract the lower bound, if any, in the type of the
6804 index, then convert to sizetype and multiply by the size of
6805 the array element. */
6806 if (! integer_zerop (low_bound))
6807 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6808 index, low_bound);
6810 offset = size_binop (PLUS_EXPR, offset,
6811 size_binop (MULT_EXPR,
6812 fold_convert (sizetype, index),
6813 unit_size));
6815 break;
6817 case REALPART_EXPR:
6818 break;
6820 case IMAGPART_EXPR:
6821 bit_offset += *pbitsize;
6822 break;
6824 case VIEW_CONVERT_EXPR:
6825 if (keep_aligning && STRICT_ALIGNMENT
6826 && (TYPE_ALIGN (TREE_TYPE (exp))
6827 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6828 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6829 < BIGGEST_ALIGNMENT)
6830 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6831 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6832 goto done;
6833 break;
6835 case MEM_REF:
6836 /* Hand back the decl for MEM[&decl, off]. */
6837 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6839 tree off = TREE_OPERAND (exp, 1);
6840 if (!integer_zerop (off))
6842 offset_int boff, coff = mem_ref_offset (exp);
6843 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6844 bit_offset += boff;
6846 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6848 goto done;
6850 default:
6851 goto done;
6854 /* If any reference in the chain is volatile, the effect is volatile. */
6855 if (TREE_THIS_VOLATILE (exp))
6856 *pvolatilep = 1;
6858 exp = TREE_OPERAND (exp, 0);
6860 done:
6862 /* If OFFSET is constant, see if we can return the whole thing as a
6863 constant bit position. Make sure to handle overflow during
6864 this conversion. */
6865 if (TREE_CODE (offset) == INTEGER_CST)
6867 offset_int tem = wi::sext (wi::to_offset (offset),
6868 TYPE_PRECISION (sizetype));
6869 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6870 tem += bit_offset;
6871 if (wi::fits_shwi_p (tem))
6873 *pbitpos = tem.to_shwi ();
6874 *poffset = offset = NULL_TREE;
6878 /* Otherwise, split it up. */
6879 if (offset)
6881 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6882 if (wi::neg_p (bit_offset))
6884 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6885 offset_int tem = bit_offset.and_not (mask);
6886 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6887 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6888 bit_offset -= tem;
6889 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6890 offset = size_binop (PLUS_EXPR, offset,
6891 wide_int_to_tree (sizetype, tem));
6894 *pbitpos = bit_offset.to_shwi ();
6895 *poffset = offset;
6898 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6899 if (mode == VOIDmode
6900 && blkmode_bitfield
6901 && (*pbitpos % BITS_PER_UNIT) == 0
6902 && (*pbitsize % BITS_PER_UNIT) == 0)
6903 *pmode = BLKmode;
6904 else
6905 *pmode = mode;
6907 return exp;
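/* A worked example with a hypothetical declaration (assuming a
   4-byte int and a 2-byte short):

	struct s { int a; short b[10]; } x;

   for the reference X.B[I] the containing object returned is X,
   *PBITSIZE is 16 and *PMODE is the mode of short.  When I is a
   constant, *PBITPOS is 32 + 16 * I and *POFFSET is null; when I is
   variable, *PBITPOS holds only the constant 32 contributed by the
   COMPONENT_REF and the byte offset "I * 2" is left in *POFFSET.  */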
6910 /* Return a tree of sizetype representing the size, in bytes, of the element
6911 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6913 tree
6914 array_ref_element_size (tree exp)
6916 tree aligned_size = TREE_OPERAND (exp, 3);
6917 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6918 location_t loc = EXPR_LOCATION (exp);
6920 /* If a size was specified in the ARRAY_REF, it's the size measured
6921 in alignment units of the element type. So multiply by that value. */
6922 if (aligned_size)
6924 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6925 sizetype from another type of the same width and signedness. */
6926 if (TREE_TYPE (aligned_size) != sizetype)
6927 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6928 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6929 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6932 /* Otherwise, take the size from that of the element type. Substitute
6933 any PLACEHOLDER_EXPR that we have. */
6934 else
6935 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6938 /* Return a tree representing the lower bound of the array mentioned in
6939 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6941 tree
6942 array_ref_low_bound (tree exp)
6944 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6946 /* If a lower bound is specified in EXP, use it. */
6947 if (TREE_OPERAND (exp, 2))
6948 return TREE_OPERAND (exp, 2);
6950 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6951 substituting for a PLACEHOLDER_EXPR as needed. */
6952 if (domain_type && TYPE_MIN_VALUE (domain_type))
6953 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6955 /* Otherwise, return a zero of the appropriate type. */
6956 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6959 /* Returns true if REF is an array reference to an array at the end of
6960 a structure. If this is the case, the array may be allocated larger
6961 than its upper bound implies. */
6963 bool
6964 array_at_struct_end_p (tree ref)
6966 if (TREE_CODE (ref) != ARRAY_REF
6967 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6968 return false;
6970 while (handled_component_p (ref))
6972 /* If the reference chain contains a component reference to a
6973 non-union type and another field follows, then the reference
6974 is not at the end of a structure. */
6975 if (TREE_CODE (ref) == COMPONENT_REF
6976 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6978 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6979 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6980 nextf = DECL_CHAIN (nextf);
6981 if (nextf)
6982 return false;
6985 ref = TREE_OPERAND (ref, 0);
6988 /* If the reference is based on a declared entity, the size of the array
6989 is constrained by its given domain. */
6990 if (DECL_P (ref))
6991 return false;
6993 return true;
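/* For example, with a hypothetical struct-hack style declaration

	struct msg { int len; char buf[1]; } *m;

   the reference m->buf[i] satisfies this predicate: BUF is the last
   field and the access is based on a pointer dereference rather than
   a declared object, so the array may have been allocated larger
   than its nominal bound.  The same reference through a declared
   "struct msg m" is rejected by the DECL_P check above.  */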
6996 /* Return a tree representing the upper bound of the array mentioned in
6997 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6999 tree
7000 array_ref_up_bound (tree exp)
7002 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7004 /* If there is a domain type and it has an upper bound, use it, substituting
7005 for a PLACEHOLDER_EXPR as needed. */
7006 if (domain_type && TYPE_MAX_VALUE (domain_type))
7007 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7009 /* Otherwise fail. */
7010 return NULL_TREE;
7013 /* Return a tree representing the offset, in bytes, of the field referenced
7014 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7016 tree
7017 component_ref_field_offset (tree exp)
7019 tree aligned_offset = TREE_OPERAND (exp, 2);
7020 tree field = TREE_OPERAND (exp, 1);
7021 location_t loc = EXPR_LOCATION (exp);
7023 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7024 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7025 value. */
7026 if (aligned_offset)
7028 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7029 sizetype from another type of the same width and signedness. */
7030 if (TREE_TYPE (aligned_offset) != sizetype)
7031 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7032 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7033 size_int (DECL_OFFSET_ALIGN (field)
7034 / BITS_PER_UNIT));
7037 /* Otherwise, take the offset from that of the field. Substitute
7038 any PLACEHOLDER_EXPR that we have. */
7039 else
7040 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7043 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7045 static unsigned HOST_WIDE_INT
7046 target_align (const_tree target)
7048 /* We might have a chain of nested references with intermediate misaligning
7049 bitfield components, so we need to recurse to find out. */
7051 unsigned HOST_WIDE_INT this_align, outer_align;
7053 switch (TREE_CODE (target))
7055 case BIT_FIELD_REF:
7056 return 1;
7058 case COMPONENT_REF:
7059 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7060 outer_align = target_align (TREE_OPERAND (target, 0));
7061 return MIN (this_align, outer_align);
7063 case ARRAY_REF:
7064 case ARRAY_RANGE_REF:
7065 this_align = TYPE_ALIGN (TREE_TYPE (target));
7066 outer_align = target_align (TREE_OPERAND (target, 0));
7067 return MIN (this_align, outer_align);
7069 CASE_CONVERT:
7070 case NON_LVALUE_EXPR:
7071 case VIEW_CONVERT_EXPR:
7072 this_align = TYPE_ALIGN (TREE_TYPE (target));
7073 outer_align = target_align (TREE_OPERAND (target, 0));
7074 return MAX (this_align, outer_align);
7076 default:
7077 return TYPE_ALIGN (TREE_TYPE (target));
7082 /* Given an rtx VALUE that may contain additions and multiplications, return
7083 an equivalent value that just refers to a register, memory, or constant.
7084 This is done by generating instructions to perform the arithmetic and
7085 returning a pseudo-register containing the value.
7087 The returned value may be a REG, SUBREG, MEM or constant. */
7090 force_operand (rtx value, rtx target)
7092 rtx op1, op2;
7093 /* Use subtarget as the target for operand 0 of a binary operation. */
7094 rtx subtarget = get_subtarget (target);
7095 enum rtx_code code = GET_CODE (value);
7097 /* Check for a subreg applied to an expression produced by the loop optimizer. */
7098 if (code == SUBREG
7099 && !REG_P (SUBREG_REG (value))
7100 && !MEM_P (SUBREG_REG (value)))
7102 value
7103 = simplify_gen_subreg (GET_MODE (value),
7104 force_reg (GET_MODE (SUBREG_REG (value)),
7105 force_operand (SUBREG_REG (value),
7106 NULL_RTX)),
7107 GET_MODE (SUBREG_REG (value)),
7108 SUBREG_BYTE (value));
7109 code = GET_CODE (value);
7112 /* Check for a PIC address load. */
7113 if ((code == PLUS || code == MINUS)
7114 && XEXP (value, 0) == pic_offset_table_rtx
7115 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7116 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7117 || GET_CODE (XEXP (value, 1)) == CONST))
7119 if (!subtarget)
7120 subtarget = gen_reg_rtx (GET_MODE (value));
7121 emit_move_insn (subtarget, value);
7122 return subtarget;
7125 if (ARITHMETIC_P (value))
7127 op2 = XEXP (value, 1);
7128 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7129 subtarget = 0;
7130 if (code == MINUS && CONST_INT_P (op2))
7132 code = PLUS;
7133 op2 = negate_rtx (GET_MODE (value), op2);
7136 /* Check for an addition with OP2 a constant integer and our first
7137 operand a PLUS of a virtual register and something else. In that
7138 case, we want to emit the sum of the virtual register and the
7139 constant first and then add the other value. This allows virtual
7140 register instantiation to simply modify the constant rather than
7141 creating another one around this addition. */
7142 if (code == PLUS && CONST_INT_P (op2)
7143 && GET_CODE (XEXP (value, 0)) == PLUS
7144 && REG_P (XEXP (XEXP (value, 0), 0))
7145 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7146 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7148 rtx temp = expand_simple_binop (GET_MODE (value), code,
7149 XEXP (XEXP (value, 0), 0), op2,
7150 subtarget, 0, OPTAB_LIB_WIDEN);
7151 return expand_simple_binop (GET_MODE (value), code, temp,
7152 force_operand (XEXP (XEXP (value,
7153 0), 1), 0),
7154 target, 0, OPTAB_LIB_WIDEN);
7157 op1 = force_operand (XEXP (value, 0), subtarget);
7158 op2 = force_operand (op2, NULL_RTX);
7159 switch (code)
7161 case MULT:
7162 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7163 case DIV:
7164 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7165 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7166 target, 1, OPTAB_LIB_WIDEN);
7167 else
7168 return expand_divmod (0,
7169 FLOAT_MODE_P (GET_MODE (value))
7170 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7171 GET_MODE (value), op1, op2, target, 0);
7172 case MOD:
7173 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7174 target, 0);
7175 case UDIV:
7176 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7177 target, 1);
7178 case UMOD:
7179 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7180 target, 1);
7181 case ASHIFTRT:
7182 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7183 target, 0, OPTAB_LIB_WIDEN);
7184 default:
7185 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7186 target, 1, OPTAB_LIB_WIDEN);
7189 if (UNARY_P (value))
7191 if (!target)
7192 target = gen_reg_rtx (GET_MODE (value));
7193 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7194 switch (code)
7196 case ZERO_EXTEND:
7197 case SIGN_EXTEND:
7198 case TRUNCATE:
7199 case FLOAT_EXTEND:
7200 case FLOAT_TRUNCATE:
7201 convert_move (target, op1, code == ZERO_EXTEND);
7202 return target;
7204 case FIX:
7205 case UNSIGNED_FIX:
7206 expand_fix (target, op1, code == UNSIGNED_FIX);
7207 return target;
7209 case FLOAT:
7210 case UNSIGNED_FLOAT:
7211 expand_float (target, op1, code == UNSIGNED_FLOAT);
7212 return target;
7214 default:
7215 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7219 #ifdef INSN_SCHEDULING
7220 /* On machines that have insn scheduling, we want all memory references to be
7221 explicit, so we need to deal with such paradoxical SUBREGs. */
7222 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7223 value
7224 = simplify_gen_subreg (GET_MODE (value),
7225 force_reg (GET_MODE (SUBREG_REG (value)),
7226 force_operand (SUBREG_REG (value),
7227 NULL_RTX)),
7228 GET_MODE (SUBREG_REG (value)),
7229 SUBREG_BYTE (value));
7230 #endif
7232 return value;
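/* As an illustration (the register number is arbitrary): for
   VALUE == (plus:SI (reg:SI 100) (const_int 8)) the ARITHMETIC_P
   path above forces both operands and hands them to
   expand_simple_binop, so the caller gets back a pseudo (or TARGET)
   holding the sum rather than a bare PLUS expression, which is what
   makes the result directly usable as an operand.  */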
7235 /* Subroutine of expand_expr: return nonzero iff there is no way that
7236 EXP can reference X, which is being modified. TOP_P is nonzero if this
7237 call is going to be used to determine whether we need a temporary
7238 for EXP, as opposed to a recursive call to this function.
7240 It is always safe for this routine to return zero since it merely
7241 searches for optimization opportunities. */
7244 safe_from_p (const_rtx x, tree exp, int top_p)
7246 rtx exp_rtl = 0;
7247 int i, nops;
7249 if (x == 0
7250 /* If EXP has varying size, we MUST use a target since we currently
7251 have no way of allocating temporaries of variable size
7252 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7253 So we assume here that something at a higher level has prevented a
7254 clash. This is somewhat bogus, but the best we can do. Only
7255 do this when X is BLKmode and when we are at the top level. */
7256 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7257 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7258 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7259 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7260 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7261 != INTEGER_CST)
7262 && GET_MODE (x) == BLKmode)
7263 /* If X is in the outgoing argument area, it is always safe. */
7264 || (MEM_P (x)
7265 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7266 || (GET_CODE (XEXP (x, 0)) == PLUS
7267 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7268 return 1;
7270 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7271 find the underlying pseudo. */
7272 if (GET_CODE (x) == SUBREG)
7274 x = SUBREG_REG (x);
7275 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7276 return 0;
7279 /* Now look at our tree code and possibly recurse. */
7280 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7282 case tcc_declaration:
7283 exp_rtl = DECL_RTL_IF_SET (exp);
7284 break;
7286 case tcc_constant:
7287 return 1;
7289 case tcc_exceptional:
7290 if (TREE_CODE (exp) == TREE_LIST)
7292 while (1)
7294 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7295 return 0;
7296 exp = TREE_CHAIN (exp);
7297 if (!exp)
7298 return 1;
7299 if (TREE_CODE (exp) != TREE_LIST)
7300 return safe_from_p (x, exp, 0);
7303 else if (TREE_CODE (exp) == CONSTRUCTOR)
7305 constructor_elt *ce;
7306 unsigned HOST_WIDE_INT idx;
7308 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7309 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7310 || !safe_from_p (x, ce->value, 0))
7311 return 0;
7312 return 1;
7314 else if (TREE_CODE (exp) == ERROR_MARK)
7315 return 1; /* An already-visited SAVE_EXPR? */
7316 else
7317 return 0;
7319 case tcc_statement:
7320 /* The only case we look at here is the DECL_INITIAL inside a
7321 DECL_EXPR. */
7322 return (TREE_CODE (exp) != DECL_EXPR
7323 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7324 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7325 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7327 case tcc_binary:
7328 case tcc_comparison:
7329 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7330 return 0;
7331 /* Fall through. */
7333 case tcc_unary:
7334 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7336 case tcc_expression:
7337 case tcc_reference:
7338 case tcc_vl_exp:
7339 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7340 the expression. If it is set, we conflict iff we are that rtx or
7341 both are in memory. Otherwise, we check all operands of the
7342 expression recursively. */
7344 switch (TREE_CODE (exp))
7346 case ADDR_EXPR:
7347 /* If the operand is static or we are static, we can't conflict.
7348 Likewise if we don't conflict with the operand at all. */
7349 if (staticp (TREE_OPERAND (exp, 0))
7350 || TREE_STATIC (exp)
7351 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7352 return 1;
7354 /* Otherwise, the only way this can conflict is if we are taking
7355 the address of a DECL whose address is part of X, which is
7356 very rare. */
7357 exp = TREE_OPERAND (exp, 0);
7358 if (DECL_P (exp))
7360 if (!DECL_RTL_SET_P (exp)
7361 || !MEM_P (DECL_RTL (exp)))
7362 return 0;
7363 else
7364 exp_rtl = XEXP (DECL_RTL (exp), 0);
7366 break;
7368 case MEM_REF:
7369 if (MEM_P (x)
7370 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7371 get_alias_set (exp)))
7372 return 0;
7373 break;
7375 case CALL_EXPR:
7376 /* Assume that the call will clobber all hard registers and
7377 all of memory. */
7378 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7379 || MEM_P (x))
7380 return 0;
7381 break;
7383 case WITH_CLEANUP_EXPR:
7384 case CLEANUP_POINT_EXPR:
7385 /* Lowered by gimplify.c. */
7386 gcc_unreachable ();
7388 case SAVE_EXPR:
7389 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7391 default:
7392 break;
7395 /* If we have an rtx, we do not need to scan our operands. */
7396 if (exp_rtl)
7397 break;
7399 nops = TREE_OPERAND_LENGTH (exp);
7400 for (i = 0; i < nops; i++)
7401 if (TREE_OPERAND (exp, i) != 0
7402 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7403 return 0;
7405 break;
7407 case tcc_type:
7408 /* Should never get a type here. */
7409 gcc_unreachable ();
7412 /* If we have an rtl, find any enclosed object. Then see if we conflict
7413 with it. */
7414 if (exp_rtl)
7416 if (GET_CODE (exp_rtl) == SUBREG)
7418 exp_rtl = SUBREG_REG (exp_rtl);
7419 if (REG_P (exp_rtl)
7420 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7421 return 0;
7424 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7425 are memory and they conflict. */
7426 return ! (rtx_equal_p (x, exp_rtl)
7427 || (MEM_P (x) && MEM_P (exp_rtl)
7428 && true_dependence (exp_rtl, VOIDmode, x)));
7431 /* If we reach here, it is safe. */
7432 return 1;
7436 /* Return the highest power of two that EXP is known to be a multiple of.
7437 This is used in updating alignment of MEMs in array references. */
7439 unsigned HOST_WIDE_INT
7440 highest_pow2_factor (const_tree exp)
7442 unsigned HOST_WIDE_INT ret;
7443 int trailing_zeros = tree_ctz (exp);
7444 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7445 return BIGGEST_ALIGNMENT;
7446 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7447 if (ret > BIGGEST_ALIGNMENT)
7448 return BIGGEST_ALIGNMENT;
7449 return ret;
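/* Worked example (illustrative): for the INTEGER_CST 24 (binary 11000),
   tree_ctz returns 3, so the result is 1 << 3 == 8; a value with
   HOST_BITS_PER_WIDE_INT or more trailing zeros, or whose power of two
   exceeds BIGGEST_ALIGNMENT, is simply capped at BIGGEST_ALIGNMENT.  */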
7452 /* Similar, except that the alignment requirements of TARGET are
7453 taken into account. Assume it is at least as aligned as its
7454 type, unless it is a COMPONENT_REF in which case the layout of
7455 the structure gives the alignment. */
7457 static unsigned HOST_WIDE_INT
7458 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7460 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7461 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7463 return MAX (factor, talign);
7466 #ifdef HAVE_conditional_move
7467 /* Convert the tree comparison code TCODE to the rtl one where the
7468 signedness is UNSIGNEDP. */
7470 static enum rtx_code
7471 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7473 enum rtx_code code;
7474 switch (tcode)
7476 case EQ_EXPR:
7477 code = EQ;
7478 break;
7479 case NE_EXPR:
7480 code = NE;
7481 break;
7482 case LT_EXPR:
7483 code = unsignedp ? LTU : LT;
7484 break;
7485 case LE_EXPR:
7486 code = unsignedp ? LEU : LE;
7487 break;
7488 case GT_EXPR:
7489 code = unsignedp ? GTU : GT;
7490 break;
7491 case GE_EXPR:
7492 code = unsignedp ? GEU : GE;
7493 break;
7494 case UNORDERED_EXPR:
7495 code = UNORDERED;
7496 break;
7497 case ORDERED_EXPR:
7498 code = ORDERED;
7499 break;
7500 case UNLT_EXPR:
7501 code = UNLT;
7502 break;
7503 case UNLE_EXPR:
7504 code = UNLE;
7505 break;
7506 case UNGT_EXPR:
7507 code = UNGT;
7508 break;
7509 case UNGE_EXPR:
7510 code = UNGE;
7511 break;
7512 case UNEQ_EXPR:
7513 code = UNEQ;
7514 break;
7515 case LTGT_EXPR:
7516 code = LTGT;
7517 break;
7519 default:
7520 gcc_unreachable ();
7522 return code;
7524 #endif
7526 /* Subroutine of expand_expr. Expand the two operands of a binary
7527 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7528 The value may be stored in TARGET if TARGET is nonzero. The
7529 MODIFIER argument is as documented by expand_expr. */
7531 static void
7532 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7533 enum expand_modifier modifier)
7535 if (! safe_from_p (target, exp1, 1))
7536 target = 0;
7537 if (operand_equal_p (exp0, exp1, 0))
7539 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7540 *op1 = copy_rtx (*op0);
7542 else
7544 /* If we need to preserve evaluation order, copy exp0 into its own
7545 temporary variable so that it can't be clobbered by exp1. */
7546 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7547 exp0 = save_expr (exp0);
7548 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7549 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7554 /* Return a MEM that contains constant EXP. DEFER is as for
7555 output_constant_def and MODIFIER is as for expand_expr. */
7557 static rtx
7558 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7560 rtx mem;
7562 mem = output_constant_def (exp, defer);
7563 if (modifier != EXPAND_INITIALIZER)
7564 mem = use_anchored_address (mem);
7565 return mem;
7568 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7569 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7571 static rtx
7572 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7573 enum expand_modifier modifier, addr_space_t as)
7575 rtx result, subtarget;
7576 tree inner, offset;
7577 HOST_WIDE_INT bitsize, bitpos;
7578 int volatilep, unsignedp;
7579 enum machine_mode mode1;
7581 /* If we are taking the address of a constant and are at the top level,
7582 we have to use output_constant_def since we can't call force_const_mem
7583 at top level. */
7584 /* ??? This should be considered a front-end bug. We should not be
7585 generating ADDR_EXPR of something that isn't an LVALUE. The only
7586 exception here is STRING_CST. */
7587 if (CONSTANT_CLASS_P (exp))
7589 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7590 if (modifier < EXPAND_SUM)
7591 result = force_operand (result, target);
7592 return result;
7595 /* Everything must be something allowed by is_gimple_addressable. */
7596 switch (TREE_CODE (exp))
7598 case INDIRECT_REF:
7599 /* This case will happen via recursion for &a->b. */
7600 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7602 case MEM_REF:
7604 tree tem = TREE_OPERAND (exp, 0);
7605 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7606 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7607 return expand_expr (tem, target, tmode, modifier);
7610 case CONST_DECL:
7611 /* Expand the initializer like constants above. */
7612 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7613 0, modifier), 0);
7614 if (modifier < EXPAND_SUM)
7615 result = force_operand (result, target);
7616 return result;
7618 case REALPART_EXPR:
7619 /* The real part of the complex number is always first, therefore
7620 the address is the same as the address of the parent object. */
7621 offset = 0;
7622 bitpos = 0;
7623 inner = TREE_OPERAND (exp, 0);
7624 break;
7626 case IMAGPART_EXPR:
7627 /* The imaginary part of the complex number is always second.
7628 The expression is therefore always offset by the size of the
7629 scalar type. */
7630 offset = 0;
7631 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7632 inner = TREE_OPERAND (exp, 0);
7633 break;
7635 case COMPOUND_LITERAL_EXPR:
7636 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7637 rtl_for_decl_init is called on DECL_INITIAL with
7638 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7639 if (modifier == EXPAND_INITIALIZER
7640 && COMPOUND_LITERAL_EXPR_DECL (exp))
7641 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7642 target, tmode, modifier, as);
7643 /* FALLTHRU */
7644 default:
7645 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7646 expand_expr, as that can have various side effects; LABEL_DECLs for
7647 example, may not have their DECL_RTL set yet. Expand the rtl of
7648 CONSTRUCTORs too, which should yield a memory reference for the
7649 constructor's contents. Assume language specific tree nodes can
7650 be expanded in some interesting way. */
7651 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7652 if (DECL_P (exp)
7653 || TREE_CODE (exp) == CONSTRUCTOR
7654 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7656 result = expand_expr (exp, target, tmode,
7657 modifier == EXPAND_INITIALIZER
7658 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7660 /* If the DECL isn't in memory, then the DECL wasn't properly
7661 marked TREE_ADDRESSABLE, which will be either a front-end
7662 or a tree optimizer bug. */
7664 if (TREE_ADDRESSABLE (exp)
7665 && ! MEM_P (result)
7666 && ! targetm.calls.allocate_stack_slots_for_args ())
7668 error ("local frame unavailable (naked function?)");
7669 return result;
7671 else
7672 gcc_assert (MEM_P (result));
7673 result = XEXP (result, 0);
7675 /* ??? Is this needed anymore? */
7676 if (DECL_P (exp))
7677 TREE_USED (exp) = 1;
7679 if (modifier != EXPAND_INITIALIZER
7680 && modifier != EXPAND_CONST_ADDRESS
7681 && modifier != EXPAND_SUM)
7682 result = force_operand (result, target);
7683 return result;
7686 /* Pass FALSE as the last argument to get_inner_reference although
7687 we are expanding to RTL. The rationale is that we know how to
7688 handle "aligning nodes" here: we can just bypass them because
7689 they won't change the final object whose address will be returned
7690 (they actually exist only for that purpose). */
7691 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7692 &mode1, &unsignedp, &volatilep, false);
7693 break;
7696 /* We must have made progress. */
7697 gcc_assert (inner != exp);
7699 subtarget = offset || bitpos ? NULL_RTX : target;
7700 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7701 inner alignment, force the inner to be sufficiently aligned. */
7702 if (CONSTANT_CLASS_P (inner)
7703 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7705 inner = copy_node (inner);
7706 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7707 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7708 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7710 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7712 if (offset)
7714 rtx tmp;
7716 if (modifier != EXPAND_NORMAL)
7717 result = force_operand (result, NULL);
7718 tmp = expand_expr (offset, NULL_RTX, tmode,
7719 modifier == EXPAND_INITIALIZER
7720 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7722 /* expand_expr is allowed to return an object in a mode other
7723 than TMODE. If it did, we need to convert. */
7724 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7725 tmp = convert_modes (tmode, GET_MODE (tmp),
7726 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7727 result = convert_memory_address_addr_space (tmode, result, as);
7728 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7730 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7731 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7732 else
7734 subtarget = bitpos ? NULL_RTX : target;
7735 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7736 1, OPTAB_LIB_WIDEN);
7740 if (bitpos)
7742 /* Someone beforehand should have rejected taking the address
7743 of such an object. */
7744 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7746 result = convert_memory_address_addr_space (tmode, result, as);
7747 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7748 if (modifier < EXPAND_SUM)
7749 result = force_operand (result, target);
7752 return result;
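/* Illustrative sketch of the code above: for an address such as &s.f,
   get_inner_reference typically hands back S as INNER with the field's
   constant position in BITPOS and any variable part in OFFSET; the address
   of INNER is then expanded recursively, OFFSET (if any) is added as a
   PLUS, and BITPOS / BITS_PER_UNIT is folded in via plus_constant.  The
   exact RTL depends on the target and on MODIFIER.  */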
7755 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7756 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7758 static rtx
7759 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7760 enum expand_modifier modifier)
7762 addr_space_t as = ADDR_SPACE_GENERIC;
7763 enum machine_mode address_mode = Pmode;
7764 enum machine_mode pointer_mode = ptr_mode;
7765 enum machine_mode rmode;
7766 rtx result;
7768 /* Target mode of VOIDmode says "whatever's natural". */
7769 if (tmode == VOIDmode)
7770 tmode = TYPE_MODE (TREE_TYPE (exp));
7772 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7774 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7775 address_mode = targetm.addr_space.address_mode (as);
7776 pointer_mode = targetm.addr_space.pointer_mode (as);
7779 /* We can get called with some Weird Things if the user does silliness
7780 like "(short) &a". In that case, convert_memory_address won't do
7781 the right thing, so ignore the given target mode. */
7782 if (tmode != address_mode && tmode != pointer_mode)
7783 tmode = address_mode;
7785 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7786 tmode, modifier, as);
7788 /* Despite expand_expr's claims about ignoring TMODE when not
7789 strictly convenient, things break if we don't honor it. Note
7790 that combined with the above, we only do this for pointer modes. */
7791 rmode = GET_MODE (result);
7792 if (rmode == VOIDmode)
7793 rmode = tmode;
7794 if (rmode != tmode)
7795 result = convert_memory_address_addr_space (tmode, result, as);
7797 return result;
7800 /* Generate code for computing CONSTRUCTOR EXP.
7801 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7802 is TRUE, instead of creating a temporary variable in memory
7803 NULL is returned and the caller needs to handle it differently. */
7805 static rtx
7806 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7807 bool avoid_temp_mem)
7809 tree type = TREE_TYPE (exp);
7810 enum machine_mode mode = TYPE_MODE (type);
7812 /* Try to avoid creating a temporary at all. This is possible
7813 if all of the initializer is zero.
7814 FIXME: try to handle all [0..255] initializers we can handle
7815 with memset. */
7816 if (TREE_STATIC (exp)
7817 && !TREE_ADDRESSABLE (exp)
7818 && target != 0 && mode == BLKmode
7819 && all_zeros_p (exp))
7821 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7822 return target;
7825 /* All elts simple constants => refer to a constant in memory. But
7826 if this is a non-BLKmode mode, let it store a field at a time
7827 since that should make a CONST_INT, CONST_WIDE_INT or
7828 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7829 use, it is best to store directly into the target unless the type
7830 is large enough that memcpy will be used. If we are making an
7831 initializer and all operands are constant, put it in memory as
7832 well.
7834 FIXME: Avoid trying to fill vector constructors piecemeal.
7835 Output them with output_constant_def below unless we're sure
7836 they're zeros. This should go away when vector initializers
7837 are treated like VECTOR_CST instead of arrays. */
7838 if ((TREE_STATIC (exp)
7839 && ((mode == BLKmode
7840 && ! (target != 0 && safe_from_p (target, exp, 1)))
7841 || TREE_ADDRESSABLE (exp)
7842 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7843 && (! MOVE_BY_PIECES_P
7844 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7845 TYPE_ALIGN (type)))
7846 && ! mostly_zeros_p (exp))))
7847 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7848 && TREE_CONSTANT (exp)))
7850 rtx constructor;
7852 if (avoid_temp_mem)
7853 return NULL_RTX;
7855 constructor = expand_expr_constant (exp, 1, modifier);
7857 if (modifier != EXPAND_CONST_ADDRESS
7858 && modifier != EXPAND_INITIALIZER
7859 && modifier != EXPAND_SUM)
7860 constructor = validize_mem (constructor);
7862 return constructor;
7865 /* Handle calls that pass values in multiple non-contiguous
7866 locations. The Irix 6 ABI has examples of this. */
7867 if (target == 0 || ! safe_from_p (target, exp, 1)
7868 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7870 if (avoid_temp_mem)
7871 return NULL_RTX;
7873 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7876 store_constructor (exp, target, 0, int_expr_size (exp));
7877 return target;
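/* In short, the routine above chooses one of three strategies: clear the
   target directly when a static initializer is all zeros, emit the whole
   CONSTRUCTOR as a pooled constant when it is static and constant enough,
   or fall back to store_constructor to build the value piecewise.  */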
7881 /* expand_expr: generate code for computing expression EXP.
7882 An rtx for the computed value is returned. The value is never null.
7883 In the case of a void EXP, const0_rtx is returned.
7885 The value may be stored in TARGET if TARGET is nonzero.
7886 TARGET is just a suggestion; callers must assume that
7887 the rtx returned may not be the same as TARGET.
7889 If TARGET is CONST0_RTX, it means that the value will be ignored.
7891 If TMODE is not VOIDmode, it suggests generating the
7892 result in mode TMODE. But this is done only when convenient.
7893 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7894 TMODE is just a suggestion; callers must assume that
7895 the rtx returned may not have mode TMODE.
7897 Note that TARGET may have neither TMODE nor MODE. In that case, it
7898 probably will not be used.
7900 If MODIFIER is EXPAND_SUM then when EXP is an addition
7901 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7902 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7903 products as above, or REG or MEM, or constant.
7904 Ordinarily in such cases we would output mul or add instructions
7905 and then return a pseudo reg containing the sum.
7907 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7908 it also marks a label as absolutely required (it can't be dead).
7909 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7910 This is used for outputting expressions used in initializers.
7912 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7913 with a constant address even if that address is not normally legitimate.
7914 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7916 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7917 a call parameter. Such targets require special care as we haven't yet
7918 marked TARGET so that it's safe from being trashed by libcalls. We
7919 don't want to use TARGET for anything but the final result;
7920 intermediate values must go elsewhere. Additionally, calls to
7921 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7923 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7924 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7925 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7926 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7927 recursively.
7929 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7930 In this case, we don't adjust a returned MEM rtx that wouldn't be
7931 sufficiently aligned for its mode; instead, it's up to the caller
7932 to deal with it afterwards. This is used to make sure that unaligned
7933 base objects for which out-of-bounds accesses are supported, for
7934 example record types with trailing arrays, aren't realigned behind
7935 the back of the caller.
7936 The normal operating mode is to pass FALSE for this parameter. */
7939 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7940 enum expand_modifier modifier, rtx *alt_rtl,
7941 bool inner_reference_p)
7943 rtx ret;
7945 /* Handle ERROR_MARK before anybody tries to access its type. */
7946 if (TREE_CODE (exp) == ERROR_MARK
7947 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7949 ret = CONST0_RTX (tmode);
7950 return ret ? ret : const0_rtx;
7953 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7954 inner_reference_p);
7955 return ret;
7958 /* Try to expand the conditional expression which is represented by
7959 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7960 return the rtl reg which represents the result. Otherwise return
7961 NULL_RTX. */
7963 static rtx
7964 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7965 tree treeop1 ATTRIBUTE_UNUSED,
7966 tree treeop2 ATTRIBUTE_UNUSED)
7968 #ifdef HAVE_conditional_move
7969 rtx insn;
7970 rtx op00, op01, op1, op2;
7971 enum rtx_code comparison_code;
7972 enum machine_mode comparison_mode;
7973 gimple srcstmt;
7974 rtx temp;
7975 tree type = TREE_TYPE (treeop1);
7976 int unsignedp = TYPE_UNSIGNED (type);
7977 enum machine_mode mode = TYPE_MODE (type);
7978 enum machine_mode orig_mode = mode;
7980 /* If we cannot do a conditional move on the mode, try doing it
7981 with the promoted mode. */
7982 if (!can_conditionally_move_p (mode))
7984 mode = promote_mode (type, mode, &unsignedp);
7985 if (!can_conditionally_move_p (mode))
7986 return NULL_RTX;
7987 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7989 else
7990 temp = assign_temp (type, 0, 1);
7992 start_sequence ();
7993 expand_operands (treeop1, treeop2,
7994 temp, &op1, &op2, EXPAND_NORMAL);
7996 if (TREE_CODE (treeop0) == SSA_NAME
7997 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7999 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8000 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8001 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8002 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8003 comparison_mode = TYPE_MODE (type);
8004 unsignedp = TYPE_UNSIGNED (type);
8005 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8007 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8009 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8010 enum tree_code cmpcode = TREE_CODE (treeop0);
8011 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8012 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8013 unsignedp = TYPE_UNSIGNED (type);
8014 comparison_mode = TYPE_MODE (type);
8015 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8017 else
8019 op00 = expand_normal (treeop0);
8020 op01 = const0_rtx;
8021 comparison_code = NE;
8022 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8025 if (GET_MODE (op1) != mode)
8026 op1 = gen_lowpart (mode, op1);
8028 if (GET_MODE (op2) != mode)
8029 op2 = gen_lowpart (mode, op2);
8031 /* Try to emit the conditional move. */
8032 insn = emit_conditional_move (temp, comparison_code,
8033 op00, op01, comparison_mode,
8034 op1, op2, mode,
8035 unsignedp);
8037 /* If we could do the conditional move, emit the sequence,
8038 and return. */
8039 if (insn)
8041 rtx_insn *seq = get_insns ();
8042 end_sequence ();
8043 emit_insn (seq);
8044 return convert_modes (orig_mode, mode, temp, 0);
8047 /* Otherwise discard the sequence and fall back to code with
8048 branches. */
8049 end_sequence ();
8050 #endif
8051 return NULL_RTX;
8055 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
8056 enum expand_modifier modifier)
8058 rtx op0, op1, op2, temp;
8059 tree type;
8060 int unsignedp;
8061 enum machine_mode mode;
8062 enum tree_code code = ops->code;
8063 optab this_optab;
8064 rtx subtarget, original_target;
8065 int ignore;
8066 bool reduce_bit_field;
8067 location_t loc = ops->location;
8068 tree treeop0, treeop1, treeop2;
8069 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8070 ? reduce_to_bit_field_precision ((expr), \
8071 target, \
8072 type) \
8073 : (expr))
8075 type = ops->type;
8076 mode = TYPE_MODE (type);
8077 unsignedp = TYPE_UNSIGNED (type);
8079 treeop0 = ops->op0;
8080 treeop1 = ops->op1;
8081 treeop2 = ops->op2;
8083 /* We should be called only on simple (binary or unary) expressions,
8084 exactly those that are valid in gimple expressions that aren't
8085 GIMPLE_SINGLE_RHS (or invalid). */
8086 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8087 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8088 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8090 ignore = (target == const0_rtx
8091 || ((CONVERT_EXPR_CODE_P (code)
8092 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8093 && TREE_CODE (type) == VOID_TYPE));
8095 /* We should be called only if we need the result. */
8096 gcc_assert (!ignore);
8098 /* An operation in what may be a bit-field type needs the
8099 result to be reduced to the precision of the bit-field type,
8100 which is narrower than that of the type's mode. */
8101 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8102 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
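/* Illustrative example: if TYPE is a 3-bit unsigned bit-field type carried
   in QImode, GET_MODE_PRECISION (mode) is 8 while TYPE_PRECISION (type) is
   3, so reduce_bit_field is true and REDUCE_BIT_FIELD truncates each
   arithmetic result back to 3 bits (reduce_to_bit_field_precision masks
   for unsigned types and sign-extends for signed ones).  */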
8104 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8105 target = 0;
8107 /* Use subtarget as the target for operand 0 of a binary operation. */
8108 subtarget = get_subtarget (target);
8109 original_target = target;
8111 switch (code)
8113 case NON_LVALUE_EXPR:
8114 case PAREN_EXPR:
8115 CASE_CONVERT:
8116 if (treeop0 == error_mark_node)
8117 return const0_rtx;
8119 if (TREE_CODE (type) == UNION_TYPE)
8121 tree valtype = TREE_TYPE (treeop0);
8123 /* If both input and output are BLKmode, this conversion isn't doing
8124 anything except possibly changing memory attribute. */
8125 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8127 rtx result = expand_expr (treeop0, target, tmode,
8128 modifier);
8130 result = copy_rtx (result);
8131 set_mem_attributes (result, type, 0);
8132 return result;
8135 if (target == 0)
8137 if (TYPE_MODE (type) != BLKmode)
8138 target = gen_reg_rtx (TYPE_MODE (type));
8139 else
8140 target = assign_temp (type, 1, 1);
8143 if (MEM_P (target))
8144 /* Store data into beginning of memory target. */
8145 store_expr (treeop0,
8146 adjust_address (target, TYPE_MODE (valtype), 0),
8147 modifier == EXPAND_STACK_PARM,
8148 false);
8150 else
8152 gcc_assert (REG_P (target));
8154 /* Store this field into a union of the proper type. */
8155 store_field (target,
8156 MIN ((int_size_in_bytes (TREE_TYPE
8157 (treeop0))
8158 * BITS_PER_UNIT),
8159 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8160 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8163 /* Return the entire union. */
8164 return target;
8167 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8169 op0 = expand_expr (treeop0, target, VOIDmode,
8170 modifier);
8172 /* If the signedness of the conversion differs and OP0 is
8173 a promoted SUBREG, clear that indication since we now
8174 have to do the proper extension. */
8175 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8176 && GET_CODE (op0) == SUBREG)
8177 SUBREG_PROMOTED_VAR_P (op0) = 0;
8179 return REDUCE_BIT_FIELD (op0);
8182 op0 = expand_expr (treeop0, NULL_RTX, mode,
8183 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8184 if (GET_MODE (op0) == mode)
8187 /* If OP0 is a constant, just convert it into the proper mode. */
8188 else if (CONSTANT_P (op0))
8190 tree inner_type = TREE_TYPE (treeop0);
8191 enum machine_mode inner_mode = GET_MODE (op0);
8193 if (inner_mode == VOIDmode)
8194 inner_mode = TYPE_MODE (inner_type);
8196 if (modifier == EXPAND_INITIALIZER)
8197 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8198 subreg_lowpart_offset (mode,
8199 inner_mode));
8200 else
8201 op0 = convert_modes (mode, inner_mode, op0,
8202 TYPE_UNSIGNED (inner_type));
8205 else if (modifier == EXPAND_INITIALIZER)
8206 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8208 else if (target == 0)
8209 op0 = convert_to_mode (mode, op0,
8210 TYPE_UNSIGNED (TREE_TYPE
8211 (treeop0)));
8212 else
8214 convert_move (target, op0,
8215 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8216 op0 = target;
8219 return REDUCE_BIT_FIELD (op0);
8221 case ADDR_SPACE_CONVERT_EXPR:
8223 tree treeop0_type = TREE_TYPE (treeop0);
8224 addr_space_t as_to;
8225 addr_space_t as_from;
8227 gcc_assert (POINTER_TYPE_P (type));
8228 gcc_assert (POINTER_TYPE_P (treeop0_type));
8230 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8231 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8233 /* Conversions between pointers to the same address space should
8234 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8235 gcc_assert (as_to != as_from);
8237 /* Ask target code to handle conversion between pointers
8238 to overlapping address spaces. */
8239 if (targetm.addr_space.subset_p (as_to, as_from)
8240 || targetm.addr_space.subset_p (as_from, as_to))
8242 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8243 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8244 gcc_assert (op0);
8245 return op0;
8248 /* For disjoint address spaces, converting anything but
8249 a null pointer invokes undefined behaviour. We simply
8250 always return a null pointer here. */
8251 return CONST0_RTX (mode);
8254 case POINTER_PLUS_EXPR:
8255 /* Even though the sizetype mode and the pointer's mode can be different,
8256 expand is able to handle this correctly and get the correct result out
8257 of the PLUS_EXPR code. */
8258 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8259 if sizetype precision is smaller than pointer precision. */
8260 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8261 treeop1 = fold_convert_loc (loc, type,
8262 fold_convert_loc (loc, ssizetype,
8263 treeop1));
8264 /* If sizetype precision is larger than pointer precision, truncate the
8265 offset to have matching modes. */
8266 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8267 treeop1 = fold_convert_loc (loc, type, treeop1);
8269 case PLUS_EXPR:
8270 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8271 something else, make sure we add the register to the constant and
8272 then to the other thing. This case can occur during strength
8273 reduction and doing it this way will produce better code if the
8274 frame pointer or argument pointer is eliminated.
8276 fold-const.c will ensure that the constant is always in the inner
8277 PLUS_EXPR, so the only case we need to do anything about is if
8278 sp, ap, or fp is our second argument, in which case we must swap
8279 the innermost first argument and our second argument. */
8281 if (TREE_CODE (treeop0) == PLUS_EXPR
8282 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8283 && TREE_CODE (treeop1) == VAR_DECL
8284 && (DECL_RTL (treeop1) == frame_pointer_rtx
8285 || DECL_RTL (treeop1) == stack_pointer_rtx
8286 || DECL_RTL (treeop1) == arg_pointer_rtx))
8288 gcc_unreachable ();
8291 /* If the result is to be ptr_mode and we are adding an integer to
8292 something, we might be forming a constant. So try to use
8293 plus_constant. If it produces a sum and we can't accept it,
8294 use force_operand. This allows P = &ARR[const] to generate
8295 efficient code on machines where a SYMBOL_REF is not a valid
8296 address.
8298 If this is an EXPAND_SUM call, always return the sum. */
8299 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8300 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8302 if (modifier == EXPAND_STACK_PARM)
8303 target = 0;
8304 if (TREE_CODE (treeop0) == INTEGER_CST
8305 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8306 && TREE_CONSTANT (treeop1))
8308 rtx constant_part;
8309 HOST_WIDE_INT wc;
8310 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8312 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8313 EXPAND_SUM);
8314 /* Use wi::shwi to ensure that the constant is
8315 truncated according to the mode of OP1, then sign extended
8316 to a HOST_WIDE_INT. Using the constant directly can result
8317 in non-canonical RTL in a 64x32 cross compile. */
8318 wc = TREE_INT_CST_LOW (treeop0);
8319 constant_part =
8320 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8321 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8322 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8323 op1 = force_operand (op1, target);
8324 return REDUCE_BIT_FIELD (op1);
8327 else if (TREE_CODE (treeop1) == INTEGER_CST
8328 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8329 && TREE_CONSTANT (treeop0))
8331 rtx constant_part;
8332 HOST_WIDE_INT wc;
8333 enum machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8335 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8336 (modifier == EXPAND_INITIALIZER
8337 ? EXPAND_INITIALIZER : EXPAND_SUM));
8338 if (! CONSTANT_P (op0))
8340 op1 = expand_expr (treeop1, NULL_RTX,
8341 VOIDmode, modifier);
8342 /* Return a PLUS if modifier says it's OK. */
8343 if (modifier == EXPAND_SUM
8344 || modifier == EXPAND_INITIALIZER)
8345 return simplify_gen_binary (PLUS, mode, op0, op1);
8346 goto binop2;
8348 /* Use wi::shwi to ensure that the constant is
8349 truncated according to the mode of OP1, then sign extended
8350 to a HOST_WIDE_INT. Using the constant directly can result
8351 in non-canonical RTL in a 64x32 cross compile. */
8352 wc = TREE_INT_CST_LOW (treeop1);
8353 constant_part
8354 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8355 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8356 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8357 op0 = force_operand (op0, target);
8358 return REDUCE_BIT_FIELD (op0);
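/* Illustrative example: under EXPAND_SUM an address like &arr[3], with
   4-byte elements and ARR static, can reach this point as a SYMBOL_REF
   plus the constant 12; plus_constant folds the 12 into the returned sum
   instead of emitting an add.  The exact RTL depends on the target.  */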
8362 /* Use TER to expand pointer addition of a negated value
8363 as pointer subtraction. */
8364 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8365 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8366 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8367 && TREE_CODE (treeop1) == SSA_NAME
8368 && TYPE_MODE (TREE_TYPE (treeop0))
8369 == TYPE_MODE (TREE_TYPE (treeop1)))
8371 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8372 if (def)
8374 treeop1 = gimple_assign_rhs1 (def);
8375 code = MINUS_EXPR;
8376 goto do_minus;
8380 /* No sense saving up arithmetic to be done
8381 if it's all in the wrong mode to form part of an address.
8382 And force_operand won't know whether to sign-extend or
8383 zero-extend. */
8384 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8385 || mode != ptr_mode)
8387 expand_operands (treeop0, treeop1,
8388 subtarget, &op0, &op1, EXPAND_NORMAL);
8389 if (op0 == const0_rtx)
8390 return op1;
8391 if (op1 == const0_rtx)
8392 return op0;
8393 goto binop2;
8396 expand_operands (treeop0, treeop1,
8397 subtarget, &op0, &op1, modifier);
8398 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8400 case MINUS_EXPR:
8401 do_minus:
8402 /* For initializers, we are allowed to return a MINUS of two
8403 symbolic constants. Here we handle all cases when both operands
8404 are constant. */
8405 /* Handle difference of two symbolic constants,
8406 for the sake of an initializer. */
8407 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8408 && really_constant_p (treeop0)
8409 && really_constant_p (treeop1))
8411 expand_operands (treeop0, treeop1,
8412 NULL_RTX, &op0, &op1, modifier);
8414 /* If the last operand is a CONST_INT, use plus_constant of
8415 the negated constant. Else make the MINUS. */
8416 if (CONST_INT_P (op1))
8417 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8418 -INTVAL (op1)));
8419 else
8420 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8423 /* No sense saving up arithmetic to be done
8424 if it's all in the wrong mode to form part of an address.
8425 And force_operand won't know whether to sign-extend or
8426 zero-extend. */
8427 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8428 || mode != ptr_mode)
8429 goto binop;
8431 expand_operands (treeop0, treeop1,
8432 subtarget, &op0, &op1, modifier);
8434 /* Convert A - const to A + (-const). */
8435 if (CONST_INT_P (op1))
8437 op1 = negate_rtx (mode, op1);
8438 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8441 goto binop2;
8443 case WIDEN_MULT_PLUS_EXPR:
8444 case WIDEN_MULT_MINUS_EXPR:
8445 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8446 op2 = expand_normal (treeop2);
8447 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8448 target, unsignedp);
8449 return target;
8451 case WIDEN_MULT_EXPR:
8452 /* If first operand is constant, swap them.
8453 Thus the following special case checks need only
8454 check the second operand. */
8455 if (TREE_CODE (treeop0) == INTEGER_CST)
8457 tree t1 = treeop0;
8458 treeop0 = treeop1;
8459 treeop1 = t1;
8462 /* First, check if we have a multiplication of one signed and one
8463 unsigned operand. */
8464 if (TREE_CODE (treeop1) != INTEGER_CST
8465 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8466 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8468 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8469 this_optab = usmul_widen_optab;
8470 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8471 != CODE_FOR_nothing)
8473 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8474 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8475 EXPAND_NORMAL);
8476 else
8477 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8478 EXPAND_NORMAL);
8479 /* op0 and op1 might still be constant, despite the above
8480 != INTEGER_CST check. Handle it. */
8481 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8483 op0 = convert_modes (innermode, mode, op0, true);
8484 op1 = convert_modes (innermode, mode, op1, false);
8485 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8486 target, unsignedp));
8488 goto binop3;
8491 /* Check for a multiplication with matching signedness. */
8492 else if ((TREE_CODE (treeop1) == INTEGER_CST
8493 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8494 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8495 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8497 tree op0type = TREE_TYPE (treeop0);
8498 enum machine_mode innermode = TYPE_MODE (op0type);
8499 bool zextend_p = TYPE_UNSIGNED (op0type);
8500 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8501 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8503 if (TREE_CODE (treeop0) != INTEGER_CST)
8505 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8506 != CODE_FOR_nothing)
8508 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8509 EXPAND_NORMAL);
8510 /* op0 and op1 might still be constant, despite the above
8511 != INTEGER_CST check. Handle it. */
8512 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8514 widen_mult_const:
8515 op0 = convert_modes (innermode, mode, op0, zextend_p);
8516 op1
8517 = convert_modes (innermode, mode, op1,
8518 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8519 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8520 target,
8521 unsignedp));
8523 temp = expand_widening_mult (mode, op0, op1, target,
8524 unsignedp, this_optab);
8525 return REDUCE_BIT_FIELD (temp);
8527 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8528 != CODE_FOR_nothing
8529 && innermode == word_mode)
8531 rtx htem, hipart;
8532 op0 = expand_normal (treeop0);
8533 if (TREE_CODE (treeop1) == INTEGER_CST)
8534 op1 = convert_modes (innermode, mode,
8535 expand_normal (treeop1),
8536 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8537 else
8538 op1 = expand_normal (treeop1);
8539 /* op0 and op1 might still be constant, despite the above
8540 != INTEGER_CST check. Handle it. */
8541 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8542 goto widen_mult_const;
8543 temp = expand_binop (mode, other_optab, op0, op1, target,
8544 unsignedp, OPTAB_LIB_WIDEN);
8545 hipart = gen_highpart (innermode, temp);
8546 htem = expand_mult_highpart_adjust (innermode, hipart,
8547 op0, op1, hipart,
8548 zextend_p);
8549 if (htem != hipart)
8550 emit_move_insn (hipart, htem);
8551 return REDUCE_BIT_FIELD (temp);
8555 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8556 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8557 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8558 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8560 case FMA_EXPR:
8562 optab opt = fma_optab;
8563 gimple def0, def2;
8565 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8566 call. */
8567 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8569 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8570 tree call_expr;
8572 gcc_assert (fn != NULL_TREE);
8573 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8574 return expand_builtin (call_expr, target, subtarget, mode, false);
8577 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8578 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8580 op0 = op2 = NULL;
8582 if (def0 && def2
8583 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8585 opt = fnms_optab;
8586 op0 = expand_normal (gimple_assign_rhs1 (def0));
8587 op2 = expand_normal (gimple_assign_rhs1 (def2));
8589 else if (def0
8590 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8592 opt = fnma_optab;
8593 op0 = expand_normal (gimple_assign_rhs1 (def0));
8595 else if (def2
8596 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8598 opt = fms_optab;
8599 op2 = expand_normal (gimple_assign_rhs1 (def2));
8602 if (op0 == NULL)
8603 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8604 if (op2 == NULL)
8605 op2 = expand_normal (treeop2);
8606 op1 = expand_normal (treeop1);
8608 return expand_ternary_op (TYPE_MODE (type), opt,
8609 op0, op1, op2, target, 0);
8612 case MULT_EXPR:
8613 /* If this is a fixed-point operation, then we cannot use the code
8614 below because "expand_mult" doesn't support sat/no-sat fixed-point
8615 multiplications. */
8616 if (ALL_FIXED_POINT_MODE_P (mode))
8617 goto binop;
8619 /* If first operand is constant, swap them.
8620 Thus the following special case checks need only
8621 check the second operand. */
8622 if (TREE_CODE (treeop0) == INTEGER_CST)
8624 tree t1 = treeop0;
8625 treeop0 = treeop1;
8626 treeop1 = t1;
8629 /* Attempt to return something suitable for generating an
8630 indexed address, for machines that support that. */
8632 if (modifier == EXPAND_SUM && mode == ptr_mode
8633 && tree_fits_shwi_p (treeop1))
8635 tree exp1 = treeop1;
8637 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8638 EXPAND_SUM);
8640 if (!REG_P (op0))
8641 op0 = force_operand (op0, NULL_RTX);
8642 if (!REG_P (op0))
8643 op0 = copy_to_mode_reg (mode, op0);
8645 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8646 gen_int_mode (tree_to_shwi (exp1),
8647 TYPE_MODE (TREE_TYPE (exp1)))));
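/* Illustrative example: under EXPAND_SUM with MODE == ptr_mode, an index
   expression such as i * 4 can come back as (mult (reg) (const_int 4)),
   letting the caller fold it into an indexed addressing mode rather than
   materializing the product in a register first.  */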
8650 if (modifier == EXPAND_STACK_PARM)
8651 target = 0;
8653 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8654 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8656 case TRUNC_DIV_EXPR:
8657 case FLOOR_DIV_EXPR:
8658 case CEIL_DIV_EXPR:
8659 case ROUND_DIV_EXPR:
8660 case EXACT_DIV_EXPR:
8661 /* If this is a fixed-point operation, then we cannot use the code
8662 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8663 divisions. */
8664 if (ALL_FIXED_POINT_MODE_P (mode))
8665 goto binop;
8667 if (modifier == EXPAND_STACK_PARM)
8668 target = 0;
8669 /* Possible optimization: compute the dividend with EXPAND_SUM;
8670 then, if the divisor is constant, optimize the case where some
8671 terms of the dividend have coefficients divisible by it. */
8672 expand_operands (treeop0, treeop1,
8673 subtarget, &op0, &op1, EXPAND_NORMAL);
8674 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8676 case RDIV_EXPR:
8677 goto binop;
8679 case MULT_HIGHPART_EXPR:
8680 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8681 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8682 gcc_assert (temp);
8683 return temp;
8685 case TRUNC_MOD_EXPR:
8686 case FLOOR_MOD_EXPR:
8687 case CEIL_MOD_EXPR:
8688 case ROUND_MOD_EXPR:
8689 if (modifier == EXPAND_STACK_PARM)
8690 target = 0;
8691 expand_operands (treeop0, treeop1,
8692 subtarget, &op0, &op1, EXPAND_NORMAL);
8693 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8695 case FIXED_CONVERT_EXPR:
8696 op0 = expand_normal (treeop0);
8697 if (target == 0 || modifier == EXPAND_STACK_PARM)
8698 target = gen_reg_rtx (mode);
8700 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8701 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8702 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8703 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8704 else
8705 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8706 return target;
8708 case FIX_TRUNC_EXPR:
8709 op0 = expand_normal (treeop0);
8710 if (target == 0 || modifier == EXPAND_STACK_PARM)
8711 target = gen_reg_rtx (mode);
8712 expand_fix (target, op0, unsignedp);
8713 return target;
8715 case FLOAT_EXPR:
8716 op0 = expand_normal (treeop0);
8717 if (target == 0 || modifier == EXPAND_STACK_PARM)
8718 target = gen_reg_rtx (mode);
8719 /* expand_float can't figure out what to do if FROM has VOIDmode.
8720 So give it the correct mode. With -O, cse will optimize this. */
8721 if (GET_MODE (op0) == VOIDmode)
8722 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8723 op0);
8724 expand_float (target, op0,
8725 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8726 return target;
8728 case NEGATE_EXPR:
8729 op0 = expand_expr (treeop0, subtarget,
8730 VOIDmode, EXPAND_NORMAL);
8731 if (modifier == EXPAND_STACK_PARM)
8732 target = 0;
8733 temp = expand_unop (mode,
8734 optab_for_tree_code (NEGATE_EXPR, type,
8735 optab_default),
8736 op0, target, 0);
8737 gcc_assert (temp);
8738 return REDUCE_BIT_FIELD (temp);
8740 case ABS_EXPR:
8741 op0 = expand_expr (treeop0, subtarget,
8742 VOIDmode, EXPAND_NORMAL);
8743 if (modifier == EXPAND_STACK_PARM)
8744 target = 0;
8746 /* ABS_EXPR is not valid for complex arguments. */
8747 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8748 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8750 /* Unsigned abs is simply the operand. Testing here means we don't
8751 risk generating incorrect code below. */
8752 if (TYPE_UNSIGNED (type))
8753 return op0;
8755 return expand_abs (mode, op0, target, unsignedp,
8756 safe_from_p (target, treeop0, 1));
8758 case MAX_EXPR:
8759 case MIN_EXPR:
8760 target = original_target;
8761 if (target == 0
8762 || modifier == EXPAND_STACK_PARM
8763 || (MEM_P (target) && MEM_VOLATILE_P (target))
8764 || GET_MODE (target) != mode
8765 || (REG_P (target)
8766 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8767 target = gen_reg_rtx (mode);
8768 expand_operands (treeop0, treeop1,
8769 target, &op0, &op1, EXPAND_NORMAL);
8771 /* First try to do it with a special MIN or MAX instruction.
8772 If that does not win, use a conditional jump to select the proper
8773 value. */
8774 this_optab = optab_for_tree_code (code, type, optab_default);
8775 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8776 OPTAB_WIDEN);
8777 if (temp != 0)
8778 return temp;
8780 /* At this point, a MEM target is no longer useful; we will get better
8781 code without it. */
8783 if (! REG_P (target))
8784 target = gen_reg_rtx (mode);
8786 /* If op1 was placed in target, swap op0 and op1. */
8787 if (target != op0 && target == op1)
8789 temp = op0;
8790 op0 = op1;
8791 op1 = temp;
8794 /* We generate better code and avoid problems with op1 mentioning
8795 target by forcing op1 into a pseudo if it isn't a constant. */
8796 if (! CONSTANT_P (op1))
8797 op1 = force_reg (mode, op1);
8800 enum rtx_code comparison_code;
8801 rtx cmpop1 = op1;
8803 if (code == MAX_EXPR)
8804 comparison_code = unsignedp ? GEU : GE;
8805 else
8806 comparison_code = unsignedp ? LEU : LE;
8808 /* Canonicalize to comparisons against 0. */
8809 if (op1 == const1_rtx)
8811 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8812 or (a != 0 ? a : 1) for unsigned.
8813 For MIN we are safe converting (a <= 1 ? a : 1)
8814 into (a <= 0 ? a : 1) */
8815 cmpop1 = const0_rtx;
8816 if (code == MAX_EXPR)
8817 comparison_code = unsignedp ? NE : GT;
8819 if (op1 == constm1_rtx && !unsignedp)
8821 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8822 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8823 cmpop1 = const0_rtx;
8824 if (code == MIN_EXPR)
8825 comparison_code = LT;
8827 #ifdef HAVE_conditional_move
8828 /* Use a conditional move if possible. */
8829 if (can_conditionally_move_p (mode))
8831 rtx insn;
8833 start_sequence ();
8835 /* Try to emit the conditional move. */
8836 insn = emit_conditional_move (target, comparison_code,
8837 op0, cmpop1, mode,
8838 op0, op1, mode,
8839 unsignedp);
8841 /* If we could do the conditional move, emit the sequence,
8842 and return. */
8843 if (insn)
8845 rtx_insn *seq = get_insns ();
8846 end_sequence ();
8847 emit_insn (seq);
8848 return target;
8851 /* Otherwise discard the sequence and fall back to code with
8852 branches. */
8853 end_sequence ();
8855 #endif
8856 if (target != op0)
8857 emit_move_insn (target, op0);
8859 temp = gen_label_rtx ();
8860 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8861 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8862 -1);
8864 emit_move_insn (target, op1);
8865 emit_label (temp);
8866 return target;
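/* Illustrative note on the fallback above: with neither a min/max pattern
   nor a conditional move available, MAX_EXPR (a, b) is emitted roughly as
       target = a; if (target >= b) goto done; target = b; done:
   with the comparison canonicalized against 0 where the constant allows.  */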
8868 case BIT_NOT_EXPR:
8869 op0 = expand_expr (treeop0, subtarget,
8870 VOIDmode, EXPAND_NORMAL);
8871 if (modifier == EXPAND_STACK_PARM)
8872 target = 0;
8873 /* In case we have to reduce the result to bitfield precision
8874 for an unsigned bitfield, expand this as XOR with a proper
8875 constant instead. */
8876 if (reduce_bit_field && TYPE_UNSIGNED (type))
8878 wide_int mask = wi::mask (TYPE_PRECISION (type),
8879 false, GET_MODE_PRECISION (mode));
8881 temp = expand_binop (mode, xor_optab, op0,
8882 immed_wide_int_const (mask, mode),
8883 target, 1, OPTAB_LIB_WIDEN);
8885 else
8886 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8887 gcc_assert (temp);
8888 return temp;
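/* Illustrative example of the XOR trick above: for a 3-bit unsigned
   bit-field type, wi::mask yields the constant 7, so ~X is expanded as
   X ^ 7, which keeps the result within the 3-bit precision without a
   separate REDUCE_BIT_FIELD step.  */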
8890 /* ??? Can optimize bitwise operations with one arg constant.
8891 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8892 and (a bitwise1 b) bitwise2 b (etc)
8893 but that is probably not worth while. */
8895 case BIT_AND_EXPR:
8896 case BIT_IOR_EXPR:
8897 case BIT_XOR_EXPR:
8898 goto binop;
8900 case LROTATE_EXPR:
8901 case RROTATE_EXPR:
8902 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8903 || (GET_MODE_PRECISION (TYPE_MODE (type))
8904 == TYPE_PRECISION (type)));
8905 /* fall through */
8907 case LSHIFT_EXPR:
8908 case RSHIFT_EXPR:
8909 /* If this is a fixed-point operation, then we cannot use the code
8910 below because "expand_shift" doesn't support sat/no-sat fixed-point
8911 shifts. */
8912 if (ALL_FIXED_POINT_MODE_P (mode))
8913 goto binop;
8915 if (! safe_from_p (subtarget, treeop1, 1))
8916 subtarget = 0;
8917 if (modifier == EXPAND_STACK_PARM)
8918 target = 0;
8919 op0 = expand_expr (treeop0, subtarget,
8920 VOIDmode, EXPAND_NORMAL);
8921 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8922 unsignedp);
8923 if (code == LSHIFT_EXPR)
8924 temp = REDUCE_BIT_FIELD (temp);
8925 return temp;
8927 /* Could determine the answer when only additive constants differ. Also,
8928 the addition of one can be handled by changing the condition. */
8929 case LT_EXPR:
8930 case LE_EXPR:
8931 case GT_EXPR:
8932 case GE_EXPR:
8933 case EQ_EXPR:
8934 case NE_EXPR:
8935 case UNORDERED_EXPR:
8936 case ORDERED_EXPR:
8937 case UNLT_EXPR:
8938 case UNLE_EXPR:
8939 case UNGT_EXPR:
8940 case UNGE_EXPR:
8941 case UNEQ_EXPR:
8942 case LTGT_EXPR:
8943 temp = do_store_flag (ops,
8944 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8945 tmode != VOIDmode ? tmode : mode);
8946 if (temp)
8947 return temp;
8949 /* Use a compare and a jump for BLKmode comparisons, or for function
8950 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8952 if ((target == 0
8953 || modifier == EXPAND_STACK_PARM
8954 || ! safe_from_p (target, treeop0, 1)
8955 || ! safe_from_p (target, treeop1, 1)
8956 /* Make sure we don't have a hard reg (such as function's return
8957 value) live across basic blocks, if not optimizing. */
8958 || (!optimize && REG_P (target)
8959 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8960 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8962 emit_move_insn (target, const0_rtx);
8964 op1 = gen_label_rtx ();
8965 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8967 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8968 emit_move_insn (target, constm1_rtx);
8969 else
8970 emit_move_insn (target, const1_rtx);
8972 emit_label (op1);
8973 return target;
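/* Illustrative note: when do_store_flag cannot handle the comparison, the
   fallback above emits roughly
       target = 0; if (!cond) goto over; target = 1; over:
   storing -1 instead of 1 when the result type is a signed one-bit type.  */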
8975 case COMPLEX_EXPR:
8976 /* Get the rtx code of the operands. */
8977 op0 = expand_normal (treeop0);
8978 op1 = expand_normal (treeop1);
8980 if (!target)
8981 target = gen_reg_rtx (TYPE_MODE (type));
8982 else
8983 /* If target overlaps with op1, then either we need to force
8984 op1 into a pseudo (if target also overlaps with op0),
8985 or write the complex parts in reverse order. */
8986 switch (GET_CODE (target))
8988 case CONCAT:
8989 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8991 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8993 complex_expr_force_op1:
8994 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8995 emit_move_insn (temp, op1);
8996 op1 = temp;
8997 break;
8999 complex_expr_swap_order:
9000 /* Move the imaginary (op1) and real (op0) parts to their
9001 location. */
9002 write_complex_part (target, op1, true);
9003 write_complex_part (target, op0, false);
9005 return target;
9007 break;
9008 case MEM:
9009 temp = adjust_address_nv (target,
9010 GET_MODE_INNER (GET_MODE (target)), 0);
9011 if (reg_overlap_mentioned_p (temp, op1))
9013 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9014 temp = adjust_address_nv (target, imode,
9015 GET_MODE_SIZE (imode));
9016 if (reg_overlap_mentioned_p (temp, op0))
9017 goto complex_expr_force_op1;
9018 goto complex_expr_swap_order;
9020 break;
9021 default:
9022 if (reg_overlap_mentioned_p (target, op1))
9024 if (reg_overlap_mentioned_p (target, op0))
9025 goto complex_expr_force_op1;
9026 goto complex_expr_swap_order;
9028 break;
9031 /* Move the real (op0) and imaginary (op1) parts to their location. */
9032 write_complex_part (target, op0, false);
9033 write_complex_part (target, op1, true);
9035 return target;
9037 case WIDEN_SUM_EXPR:
9039 tree oprnd0 = treeop0;
9040 tree oprnd1 = treeop1;
9042 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9043 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9044 target, unsignedp);
9045 return target;
9048 case REDUC_MAX_EXPR:
9049 case REDUC_MIN_EXPR:
9050 case REDUC_PLUS_EXPR:
9052 op0 = expand_normal (treeop0);
9053 this_optab = optab_for_tree_code (code, type, optab_default);
9054 enum machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9055 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9056 gcc_assert (temp);
9057 /* The tree code produces a scalar result, but (somewhat by convention)
9058 the optab produces a vector with the result in element 0 if
9059 little-endian, or element N-1 if big-endian. So pull the scalar
9060 result out of that element. */
9061 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9062 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9063 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9064 target, mode, mode);
9065 gcc_assert (temp);
9066 return temp;
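/* Illustrative example: for a REDUC_PLUS_EXPR on a V4SImode operand, the
   optab leaves a V4SImode value whose element 0 (little-endian) or element
   3 (big-endian) holds the sum; extract_bit_field then pulls those 32 bits
   out as the SImode scalar result.  The exact modes depend on the vector
   type.  */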
9069 case VEC_LSHIFT_EXPR:
9070 case VEC_RSHIFT_EXPR:
9072 target = expand_vec_shift_expr (ops, target);
9073 return target;
9076 case VEC_UNPACK_HI_EXPR:
9077 case VEC_UNPACK_LO_EXPR:
9079 op0 = expand_normal (treeop0);
9080 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9081 target, unsignedp);
9082 gcc_assert (temp);
9083 return temp;
9086 case VEC_UNPACK_FLOAT_HI_EXPR:
9087 case VEC_UNPACK_FLOAT_LO_EXPR:
9089 op0 = expand_normal (treeop0);
9090 /* The signedness is determined from the input operand. */
9091 temp = expand_widen_pattern_expr
9092 (ops, op0, NULL_RTX, NULL_RTX,
9093 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9095 gcc_assert (temp);
9096 return temp;
9099 case VEC_WIDEN_MULT_HI_EXPR:
9100 case VEC_WIDEN_MULT_LO_EXPR:
9101 case VEC_WIDEN_MULT_EVEN_EXPR:
9102 case VEC_WIDEN_MULT_ODD_EXPR:
9103 case VEC_WIDEN_LSHIFT_HI_EXPR:
9104 case VEC_WIDEN_LSHIFT_LO_EXPR:
9105 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9106 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9107 target, unsignedp);
9108 gcc_assert (target);
9109 return target;
9111 case VEC_PACK_TRUNC_EXPR:
9112 case VEC_PACK_SAT_EXPR:
9113 case VEC_PACK_FIX_TRUNC_EXPR:
9114 mode = TYPE_MODE (TREE_TYPE (treeop0));
9115 goto binop;
9117 case VEC_PERM_EXPR:
9118 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9119 op2 = expand_normal (treeop2);
9121 /* Careful here: if the target doesn't support integral vector modes,
9122 a constant selection vector could wind up smooshed into a normal
9123 integral constant. */
9124 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9126 tree sel_type = TREE_TYPE (treeop2);
9127 enum machine_mode vmode
9128 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9129 TYPE_VECTOR_SUBPARTS (sel_type));
9130 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9131 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9132 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9134 else
9135 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9137 temp = expand_vec_perm (mode, op0, op1, op2, target);
9138 gcc_assert (temp);
9139 return temp;
9141 case DOT_PROD_EXPR:
9143 tree oprnd0 = treeop0;
9144 tree oprnd1 = treeop1;
9145 tree oprnd2 = treeop2;
9146 rtx op2;
9148 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9149 op2 = expand_normal (oprnd2);
9150 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9151 target, unsignedp);
9152 return target;
9155 case SAD_EXPR:
9157 tree oprnd0 = treeop0;
9158 tree oprnd1 = treeop1;
9159 tree oprnd2 = treeop2;
9160 rtx op2;
9162 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9163 op2 = expand_normal (oprnd2);
9164 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9165 target, unsignedp);
9166 return target;
9169 case REALIGN_LOAD_EXPR:
9171 tree oprnd0 = treeop0;
9172 tree oprnd1 = treeop1;
9173 tree oprnd2 = treeop2;
9174 rtx op2;
9176 this_optab = optab_for_tree_code (code, type, optab_default);
9177 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9178 op2 = expand_normal (oprnd2);
9179 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9180 target, unsignedp);
9181 gcc_assert (temp);
9182 return temp;
9185 case COND_EXPR:
9186 /* A COND_EXPR with its type being VOID_TYPE represents a
9187 conditional jump and is handled in
9188 expand_gimple_cond_expr. */
9189 gcc_assert (!VOID_TYPE_P (type));
9191 /* Note that COND_EXPRs whose type is a structure or union
9192 are required to be constructed to contain assignments of
9193 a temporary variable, so that we can evaluate them here
9194 for side effect only. If type is void, we must do likewise. */
9196 gcc_assert (!TREE_ADDRESSABLE (type)
9197 && !ignore
9198 && TREE_TYPE (treeop1) != void_type_node
9199 && TREE_TYPE (treeop2) != void_type_node);
9201 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9202 if (temp)
9203 return temp;
9205 /* If we are not to produce a result, we have no target. Otherwise,
9206 if a target was specified use it; it will not be used as an
9207 intermediate target unless it is safe. If no target, use a
9208 temporary. */
9210 if (modifier != EXPAND_STACK_PARM
9211 && original_target
9212 && safe_from_p (original_target, treeop0, 1)
9213 && GET_MODE (original_target) == mode
9214 && !MEM_P (original_target))
9215 temp = original_target;
9216 else
9217 temp = assign_temp (type, 0, 1);
9219 do_pending_stack_adjust ();
9220 NO_DEFER_POP;
9221 op0 = gen_label_rtx ();
9222 op1 = gen_label_rtx ();
9223 jumpifnot (treeop0, op0, -1);
9224 store_expr (treeop1, temp,
9225 modifier == EXPAND_STACK_PARM,
9226 false);
9228 emit_jump_insn (gen_jump (op1));
9229 emit_barrier ();
9230 emit_label (op0);
9231 store_expr (treeop2, temp,
9232 modifier == EXPAND_STACK_PARM,
9233 false);
9235 emit_label (op1);
9236 OK_DEFER_POP;
9237 return temp;
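/* The sequence emitted above amounts to the usual branching sketch:

       if (!cond) goto L0;
       temp = op1;  goto L1;
     L0:
       temp = op2;
     L1:

   with op0 and op1 reused here as the two label rtxes.  */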
9239 case VEC_COND_EXPR:
9240 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9241 return target;
9243 default:
9244 gcc_unreachable ();
9247 /* Here to do an ordinary binary operator. */
9248 binop:
9249 expand_operands (treeop0, treeop1,
9250 subtarget, &op0, &op1, EXPAND_NORMAL);
9251 binop2:
9252 this_optab = optab_for_tree_code (code, type, optab_default);
9253 binop3:
9254 if (modifier == EXPAND_STACK_PARM)
9255 target = 0;
9256 temp = expand_binop (mode, this_optab, op0, op1, target,
9257 unsignedp, OPTAB_LIB_WIDEN);
9258 gcc_assert (temp);
9259 /* Bitwise operations do not need bitfield reduction as we expect their
9260 operands to be properly truncated. */
9261 if (code == BIT_XOR_EXPR
9262 || code == BIT_AND_EXPR
9263 || code == BIT_IOR_EXPR)
9264 return temp;
9265 return REDUCE_BIT_FIELD (temp);
9267 #undef REDUCE_BIT_FIELD
9270 /* Return TRUE if expression STMT is suitable for replacement.
9271 Never consider memory loads as replaceable, because those don't ever lead
9272 to constant expressions. */
9274 static bool
9275 stmt_is_replaceable_p (gimple stmt)
9277 if (ssa_is_replaceable_p (stmt))
9279 /* Don't move around loads. */
9280 if (!gimple_assign_single_p (stmt)
9281 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9282 return true;
9284 return false;
9288 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9289 enum expand_modifier modifier, rtx *alt_rtl,
9290 bool inner_reference_p)
9292 rtx op0, op1, temp, decl_rtl;
9293 tree type;
9294 int unsignedp;
9295 enum machine_mode mode;
9296 enum tree_code code = TREE_CODE (exp);
9297 rtx subtarget, original_target;
9298 int ignore;
9299 tree context;
9300 bool reduce_bit_field;
9301 location_t loc = EXPR_LOCATION (exp);
9302 struct separate_ops ops;
9303 tree treeop0, treeop1, treeop2;
9304 tree ssa_name = NULL_TREE;
9305 gimple g;
9307 type = TREE_TYPE (exp);
9308 mode = TYPE_MODE (type);
9309 unsignedp = TYPE_UNSIGNED (type);
9311 treeop0 = treeop1 = treeop2 = NULL_TREE;
9312 if (!VL_EXP_CLASS_P (exp))
9313 switch (TREE_CODE_LENGTH (code))
9315 default:
9316 case 3: treeop2 = TREE_OPERAND (exp, 2);
9317 case 2: treeop1 = TREE_OPERAND (exp, 1);
9318 case 1: treeop0 = TREE_OPERAND (exp, 0);
9319 case 0: break;
9321 ops.code = code;
9322 ops.type = type;
9323 ops.op0 = treeop0;
9324 ops.op1 = treeop1;
9325 ops.op2 = treeop2;
9326 ops.location = loc;
9328 ignore = (target == const0_rtx
9329 || ((CONVERT_EXPR_CODE_P (code)
9330 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9331 && TREE_CODE (type) == VOID_TYPE));
9333 /* An operation in what may be a bit-field type needs the
9334 result to be reduced to the precision of the bit-field type,
9335 which is narrower than that of the type's mode. */
9336 reduce_bit_field = (!ignore
9337 && INTEGRAL_TYPE_P (type)
9338 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
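/* For example, a bit-field type with TYPE_PRECISION 3 still has an
   integer TYPE_MODE such as QImode (precision 8), so a value computed
   in that mode may later have to be truncated back to 3 significant
   bits.  */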
9340 /* If we are going to ignore this result, we need only do something
9341 if there is a side-effect somewhere in the expression. If there
9342 is, short-circuit the most common cases here. Note that we must
9343 not call expand_expr with anything but const0_rtx in case this
9344 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9346 if (ignore)
9348 if (! TREE_SIDE_EFFECTS (exp))
9349 return const0_rtx;
9351 /* Ensure we reference a volatile object even if value is ignored, but
9352 don't do this if all we are doing is taking its address. */
9353 if (TREE_THIS_VOLATILE (exp)
9354 && TREE_CODE (exp) != FUNCTION_DECL
9355 && mode != VOIDmode && mode != BLKmode
9356 && modifier != EXPAND_CONST_ADDRESS)
9358 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9359 if (MEM_P (temp))
9360 copy_to_reg (temp);
9361 return const0_rtx;
9364 if (TREE_CODE_CLASS (code) == tcc_unary
9365 || code == BIT_FIELD_REF
9366 || code == COMPONENT_REF
9367 || code == INDIRECT_REF)
9368 return expand_expr (treeop0, const0_rtx, VOIDmode,
9369 modifier);
9371 else if (TREE_CODE_CLASS (code) == tcc_binary
9372 || TREE_CODE_CLASS (code) == tcc_comparison
9373 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9375 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9376 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9377 return const0_rtx;
9380 target = 0;
9383 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9384 target = 0;
9386 /* Use subtarget as the target for operand 0 of a binary operation. */
9387 subtarget = get_subtarget (target);
9388 original_target = target;
9390 switch (code)
9392 case LABEL_DECL:
9394 tree function = decl_function_context (exp);
9396 temp = label_rtx (exp);
9397 temp = gen_rtx_LABEL_REF (Pmode, temp);
9399 if (function != current_function_decl
9400 && function != 0)
9401 LABEL_REF_NONLOCAL_P (temp) = 1;
9403 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9404 return temp;
9407 case SSA_NAME:
9408 /* ??? ivopts calls expander, without any preparation from
9409 out-of-ssa. So fake instructions as if this was an access to the
9410 base variable. This unnecessarily allocates a pseudo, see how we can
9411 reuse it, if partition base vars have it set already. */
9412 if (!currently_expanding_to_rtl)
9414 tree var = SSA_NAME_VAR (exp);
9415 if (var && DECL_RTL_SET_P (var))
9416 return DECL_RTL (var);
9417 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9418 LAST_VIRTUAL_REGISTER + 1);
9421 g = get_gimple_for_ssa_name (exp);
9422 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9423 if (g == NULL
9424 && modifier == EXPAND_INITIALIZER
9425 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9426 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9427 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9428 g = SSA_NAME_DEF_STMT (exp);
9429 if (g)
9431 rtx r;
9432 ops.code = gimple_assign_rhs_code (g);
9433 switch (get_gimple_rhs_class (ops.code))
9435 case GIMPLE_TERNARY_RHS:
9436 ops.op2 = gimple_assign_rhs3 (g);
9437 /* Fallthru */
9438 case GIMPLE_BINARY_RHS:
9439 ops.op1 = gimple_assign_rhs2 (g);
9440 /* Fallthru */
9441 case GIMPLE_UNARY_RHS:
9442 ops.op0 = gimple_assign_rhs1 (g);
9443 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9444 ops.location = gimple_location (g);
9445 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9446 break;
9447 case GIMPLE_SINGLE_RHS:
9449 location_t saved_loc = curr_insn_location ();
9450 set_curr_insn_location (gimple_location (g));
9451 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9452 tmode, modifier, NULL, inner_reference_p);
9453 set_curr_insn_location (saved_loc);
9454 break;
9456 default:
9457 gcc_unreachable ();
9459 if (REG_P (r) && !REG_EXPR (r))
9460 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9461 return r;
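/* In other words, when the SSA name is still associated with its
   defining assignment (say _5 = a_2 + b_3), the block above expands
   that right-hand side directly at the point of use; otherwise we fall
   back to the RTL recorded for the name's partition below.  */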
9464 ssa_name = exp;
9465 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9466 exp = SSA_NAME_VAR (ssa_name);
9467 goto expand_decl_rtl;
9469 case PARM_DECL:
9470 case VAR_DECL:
9471 /* If a static var's type was incomplete when the decl was written,
9472 but the type is complete now, lay out the decl now. */
9473 if (DECL_SIZE (exp) == 0
9474 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9475 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9476 layout_decl (exp, 0);
9478 /* ... fall through ... */
9480 case FUNCTION_DECL:
9481 case RESULT_DECL:
9482 decl_rtl = DECL_RTL (exp);
9483 expand_decl_rtl:
9484 gcc_assert (decl_rtl);
9485 decl_rtl = copy_rtx (decl_rtl);
9486 /* Record writes to register variables. */
9487 if (modifier == EXPAND_WRITE
9488 && REG_P (decl_rtl)
9489 && HARD_REGISTER_P (decl_rtl))
9490 add_to_hard_reg_set (&crtl->asm_clobbers,
9491 GET_MODE (decl_rtl), REGNO (decl_rtl));
9493 /* Ensure the variable is marked as used even if it doesn't go through
9494 a parser. If it hasn't been used yet, write out an external
9495 definition. */
9496 TREE_USED (exp) = 1;
9498 /* Show we haven't gotten RTL for this yet. */
9499 temp = 0;
9501 /* Variables inherited from containing functions should have
9502 been lowered by this point. */
9503 context = decl_function_context (exp);
9504 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9505 || context == current_function_decl
9506 || TREE_STATIC (exp)
9507 || DECL_EXTERNAL (exp)
9508 /* ??? C++ creates functions that are not TREE_STATIC. */
9509 || TREE_CODE (exp) == FUNCTION_DECL);
9511 /* This is the case of an array whose size is to be determined
9512 from its initializer, while the initializer is still being parsed.
9513 ??? We aren't parsing while expanding anymore. */
9515 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9516 temp = validize_mem (decl_rtl);
9518 /* If DECL_RTL is memory, we are in the normal case and the
9519 address is not valid, get the address into a register. */
9521 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9523 if (alt_rtl)
9524 *alt_rtl = decl_rtl;
9525 decl_rtl = use_anchored_address (decl_rtl);
9526 if (modifier != EXPAND_CONST_ADDRESS
9527 && modifier != EXPAND_SUM
9528 && !memory_address_addr_space_p (DECL_MODE (exp),
9529 XEXP (decl_rtl, 0),
9530 MEM_ADDR_SPACE (decl_rtl)))
9531 temp = replace_equiv_address (decl_rtl,
9532 copy_rtx (XEXP (decl_rtl, 0)));
9535 /* If we got something, return it. But first, set the alignment
9536 if the address is a register. */
9537 if (temp != 0)
9539 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9540 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9542 return temp;
9545 /* If the mode of DECL_RTL does not match that of the decl,
9546 there are two cases: we are dealing with a BLKmode value
9547 that is returned in a register, or we are dealing with
9548 a promoted value. In the latter case, return a SUBREG
9549 of the wanted mode, but mark it so that we know that it
9550 was already extended. */
9551 if (REG_P (decl_rtl)
9552 && DECL_MODE (exp) != BLKmode
9553 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9555 enum machine_mode pmode;
9557 /* Get the signedness to be used for this variable. Ensure we get
9558 the same mode we got when the variable was declared. */
9559 if (code == SSA_NAME
9560 && (g = SSA_NAME_DEF_STMT (ssa_name))
9561 && gimple_code (g) == GIMPLE_CALL
9562 && !gimple_call_internal_p (g))
9563 pmode = promote_function_mode (type, mode, &unsignedp,
9564 gimple_call_fntype (g),
9565 2);
9566 else
9567 pmode = promote_decl_mode (exp, &unsignedp);
9568 gcc_assert (GET_MODE (decl_rtl) == pmode);
9570 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9571 SUBREG_PROMOTED_VAR_P (temp) = 1;
9572 SUBREG_PROMOTED_SET (temp, unsignedp);
9573 return temp;
9576 return decl_rtl;
9578 case INTEGER_CST:
9579 /* Given that TYPE_PRECISION (type) is not always equal to
9580 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9581 the former to the latter according to the signedness of the
9582 type. */
9583 temp = immed_wide_int_const (wide_int::from
9584 (exp,
9585 GET_MODE_PRECISION (TYPE_MODE (type)),
9586 TYPE_SIGN (type)),
9587 TYPE_MODE (type));
9588 return temp;
9590 case VECTOR_CST:
9592 tree tmp = NULL_TREE;
9593 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9594 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9595 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9596 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9597 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9598 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9599 return const_vector_from_tree (exp);
9600 if (GET_MODE_CLASS (mode) == MODE_INT)
9602 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9603 if (type_for_mode)
9604 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9606 if (!tmp)
9608 vec<constructor_elt, va_gc> *v;
9609 unsigned i;
9610 vec_alloc (v, VECTOR_CST_NELTS (exp));
9611 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9612 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9613 tmp = build_constructor (type, v);
9615 return expand_expr (tmp, ignore ? const0_rtx : target,
9616 tmode, modifier);
9619 case CONST_DECL:
9620 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9622 case REAL_CST:
9623 /* If optimized, generate immediate CONST_DOUBLE
9624 which will be turned into memory by reload if necessary.
9626 We used to force a register so that loop.c could see it. But
9627 this does not allow gen_* patterns to perform optimizations with
9628 the constants. It also produces two insns in cases like "x = 1.0;".
9629 On most machines, floating-point constants are not permitted in
9630 many insns, so we'd end up copying it to a register in any case.
9632 Now, we do the copying in expand_binop, if appropriate. */
9633 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9634 TYPE_MODE (TREE_TYPE (exp)));
9636 case FIXED_CST:
9637 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9638 TYPE_MODE (TREE_TYPE (exp)));
9640 case COMPLEX_CST:
9641 /* Handle evaluating a complex constant in a CONCAT target. */
9642 if (original_target && GET_CODE (original_target) == CONCAT)
9644 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9645 rtx rtarg, itarg;
9647 rtarg = XEXP (original_target, 0);
9648 itarg = XEXP (original_target, 1);
9650 /* Move the real and imaginary parts separately. */
9651 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9652 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9654 if (op0 != rtarg)
9655 emit_move_insn (rtarg, op0);
9656 if (op1 != itarg)
9657 emit_move_insn (itarg, op1);
9659 return original_target;
9662 /* ... fall through ... */
9664 case STRING_CST:
9665 temp = expand_expr_constant (exp, 1, modifier);
9667 /* temp contains a constant address.
9668 On RISC machines where a constant address isn't valid,
9669 make some insns to get that address into a register. */
9670 if (modifier != EXPAND_CONST_ADDRESS
9671 && modifier != EXPAND_INITIALIZER
9672 && modifier != EXPAND_SUM
9673 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9674 MEM_ADDR_SPACE (temp)))
9675 return replace_equiv_address (temp,
9676 copy_rtx (XEXP (temp, 0)));
9677 return temp;
9679 case SAVE_EXPR:
9681 tree val = treeop0;
9682 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9683 inner_reference_p);
9685 if (!SAVE_EXPR_RESOLVED_P (exp))
9687 /* We can indeed still hit this case, typically via builtin
9688 expanders calling save_expr immediately before expanding
9689 something. Assume this means that we only have to deal
9690 with non-BLKmode values. */
9691 gcc_assert (GET_MODE (ret) != BLKmode);
9693 val = build_decl (curr_insn_location (),
9694 VAR_DECL, NULL, TREE_TYPE (exp));
9695 DECL_ARTIFICIAL (val) = 1;
9696 DECL_IGNORED_P (val) = 1;
9697 treeop0 = val;
9698 TREE_OPERAND (exp, 0) = treeop0;
9699 SAVE_EXPR_RESOLVED_P (exp) = 1;
9701 if (!CONSTANT_P (ret))
9702 ret = copy_to_reg (ret);
9703 SET_DECL_RTL (val, ret);
9706 return ret;
9710 case CONSTRUCTOR:
9711 /* If we don't need the result, just ensure we evaluate any
9712 subexpressions. */
9713 if (ignore)
9715 unsigned HOST_WIDE_INT idx;
9716 tree value;
9718 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9719 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9721 return const0_rtx;
9724 return expand_constructor (exp, target, modifier, false);
9726 case TARGET_MEM_REF:
9728 addr_space_t as
9729 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9730 enum insn_code icode;
9731 unsigned int align;
9733 op0 = addr_for_mem_ref (exp, as, true);
9734 op0 = memory_address_addr_space (mode, op0, as);
9735 temp = gen_rtx_MEM (mode, op0);
9736 set_mem_attributes (temp, exp, 0);
9737 set_mem_addr_space (temp, as);
9738 align = get_object_alignment (exp);
9739 if (modifier != EXPAND_WRITE
9740 && modifier != EXPAND_MEMORY
9741 && mode != BLKmode
9742 && align < GET_MODE_ALIGNMENT (mode)
9743 /* If the target does not have special handling for unaligned
9744 loads of this mode, it can use regular moves for them. */
9745 && ((icode = optab_handler (movmisalign_optab, mode))
9746 != CODE_FOR_nothing))
9748 struct expand_operand ops[2];
9750 /* We've already validated the memory, and we're creating a
9751 new pseudo destination. The predicates really can't fail,
9752 nor can the generator. */
9753 create_output_operand (&ops[0], NULL_RTX, mode);
9754 create_fixed_operand (&ops[1], temp);
9755 expand_insn (icode, 2, ops);
9756 temp = ops[0].value;
9758 return temp;
9761 case MEM_REF:
9763 addr_space_t as
9764 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9765 enum machine_mode address_mode;
9766 tree base = TREE_OPERAND (exp, 0);
9767 gimple def_stmt;
9768 enum insn_code icode;
9769 unsigned align;
9770 /* Handle expansion of non-aliased memory with non-BLKmode. That
9771 might end up in a register. */
9772 if (mem_ref_refers_to_non_mem_p (exp))
9774 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9775 base = TREE_OPERAND (base, 0);
9776 if (offset == 0
9777 && tree_fits_uhwi_p (TYPE_SIZE (type))
9778 && (GET_MODE_BITSIZE (DECL_MODE (base))
9779 == tree_to_uhwi (TYPE_SIZE (type))))
9780 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9781 target, tmode, modifier);
9782 if (TYPE_MODE (type) == BLKmode)
9784 temp = assign_stack_temp (DECL_MODE (base),
9785 GET_MODE_SIZE (DECL_MODE (base)));
9786 store_expr (base, temp, 0, false);
9787 temp = adjust_address (temp, BLKmode, offset);
9788 set_mem_size (temp, int_size_in_bytes (type));
9789 return temp;
9791 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9792 bitsize_int (offset * BITS_PER_UNIT));
9793 return expand_expr (exp, target, tmode, modifier);
9795 address_mode = targetm.addr_space.address_mode (as);
9796 base = TREE_OPERAND (exp, 0);
9797 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9799 tree mask = gimple_assign_rhs2 (def_stmt);
9800 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9801 gimple_assign_rhs1 (def_stmt), mask);
9802 TREE_OPERAND (exp, 0) = base;
9804 align = get_object_alignment (exp);
9805 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9806 op0 = memory_address_addr_space (mode, op0, as);
9807 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9809 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9810 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9811 op0 = memory_address_addr_space (mode, op0, as);
9813 temp = gen_rtx_MEM (mode, op0);
9814 set_mem_attributes (temp, exp, 0);
9815 set_mem_addr_space (temp, as);
9816 if (TREE_THIS_VOLATILE (exp))
9817 MEM_VOLATILE_P (temp) = 1;
9818 if (modifier != EXPAND_WRITE
9819 && modifier != EXPAND_MEMORY
9820 && !inner_reference_p
9821 && mode != BLKmode
9822 && align < GET_MODE_ALIGNMENT (mode))
9824 if ((icode = optab_handler (movmisalign_optab, mode))
9825 != CODE_FOR_nothing)
9827 struct expand_operand ops[2];
9829 /* We've already validated the memory, and we're creating a
9830 new pseudo destination. The predicates really can't fail,
9831 nor can the generator. */
9832 create_output_operand (&ops[0], NULL_RTX, mode);
9833 create_fixed_operand (&ops[1], temp);
9834 expand_insn (icode, 2, ops);
9835 temp = ops[0].value;
9837 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9838 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9839 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9840 (modifier == EXPAND_STACK_PARM
9841 ? NULL_RTX : target),
9842 mode, mode);
9844 return temp;
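/* Illustrative example: loading an SImode value through a pointer known
   to be only 1-byte aligned either goes through the target's
   movmisalign pattern, when one exists, or falls back to
   extract_bit_field when such an unaligned access would be slow.  */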
9847 case ARRAY_REF:
9850 tree array = treeop0;
9851 tree index = treeop1;
9852 tree init;
9854 /* Fold an expression like: "foo"[2].
9855 This is not done in fold so it won't happen inside &.
9856 Don't fold if this is for wide characters since it's too
9857 difficult to do correctly and this is a very rare case. */
9859 if (modifier != EXPAND_CONST_ADDRESS
9860 && modifier != EXPAND_INITIALIZER
9861 && modifier != EXPAND_MEMORY)
9863 tree t = fold_read_from_constant_string (exp);
9865 if (t)
9866 return expand_expr (t, target, tmode, modifier);
9869 /* If this is a constant index into a constant array,
9870 just get the value from the array. Handle both the cases when
9871 we have an explicit constructor and when our operand is a variable
9872 that was declared const. */
9874 if (modifier != EXPAND_CONST_ADDRESS
9875 && modifier != EXPAND_INITIALIZER
9876 && modifier != EXPAND_MEMORY
9877 && TREE_CODE (array) == CONSTRUCTOR
9878 && ! TREE_SIDE_EFFECTS (array)
9879 && TREE_CODE (index) == INTEGER_CST)
9881 unsigned HOST_WIDE_INT ix;
9882 tree field, value;
9884 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9885 field, value)
9886 if (tree_int_cst_equal (field, index))
9888 if (!TREE_SIDE_EFFECTS (value))
9889 return expand_expr (fold (value), target, tmode, modifier);
9890 break;
9894 else if (optimize >= 1
9895 && modifier != EXPAND_CONST_ADDRESS
9896 && modifier != EXPAND_INITIALIZER
9897 && modifier != EXPAND_MEMORY
9898 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9899 && TREE_CODE (index) == INTEGER_CST
9900 && (TREE_CODE (array) == VAR_DECL
9901 || TREE_CODE (array) == CONST_DECL)
9902 && (init = ctor_for_folding (array)) != error_mark_node)
9904 if (init == NULL_TREE)
9906 tree value = build_zero_cst (type);
9907 if (TREE_CODE (value) == CONSTRUCTOR)
9909 /* If VALUE is a CONSTRUCTOR, this optimization is only
9910 useful if this doesn't store the CONSTRUCTOR into
9911 memory. If it does, it is more efficient to just
9912 load the data from the array directly. */
9913 rtx ret = expand_constructor (value, target,
9914 modifier, true);
9915 if (ret == NULL_RTX)
9916 value = NULL_TREE;
9919 if (value)
9920 return expand_expr (value, target, tmode, modifier);
9922 else if (TREE_CODE (init) == CONSTRUCTOR)
9924 unsigned HOST_WIDE_INT ix;
9925 tree field, value;
9927 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9928 field, value)
9929 if (tree_int_cst_equal (field, index))
9931 if (TREE_SIDE_EFFECTS (value))
9932 break;
9934 if (TREE_CODE (value) == CONSTRUCTOR)
9936 /* If VALUE is a CONSTRUCTOR, this
9937 optimization is only useful if
9938 this doesn't store the CONSTRUCTOR
9939 into memory. If it does, it is more
9940 efficient to just load the data from
9941 the array directly. */
9942 rtx ret = expand_constructor (value, target,
9943 modifier, true);
9944 if (ret == NULL_RTX)
9945 break;
9948 return
9949 expand_expr (fold (value), target, tmode, modifier);
9952 else if (TREE_CODE (init) == STRING_CST)
9954 tree low_bound = array_ref_low_bound (exp);
9955 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9957 /* Optimize the special case of a zero lower bound.
9959 We convert the lower bound to sizetype to avoid problems
9960 with constant folding. E.g. suppose the lower bound is
9961 1 and its mode is QI. Without the conversion
9962 (ARRAY + (INDEX - (unsigned char)1))
9963 becomes
9964 (ARRAY + (-(unsigned char)1) + INDEX)
9965 which becomes
9966 (ARRAY + 255 + INDEX). Oops! */
9967 if (!integer_zerop (low_bound))
9968 index1 = size_diffop_loc (loc, index1,
9969 fold_convert_loc (loc, sizetype,
9970 low_bound));
9972 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9974 tree type = TREE_TYPE (TREE_TYPE (init));
9975 enum machine_mode mode = TYPE_MODE (type);
9977 if (GET_MODE_CLASS (mode) == MODE_INT
9978 && GET_MODE_SIZE (mode) == 1)
9979 return gen_int_mode (TREE_STRING_POINTER (init)
9980 [TREE_INT_CST_LOW (index1)],
9981 mode);
9986 goto normal_inner_ref;
9988 case COMPONENT_REF:
9989 /* If the operand is a CONSTRUCTOR, we can just extract the
9990 appropriate field if it is present. */
9991 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9993 unsigned HOST_WIDE_INT idx;
9994 tree field, value;
9996 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9997 idx, field, value)
9998 if (field == treeop1
9999 /* We can normally use the value of the field in the
10000 CONSTRUCTOR. However, if this is a bitfield in
10001 an integral mode that we can fit in a HOST_WIDE_INT,
10002 we must mask only the number of bits in the bitfield,
10003 since this is done implicitly by the constructor. If
10004 the bitfield does not meet either of those conditions,
10005 we can't do this optimization. */
10006 && (! DECL_BIT_FIELD (field)
10007 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10008 && (GET_MODE_PRECISION (DECL_MODE (field))
10009 <= HOST_BITS_PER_WIDE_INT))))
10011 if (DECL_BIT_FIELD (field)
10012 && modifier == EXPAND_STACK_PARM)
10013 target = 0;
10014 op0 = expand_expr (value, target, tmode, modifier);
10015 if (DECL_BIT_FIELD (field))
10017 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10018 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10020 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10022 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10023 imode);
10024 op0 = expand_and (imode, op0, op1, target);
10026 else
10028 int count = GET_MODE_PRECISION (imode) - bitsize;
10030 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10031 target, 0);
10032 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10033 target, 0);
10037 return op0;
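/* Illustrative example: pulling a 3-bit unsigned bit-field value out of
   a CONSTRUCTOR masks it with (1 << 3) - 1 == 7; a signed field is
   instead shifted left and back right by precision - 3 bits so that it
   ends up properly sign-extended.  */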
10040 goto normal_inner_ref;
10042 case BIT_FIELD_REF:
10043 case ARRAY_RANGE_REF:
10044 normal_inner_ref:
10046 enum machine_mode mode1, mode2;
10047 HOST_WIDE_INT bitsize, bitpos;
10048 tree offset;
10049 int volatilep = 0, must_force_mem;
10050 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10051 &mode1, &unsignedp, &volatilep, true);
10052 rtx orig_op0, memloc;
10053 bool mem_attrs_from_type = false;
10055 /* If we got back the original object, something is wrong. Perhaps
10056 we are evaluating an expression too early. In any event, don't
10057 infinitely recurse. */
10058 gcc_assert (tem != exp);
10060 /* If TEM's type is a union of variable size, pass TARGET to the inner
10061 computation, since it will need a temporary and TARGET is known
10062 to be suitable. This occurs in unchecked conversion in Ada. */
10063 orig_op0 = op0
10064 = expand_expr_real (tem,
10065 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10066 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10067 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10068 != INTEGER_CST)
10069 && modifier != EXPAND_STACK_PARM
10070 ? target : NULL_RTX),
10071 VOIDmode,
10072 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10073 NULL, true);
10075 /* If the field has a mode, we want to access it in the
10076 field's mode, not the computed mode.
10077 If a MEM has VOIDmode (external with incomplete type),
10078 use BLKmode for it instead. */
10079 if (MEM_P (op0))
10081 if (mode1 != VOIDmode)
10082 op0 = adjust_address (op0, mode1, 0);
10083 else if (GET_MODE (op0) == VOIDmode)
10084 op0 = adjust_address (op0, BLKmode, 0);
10087 mode2
10088 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10090 /* If we have either an offset, a BLKmode result, or a reference
10091 outside the underlying object, we must force it to memory.
10092 Such a case can occur in Ada if we have unchecked conversion
10093 of an expression from a scalar type to an aggregate type or
10094 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10095 passed a partially uninitialized object or a view-conversion
10096 to a larger size. */
10097 must_force_mem = (offset
10098 || mode1 == BLKmode
10099 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10101 /* Handle CONCAT first. */
10102 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10104 if (bitpos == 0
10105 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10106 return op0;
10107 if (bitpos == 0
10108 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10109 && bitsize)
10111 op0 = XEXP (op0, 0);
10112 mode2 = GET_MODE (op0);
10114 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10115 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10116 && bitpos
10117 && bitsize)
10119 op0 = XEXP (op0, 1);
10120 bitpos = 0;
10121 mode2 = GET_MODE (op0);
10123 else
10124 /* Otherwise force into memory. */
10125 must_force_mem = 1;
10128 /* If this is a constant, put it in a register if it is a legitimate
10129 constant and we don't need a memory reference. */
10130 if (CONSTANT_P (op0)
10131 && mode2 != BLKmode
10132 && targetm.legitimate_constant_p (mode2, op0)
10133 && !must_force_mem)
10134 op0 = force_reg (mode2, op0);
10136 /* Otherwise, if this is a constant, try to force it to the constant
10137 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10138 is a legitimate constant. */
10139 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10140 op0 = validize_mem (memloc);
10142 /* Otherwise, if this is a constant or the object is not in memory
10143 and need be, put it there. */
10144 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10146 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10147 emit_move_insn (memloc, op0);
10148 op0 = memloc;
10149 mem_attrs_from_type = true;
10152 if (offset)
10154 enum machine_mode address_mode;
10155 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10156 EXPAND_SUM);
10158 gcc_assert (MEM_P (op0));
10160 address_mode = get_address_mode (op0);
10161 if (GET_MODE (offset_rtx) != address_mode)
10162 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10164 /* See the comment in expand_assignment for the rationale. */
10165 if (mode1 != VOIDmode
10166 && bitpos != 0
10167 && bitsize > 0
10168 && (bitpos % bitsize) == 0
10169 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10170 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10172 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10173 bitpos = 0;
10176 op0 = offset_address (op0, offset_rtx,
10177 highest_pow2_factor (offset));
10180 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10181 record its alignment as BIGGEST_ALIGNMENT. */
10182 if (MEM_P (op0) && bitpos == 0 && offset != 0
10183 && is_aligning_offset (offset, tem))
10184 set_mem_align (op0, BIGGEST_ALIGNMENT);
10186 /* Don't forget about volatility even if this is a bitfield. */
10187 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10189 if (op0 == orig_op0)
10190 op0 = copy_rtx (op0);
10192 MEM_VOLATILE_P (op0) = 1;
10195 /* In cases where an aligned union has an unaligned object
10196 as a field, we might be extracting a BLKmode value from
10197 an integer-mode (e.g., SImode) object. Handle this case
10198 by doing the extract into an object as wide as the field
10199 (which we know to be the width of a basic mode), then
10200 storing into memory, and changing the mode to BLKmode. */
10201 if (mode1 == VOIDmode
10202 || REG_P (op0) || GET_CODE (op0) == SUBREG
10203 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10204 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10205 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10206 && modifier != EXPAND_CONST_ADDRESS
10207 && modifier != EXPAND_INITIALIZER
10208 && modifier != EXPAND_MEMORY)
10209 /* If the bitfield is volatile and the bitsize
10210 is narrower than the access size of the bitfield,
10211 we need to extract bitfields from the access. */
10212 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10213 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10214 && mode1 != BLKmode
10215 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10216 /* If the field isn't aligned enough to fetch as a memref,
10217 fetch it as a bit field. */
10218 || (mode1 != BLKmode
10219 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10220 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10221 || (MEM_P (op0)
10222 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10223 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10224 && modifier != EXPAND_MEMORY
10225 && ((modifier == EXPAND_CONST_ADDRESS
10226 || modifier == EXPAND_INITIALIZER)
10227 ? STRICT_ALIGNMENT
10228 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10229 || (bitpos % BITS_PER_UNIT != 0)))
10230 /* If the type and the field are a constant size and the
10231 size of the type isn't the same size as the bitfield,
10232 we must use bitfield operations. */
10233 || (bitsize >= 0
10234 && TYPE_SIZE (TREE_TYPE (exp))
10235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10236 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10237 bitsize)))
10239 enum machine_mode ext_mode = mode;
10241 if (ext_mode == BLKmode
10242 && ! (target != 0 && MEM_P (op0)
10243 && MEM_P (target)
10244 && bitpos % BITS_PER_UNIT == 0))
10245 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10247 if (ext_mode == BLKmode)
10249 if (target == 0)
10250 target = assign_temp (type, 1, 1);
10252 /* ??? Unlike the similar test a few lines below, this one is
10253 very likely obsolete. */
10254 if (bitsize == 0)
10255 return target;
10257 /* In this case, BITPOS must start at a byte boundary and
10258 TARGET, if specified, must be a MEM. */
10259 gcc_assert (MEM_P (op0)
10260 && (!target || MEM_P (target))
10261 && !(bitpos % BITS_PER_UNIT));
10263 emit_block_move (target,
10264 adjust_address (op0, VOIDmode,
10265 bitpos / BITS_PER_UNIT),
10266 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10267 / BITS_PER_UNIT),
10268 (modifier == EXPAND_STACK_PARM
10269 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10271 return target;
10274 /* If we have nothing to extract, the result will be 0 for targets
10275 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10276 return 0 for the sake of consistency, as reading a zero-sized
10277 bitfield is valid in Ada and the value is fully specified. */
10278 if (bitsize == 0)
10279 return const0_rtx;
10281 op0 = validize_mem (op0);
10283 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10284 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10286 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10287 (modifier == EXPAND_STACK_PARM
10288 ? NULL_RTX : target),
10289 ext_mode, ext_mode);
10291 /* If the result is a record type and BITSIZE is narrower than
10292 the mode of OP0, an integral mode, and this is a big endian
10293 machine, we must put the field into the high-order bits. */
10294 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10295 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10296 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10297 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10298 GET_MODE_BITSIZE (GET_MODE (op0))
10299 - bitsize, op0, 1);
10301 /* If the result type is BLKmode, store the data into a temporary
10302 of the appropriate type, but with the mode corresponding to the
10303 mode for the data we have (op0's mode). */
10304 if (mode == BLKmode)
10306 rtx new_rtx
10307 = assign_stack_temp_for_type (ext_mode,
10308 GET_MODE_BITSIZE (ext_mode),
10309 type);
10310 emit_move_insn (new_rtx, op0);
10311 op0 = copy_rtx (new_rtx);
10312 PUT_MODE (op0, BLKmode);
10315 return op0;
10318 /* If the result is BLKmode, use that to access the object
10319 now as well. */
10320 if (mode == BLKmode)
10321 mode1 = BLKmode;
10323 /* Get a reference to just this component. */
10324 if (modifier == EXPAND_CONST_ADDRESS
10325 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10326 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10327 else
10328 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10330 if (op0 == orig_op0)
10331 op0 = copy_rtx (op0);
10333 /* If op0 is a temporary because of forcing to memory, pass only the
10334 type to set_mem_attributes so that the original expression is never
10335 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10336 if (mem_attrs_from_type)
10337 set_mem_attributes (op0, type, 0);
10338 else
10339 set_mem_attributes (op0, exp, 0);
10341 if (REG_P (XEXP (op0, 0)))
10342 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10344 MEM_VOLATILE_P (op0) |= volatilep;
10345 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10346 || modifier == EXPAND_CONST_ADDRESS
10347 || modifier == EXPAND_INITIALIZER)
10348 return op0;
10350 if (target == 0)
10351 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10353 convert_move (target, op0, unsignedp);
10354 return target;
10357 case OBJ_TYPE_REF:
10358 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10360 case CALL_EXPR:
10361 /* All valid uses of __builtin_va_arg_pack () are removed during
10362 inlining. */
10363 if (CALL_EXPR_VA_ARG_PACK (exp))
10364 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10366 tree fndecl = get_callee_fndecl (exp), attr;
10368 if (fndecl
10369 && (attr = lookup_attribute ("error",
10370 DECL_ATTRIBUTES (fndecl))) != NULL)
10371 error ("%Kcall to %qs declared with attribute error: %s",
10372 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10373 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10374 if (fndecl
10375 && (attr = lookup_attribute ("warning",
10376 DECL_ATTRIBUTES (fndecl))) != NULL)
10377 warning_at (tree_nonartificial_location (exp),
10378 0, "%Kcall to %qs declared with attribute warning: %s",
10379 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10380 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10382 /* Check for a built-in function. */
10383 if (fndecl && DECL_BUILT_IN (fndecl))
10385 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10386 return expand_builtin (exp, target, subtarget, tmode, ignore);
10389 return expand_call (exp, target, ignore);
10391 case VIEW_CONVERT_EXPR:
10392 op0 = NULL_RTX;
10394 /* If we are converting to BLKmode, try to avoid an intermediate
10395 temporary by fetching an inner memory reference. */
10396 if (mode == BLKmode
10397 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10398 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10399 && handled_component_p (treeop0))
10401 enum machine_mode mode1;
10402 HOST_WIDE_INT bitsize, bitpos;
10403 tree offset;
10404 int unsignedp;
10405 int volatilep = 0;
10406 tree tem
10407 = get_inner_reference (treeop0, &bitsize, &bitpos,
10408 &offset, &mode1, &unsignedp, &volatilep,
10409 true);
10410 rtx orig_op0;
10412 /* ??? We should work harder and deal with non-zero offsets. */
10413 if (!offset
10414 && (bitpos % BITS_PER_UNIT) == 0
10415 && bitsize >= 0
10416 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10418 /* See the normal_inner_ref case for the rationale. */
10419 orig_op0
10420 = expand_expr_real (tem,
10421 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10422 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10423 != INTEGER_CST)
10424 && modifier != EXPAND_STACK_PARM
10425 ? target : NULL_RTX),
10426 VOIDmode,
10427 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10428 NULL, true);
10430 if (MEM_P (orig_op0))
10432 op0 = orig_op0;
10434 /* Get a reference to just this component. */
10435 if (modifier == EXPAND_CONST_ADDRESS
10436 || modifier == EXPAND_SUM
10437 || modifier == EXPAND_INITIALIZER)
10438 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10439 else
10440 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10442 if (op0 == orig_op0)
10443 op0 = copy_rtx (op0);
10445 set_mem_attributes (op0, treeop0, 0);
10446 if (REG_P (XEXP (op0, 0)))
10447 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10449 MEM_VOLATILE_P (op0) |= volatilep;
10454 if (!op0)
10455 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10456 NULL, inner_reference_p);
10458 /* If the input and output modes are both the same, we are done. */
10459 if (mode == GET_MODE (op0))
10461 /* If neither mode is BLKmode, and both modes are the same size
10462 then we can use gen_lowpart. */
10463 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10464 && (GET_MODE_PRECISION (mode)
10465 == GET_MODE_PRECISION (GET_MODE (op0)))
10466 && !COMPLEX_MODE_P (GET_MODE (op0)))
10468 if (GET_CODE (op0) == SUBREG)
10469 op0 = force_reg (GET_MODE (op0), op0);
10470 temp = gen_lowpart_common (mode, op0);
10471 if (temp)
10472 op0 = temp;
10473 else
10475 if (!REG_P (op0) && !MEM_P (op0))
10476 op0 = force_reg (GET_MODE (op0), op0);
10477 op0 = gen_lowpart (mode, op0);
10480 /* If both types are integral, convert from one mode to the other. */
10481 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10482 op0 = convert_modes (mode, GET_MODE (op0), op0,
10483 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10484 /* If the output type is a bit-field type, do an extraction. */
10485 else if (reduce_bit_field)
10486 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10487 TYPE_UNSIGNED (type), NULL_RTX,
10488 mode, mode);
10489 /* As a last resort, spill op0 to memory, and reload it in a
10490 different mode. */
10491 else if (!MEM_P (op0))
10493 /* If the operand is not a MEM, force it into memory. Since we
10494 are going to be changing the mode of the MEM, don't call
10495 force_const_mem for constants because we don't allow pool
10496 constants to change mode. */
10497 tree inner_type = TREE_TYPE (treeop0);
10499 gcc_assert (!TREE_ADDRESSABLE (exp));
10501 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10502 target
10503 = assign_stack_temp_for_type
10504 (TYPE_MODE (inner_type),
10505 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10507 emit_move_insn (target, op0);
10508 op0 = target;
10511 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10512 output type is such that the operand is known to be aligned, indicate
10513 that it is. Otherwise, we need only be concerned about alignment for
10514 non-BLKmode results. */
10515 if (MEM_P (op0))
10517 enum insn_code icode;
10519 if (TYPE_ALIGN_OK (type))
10521 /* ??? Copying the MEM without substantially changing it might
10522 run afoul of the code handling volatile memory references in
10523 store_expr, which assumes that TARGET is returned unmodified
10524 if it has been used. */
10525 op0 = copy_rtx (op0);
10526 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10528 else if (modifier != EXPAND_WRITE
10529 && modifier != EXPAND_MEMORY
10530 && !inner_reference_p
10531 && mode != BLKmode
10532 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10534 /* If the target does have special handling for unaligned
10535 loads of this mode, use them. */
10536 if ((icode = optab_handler (movmisalign_optab, mode))
10537 != CODE_FOR_nothing)
10539 rtx reg, insn;
10541 op0 = adjust_address (op0, mode, 0);
10542 /* We've already validated the memory, and we're creating a
10543 new pseudo destination. The predicates really can't
10544 fail. */
10545 reg = gen_reg_rtx (mode);
10547 /* Nor can the insn generator. */
10548 insn = GEN_FCN (icode) (reg, op0);
10549 emit_insn (insn);
10550 return reg;
10552 else if (STRICT_ALIGNMENT)
10554 tree inner_type = TREE_TYPE (treeop0);
10555 HOST_WIDE_INT temp_size
10556 = MAX (int_size_in_bytes (inner_type),
10557 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10558 rtx new_rtx
10559 = assign_stack_temp_for_type (mode, temp_size, type);
10560 rtx new_with_op0_mode
10561 = adjust_address (new_rtx, GET_MODE (op0), 0);
10563 gcc_assert (!TREE_ADDRESSABLE (exp));
10565 if (GET_MODE (op0) == BLKmode)
10566 emit_block_move (new_with_op0_mode, op0,
10567 GEN_INT (GET_MODE_SIZE (mode)),
10568 (modifier == EXPAND_STACK_PARM
10569 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10570 else
10571 emit_move_insn (new_with_op0_mode, op0);
10573 op0 = new_rtx;
10577 op0 = adjust_address (op0, mode, 0);
10580 return op0;
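/* Roughly: a VIEW_CONVERT_EXPR between same-sized non-BLKmode types,
   e.g. reinterpreting a 32-bit float as a 32-bit integer, goes through
   gen_lowpart; integral-to-integral cases use convert_modes; as a last
   resort the value is spilled to a stack temporary and reloaded in the
   new mode, with the alignment fixups above.  */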
10582 case MODIFY_EXPR:
10584 tree lhs = treeop0;
10585 tree rhs = treeop1;
10586 gcc_assert (ignore);
10588 /* Check for |= or &= of a bitfield of size one into another bitfield
10589 of size one. In this case, (unless we need the result of the
10590 assignment) we can do this more efficiently with a
10591 test followed by an assignment, if necessary.
10593 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10594 things change so we do, this code should be enhanced to
10595 support it. */
10596 if (TREE_CODE (lhs) == COMPONENT_REF
10597 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10598 || TREE_CODE (rhs) == BIT_AND_EXPR)
10599 && TREE_OPERAND (rhs, 0) == lhs
10600 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10601 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10602 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10604 rtx_code_label *label = gen_label_rtx ();
10605 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10606 do_jump (TREE_OPERAND (rhs, 1),
10607 value ? label : 0,
10608 value ? 0 : label, -1);
10609 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10610 false);
10611 do_pending_stack_adjust ();
10612 emit_label (label);
10613 return const0_rtx;
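/* Illustrative example: for "x.a |= y.b" with two one-bit fields, the
   code above simply tests y.b and stores the constant 1 into x.a when
   that bit is set, avoiding a full read-modify-write of the destination
   bit-field; every other assignment takes the expand_assignment path
   below.  */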
10616 expand_assignment (lhs, rhs, false);
10617 return const0_rtx;
10620 case ADDR_EXPR:
10621 return expand_expr_addr_expr (exp, target, tmode, modifier);
10623 case REALPART_EXPR:
10624 op0 = expand_normal (treeop0);
10625 return read_complex_part (op0, false);
10627 case IMAGPART_EXPR:
10628 op0 = expand_normal (treeop0);
10629 return read_complex_part (op0, true);
10631 case RETURN_EXPR:
10632 case LABEL_EXPR:
10633 case GOTO_EXPR:
10634 case SWITCH_EXPR:
10635 case ASM_EXPR:
10636 /* Expanded in cfgexpand.c. */
10637 gcc_unreachable ();
10639 case TRY_CATCH_EXPR:
10640 case CATCH_EXPR:
10641 case EH_FILTER_EXPR:
10642 case TRY_FINALLY_EXPR:
10643 /* Lowered by tree-eh.c. */
10644 gcc_unreachable ();
10646 case WITH_CLEANUP_EXPR:
10647 case CLEANUP_POINT_EXPR:
10648 case TARGET_EXPR:
10649 case CASE_LABEL_EXPR:
10650 case VA_ARG_EXPR:
10651 case BIND_EXPR:
10652 case INIT_EXPR:
10653 case CONJ_EXPR:
10654 case COMPOUND_EXPR:
10655 case PREINCREMENT_EXPR:
10656 case PREDECREMENT_EXPR:
10657 case POSTINCREMENT_EXPR:
10658 case POSTDECREMENT_EXPR:
10659 case LOOP_EXPR:
10660 case EXIT_EXPR:
10661 case COMPOUND_LITERAL_EXPR:
10662 /* Lowered by gimplify.c. */
10663 gcc_unreachable ();
10665 case FDESC_EXPR:
10666 /* Function descriptors are not valid except as
10667 initialization constants, and should not be expanded. */
10668 gcc_unreachable ();
10670 case WITH_SIZE_EXPR:
10671 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10672 have pulled out the size to use in whatever context it needed. */
10673 return expand_expr_real (treeop0, original_target, tmode,
10674 modifier, alt_rtl, inner_reference_p);
10676 default:
10677 return expand_expr_real_2 (&ops, target, tmode, modifier);
10681 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10682 signedness of TYPE), possibly returning the result in TARGET. */
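/* For example, reducing a QImode value to a 3-bit unsigned type masks
   it with 0x7, while a 3-bit signed type shifts it left and then
   arithmetically right by 5 bits so that bit 2 is propagated into the
   upper bits.  */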
10683 static rtx
10684 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10686 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10687 if (target && GET_MODE (target) != GET_MODE (exp))
10688 target = 0;
10689 /* For constant values, reduce using build_int_cst_type. */
10690 if (CONST_INT_P (exp))
10692 HOST_WIDE_INT value = INTVAL (exp);
10693 tree t = build_int_cst_type (type, value);
10694 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10696 else if (TYPE_UNSIGNED (type))
10698 enum machine_mode mode = GET_MODE (exp);
10699 rtx mask = immed_wide_int_const
10700 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10701 return expand_and (mode, exp, mask, target);
10703 else
10705 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10706 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10707 exp, count, target, 0);
10708 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10709 exp, count, target, 0);
10713 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10714 when applied to the address of EXP produces an address known to be
10715 aligned more than BIGGEST_ALIGNMENT. */
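/* A rough sketch of the shape this recognizes: an offset of the form

       (- (uintptr_t) &EXP) & (ALIGN - 1)

   where ALIGN is a power of two exceeding BIGGEST_ALIGNMENT; adding
   such an offset to the address of EXP rounds it up to an ALIGN-byte
   boundary, which justifies the stronger alignment recorded by the
   caller.  */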
10717 static int
10718 is_aligning_offset (const_tree offset, const_tree exp)
10720 /* Strip off any conversions. */
10721 while (CONVERT_EXPR_P (offset))
10722 offset = TREE_OPERAND (offset, 0);
10724 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10725 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10726 if (TREE_CODE (offset) != BIT_AND_EXPR
10727 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10728 || compare_tree_int (TREE_OPERAND (offset, 1),
10729 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10730 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10731 return 0;
10733 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10734 It must be NEGATE_EXPR. Then strip any more conversions. */
10735 offset = TREE_OPERAND (offset, 0);
10736 while (CONVERT_EXPR_P (offset))
10737 offset = TREE_OPERAND (offset, 0);
10739 if (TREE_CODE (offset) != NEGATE_EXPR)
10740 return 0;
10742 offset = TREE_OPERAND (offset, 0);
10743 while (CONVERT_EXPR_P (offset))
10744 offset = TREE_OPERAND (offset, 0);
10746 /* This must now be the address of EXP. */
10747 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10750 /* Return the tree node if ARG corresponds to a string constant, or zero
10751 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10752 in bytes within the string that ARG is accessing. The type of the
10753 offset will be `sizetype'. */
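/* For instance, given &"hello"[2] or "hello" + 2 as ARG, this is
   expected to return the STRING_CST for "hello" and set *PTR_OFFSET to
   the sizetype constant 2.  */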
10755 tree
10756 string_constant (tree arg, tree *ptr_offset)
10758 tree array, offset, lower_bound;
10759 STRIP_NOPS (arg);
10761 if (TREE_CODE (arg) == ADDR_EXPR)
10763 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10765 *ptr_offset = size_zero_node;
10766 return TREE_OPERAND (arg, 0);
10768 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10770 array = TREE_OPERAND (arg, 0);
10771 offset = size_zero_node;
10773 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10775 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10776 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10777 if (TREE_CODE (array) != STRING_CST
10778 && TREE_CODE (array) != VAR_DECL)
10779 return 0;
10781 /* Check if the array has a nonzero lower bound. */
10782 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10783 if (!integer_zerop (lower_bound))
10785 /* If the offset and base aren't both constants, return 0. */
10786 if (TREE_CODE (lower_bound) != INTEGER_CST)
10787 return 0;
10788 if (TREE_CODE (offset) != INTEGER_CST)
10789 return 0;
10790 /* Adjust offset by the lower bound. */
10791 offset = size_diffop (fold_convert (sizetype, offset),
10792 fold_convert (sizetype, lower_bound));
10795 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10797 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10798 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10799 if (TREE_CODE (array) != ADDR_EXPR)
10800 return 0;
10801 array = TREE_OPERAND (array, 0);
10802 if (TREE_CODE (array) != STRING_CST
10803 && TREE_CODE (array) != VAR_DECL)
10804 return 0;
10806 else
10807 return 0;
10809 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10811 tree arg0 = TREE_OPERAND (arg, 0);
10812 tree arg1 = TREE_OPERAND (arg, 1);
10814 STRIP_NOPS (arg0);
10815 STRIP_NOPS (arg1);
10817 if (TREE_CODE (arg0) == ADDR_EXPR
10818 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10819 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10821 array = TREE_OPERAND (arg0, 0);
10822 offset = arg1;
10824 else if (TREE_CODE (arg1) == ADDR_EXPR
10825 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10826 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10828 array = TREE_OPERAND (arg1, 0);
10829 offset = arg0;
10831 else
10832 return 0;
10834 else
10835 return 0;
10837 if (TREE_CODE (array) == STRING_CST)
10839 *ptr_offset = fold_convert (sizetype, offset);
10840 return array;
10842 else if (TREE_CODE (array) == VAR_DECL
10843 || TREE_CODE (array) == CONST_DECL)
10845 int length;
10846 tree init = ctor_for_folding (array);
10848 /* Variables initialized to string literals can be handled too. */
10849 if (init == error_mark_node
10850 || !init
10851 || TREE_CODE (init) != STRING_CST)
10852 return 0;
10854 /* Avoid const char foo[4] = "abcde"; */
10855 if (DECL_SIZE_UNIT (array) == NULL_TREE
10856 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10857 || (length = TREE_STRING_LENGTH (init)) <= 0
10858 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10859 return 0;
10861 /* If the variable is bigger than the string literal, OFFSET must be constant
10862 and within the bounds of the string literal. */
10863 offset = fold_convert (sizetype, offset);
10864 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10865 && (! tree_fits_uhwi_p (offset)
10866 || compare_tree_int (offset, length) >= 0))
10867 return 0;
10869 *ptr_offset = offset;
10870 return init;
10873 return 0;
10876 /* Generate code to calculate OPS, an exploded expression,
10877 using a store-flag instruction and return an rtx for the result.
10878 OPS reflects a comparison.
10880 If TARGET is nonzero, store the result there if convenient.
10882 Return zero if there is no suitable set-flag instruction
10883 available on this machine.
10885 Once expand_expr has been called on the arguments of the comparison,
10886 we are committed to doing the store flag, since it is not safe to
10887 re-evaluate the expression. We emit the store-flag insn by calling
10888 emit_store_flag, but only expand the arguments if we have a reason
10889 to believe that emit_store_flag will be successful. If we think that
10890 it will, but it isn't, we have to simulate the store-flag with a
10891 set/jump/set sequence. */
10893 static rtx
10894 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10896 enum rtx_code code;
10897 tree arg0, arg1, type;
10898 tree tem;
10899 enum machine_mode operand_mode;
10900 int unsignedp;
10901 rtx op0, op1;
10902 rtx subtarget = target;
10903 location_t loc = ops->location;
10905 arg0 = ops->op0;
10906 arg1 = ops->op1;
10908 /* Don't crash if the comparison was erroneous. */
10909 if (arg0 == error_mark_node || arg1 == error_mark_node)
10910 return const0_rtx;
10912 type = TREE_TYPE (arg0);
10913 operand_mode = TYPE_MODE (type);
10914 unsignedp = TYPE_UNSIGNED (type);
10916 /* We won't bother with BLKmode store-flag operations because it would mean
10917 passing a lot of information to emit_store_flag. */
10918 if (operand_mode == BLKmode)
10919 return 0;
10921 /* We won't bother with store-flag operations involving function pointers
10922 when function pointers must be canonicalized before comparisons. */
10923 #ifdef HAVE_canonicalize_funcptr_for_compare
10924 if (HAVE_canonicalize_funcptr_for_compare
10925 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10926 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10927 == FUNCTION_TYPE))
10928 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10929 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10930 == FUNCTION_TYPE))))
10931 return 0;
10932 #endif
10934 STRIP_NOPS (arg0);
10935 STRIP_NOPS (arg1);
10937 /* For vector typed comparisons emit code to generate the desired
10938 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10939 expander for this. */
10940 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10942 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10943 tree if_true = constant_boolean_node (true, ops->type);
10944 tree if_false = constant_boolean_node (false, ops->type);
10945 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10948 /* Get the rtx comparison code to use. We know that EXP is a comparison
10949 operation of some type. Some comparisons against 1 and -1 can be
10950 converted to comparisons with zero. Do so here so that the tests
10951 below will be aware that we have a comparison with zero. These
10952 tests will not catch constants in the first operand, but constants
10953 are rarely passed as the first operand. */
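/* Illustrative examples (not part of the original source), for signed
   operands: "x < 1" becomes "x <= 0", "x >= 1" becomes "x > 0",
   "x <= -1" becomes "x < 0", and "x > -1" becomes "x >= 0".  */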
10955 switch (ops->code)
10957 case EQ_EXPR:
10958 code = EQ;
10959 break;
10960 case NE_EXPR:
10961 code = NE;
10962 break;
10963 case LT_EXPR:
10964 if (integer_onep (arg1))
10965 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10966 else
10967 code = unsignedp ? LTU : LT;
10968 break;
10969 case LE_EXPR:
10970 if (! unsignedp && integer_all_onesp (arg1))
10971 arg1 = integer_zero_node, code = LT;
10972 else
10973 code = unsignedp ? LEU : LE;
10974 break;
10975 case GT_EXPR:
10976 if (! unsignedp && integer_all_onesp (arg1))
10977 arg1 = integer_zero_node, code = GE;
10978 else
10979 code = unsignedp ? GTU : GT;
10980 break;
10981 case GE_EXPR:
10982 if (integer_onep (arg1))
10983 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10984 else
10985 code = unsignedp ? GEU : GE;
10986 break;
10988 case UNORDERED_EXPR:
10989 code = UNORDERED;
10990 break;
10991 case ORDERED_EXPR:
10992 code = ORDERED;
10993 break;
10994 case UNLT_EXPR:
10995 code = UNLT;
10996 break;
10997 case UNLE_EXPR:
10998 code = UNLE;
10999 break;
11000 case UNGT_EXPR:
11001 code = UNGT;
11002 break;
11003 case UNGE_EXPR:
11004 code = UNGE;
11005 break;
11006 case UNEQ_EXPR:
11007 code = UNEQ;
11008 break;
11009 case LTGT_EXPR:
11010 code = LTGT;
11011 break;
11013 default:
11014 gcc_unreachable ();
11017 /* Put a constant second. */
11018 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11019 || TREE_CODE (arg0) == FIXED_CST)
11021 tem = arg0; arg0 = arg1; arg1 = tem;
11022 code = swap_condition (code);
11025 /* If this is an equality or inequality test of a single bit, we can
11026 do this by shifting the bit being tested to the low-order bit and
11027 masking the result with the constant 1. If the condition was EQ,
11028 we xor it with 1. This does not require an scc insn and is faster
11029 than an scc insn even if we have it.
11031 The code to make this transformation was moved into fold_single_bit_test,
11032 so we just call into the folder and expand its result. */
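/* Illustrative example (not part of the original source): "(x & 8) != 0"
   becomes "(x >> 3) & 1", and the EQ form "(x & 8) == 0" additionally
   XORs that result with 1.  */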
11034 if ((code == NE || code == EQ)
11035 && integer_zerop (arg1)
11036 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11038 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11039 if (srcstmt
11040 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11042 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11043 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11044 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11045 gimple_assign_rhs1 (srcstmt),
11046 gimple_assign_rhs2 (srcstmt));
11047 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11048 if (temp)
11049 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11053 if (! get_subtarget (target)
11054 || GET_MODE (subtarget) != operand_mode)
11055 subtarget = 0;
11057 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11059 if (target == 0)
11060 target = gen_reg_rtx (mode);
11062 /* Try a cstore if possible. */
11063 return emit_store_flag_force (target, code, op0, op1,
11064 operand_mode, unsignedp,
11065 (TYPE_PRECISION (ops->type) == 1
11066 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
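/* Illustrative note (not part of the original source): the final argument
   to emit_store_flag_force selects the value stored for a true comparison
   (1 or -1 here); a signed 1-bit boolean represents true as -1, every
   other type as 1.  When no single set-flag instruction exists, the
   set/jump/set fallback mentioned above is conceptually
     target = 1;  if (op0 <cond> op1) goto done;  target = 0;  done:;  */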
11070 /* Stubs in case we haven't got a casesi insn. */
11071 #ifndef HAVE_casesi
11072 # define HAVE_casesi 0
11073 # define gen_casesi(a, b, c, d, e) (0)
11074 # define CODE_FOR_casesi CODE_FOR_nothing
11075 #endif
11077 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11078 0 otherwise (i.e. if there is no casesi instruction).
11080 DEFAULT_PROBABILITY is the probability of jumping to the default
11081 label. */
11083 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11084 rtx table_label, rtx default_label, rtx fallback_label,
11085 int default_probability)
11087 struct expand_operand ops[5];
11088 enum machine_mode index_mode = SImode;
11089 rtx op1, op2, index;
11091 if (! HAVE_casesi)
11092 return 0;
11094 /* Convert the index to SImode. */
11095 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11097 enum machine_mode omode = TYPE_MODE (index_type);
11098 rtx rangertx = expand_normal (range);
11100 /* We must handle the endpoints in the original mode. */
11101 index_expr = build2 (MINUS_EXPR, index_type,
11102 index_expr, minval);
11103 minval = integer_zero_node;
11104 index = expand_normal (index_expr);
11105 if (default_label)
11106 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11107 omode, 1, default_label,
11108 default_probability);
11109 /* Now we can safely truncate. */
11110 index = convert_to_mode (index_mode, index, 0);
11112 else
11114 if (TYPE_MODE (index_type) != index_mode)
11116 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11117 index_expr = fold_convert (index_type, index_expr);
11120 index = expand_normal (index_expr);
11123 do_pending_stack_adjust ();
11125 op1 = expand_normal (minval);
11126 op2 = expand_normal (range);
11128 create_input_operand (&ops[0], index, index_mode);
11129 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11130 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11131 create_fixed_operand (&ops[3], table_label);
11132 create_fixed_operand (&ops[4], (default_label
11133 ? default_label
11134 : fallback_label));
11135 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11136 return 1;
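/* Illustrative note (not part of the original source): with an index type
   wider than SImode, e.g. a DImode index on a 64-bit target, the code
   above first tests "index - minval > range" in the wide mode and jumps
   to the default label (when one exists) on failure, so the subsequent
   truncation to SImode can never discard significant bits of an in-range
   index.  */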
11139 /* Attempt to generate a tablejump instruction; same concept. */
11140 #ifndef HAVE_tablejump
11141 #define HAVE_tablejump 0
11142 #define gen_tablejump(x, y) (0)
11143 #endif
11145 /* Subroutine of the next function.
11147 INDEX is the value being switched on, with the lowest value
11148 in the table already subtracted.
11149 MODE is its expected mode (needed if INDEX is constant).
11150 RANGE is the length of the jump table.
11151 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11153 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11154 index value is out of range.
11155 DEFAULT_PROBABILITY is the probability of jumping to
11156 the default label. */
11158 static void
11159 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11160 rtx default_label, int default_probability)
11162 rtx temp, vector;
11164 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11165 cfun->cfg->max_jumptable_ents = INTVAL (range);
11167 /* Do an unsigned comparison (in the proper mode) between the index
11168 expression and the value which represents the length of the range.
11169 Since we just finished subtracting the lower bound of the range
11170 from the index expression, this comparison allows us to simultaneously
11171 check that the original index expression value is both greater than
11172 or equal to the minimum value of the range and less than or equal to
11173 the maximum value of the range. */
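/* Illustrative example (not part of the original source): for
   "switch (x)" with case values 10..17, INDEX arrives here as x - 10 and
   RANGE is 7, so the single unsigned test "x - 10 > 7" rejects both
   x < 10 (which wraps to a huge unsigned value) and x > 17.  */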
11175 if (default_label)
11176 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11177 default_label, default_probability);
11180 /* If index is in range, it must fit in Pmode.
11181 Convert to Pmode so we can index with it. */
11182 if (mode != Pmode)
11183 index = convert_to_mode (Pmode, index, 1);
11185 /* Don't let a MEM slip through, because then INDEX that comes
11186 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11187 and break_out_memory_refs will go to work on it and mess it up. */
11188 #ifdef PIC_CASE_VECTOR_ADDRESS
11189 if (flag_pic && !REG_P (index))
11190 index = copy_to_mode_reg (Pmode, index);
11191 #endif
11193 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11194 GET_MODE_SIZE, because this indicates how large insns are. The other
11195 uses should all be Pmode, because they are addresses. This code
11196 could fail if addresses and insns are not the same size. */
11197 index = simplify_gen_binary (MULT, Pmode, index,
11198 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11199 Pmode));
11200 index = simplify_gen_binary (PLUS, Pmode, index,
11201 gen_rtx_LABEL_REF (Pmode, table_label));
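/* (Illustrative, not part of the original source: INDEX now holds the
   byte address table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE),
   i.e. the address of the dispatch-table entry loaded below.)  */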
11203 #ifdef PIC_CASE_VECTOR_ADDRESS
11204 if (flag_pic)
11205 index = PIC_CASE_VECTOR_ADDRESS (index);
11206 else
11207 #endif
11208 index = memory_address (CASE_VECTOR_MODE, index);
11209 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11210 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11211 convert_move (temp, vector, 0);
11213 emit_jump_insn (gen_tablejump (temp, table_label));
11215 /* If we are generating PIC code or if the table is PC-relative, the
11216 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11217 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11218 emit_barrier ();
11222 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11223 rtx table_label, rtx default_label, int default_probability)
11225 rtx index;
11227 if (! HAVE_tablejump)
11228 return 0;
11230 index_expr = fold_build2 (MINUS_EXPR, index_type,
11231 fold_convert (index_type, index_expr),
11232 fold_convert (index_type, minval));
11233 index = expand_normal (index_expr);
11234 do_pending_stack_adjust ();
11236 do_tablejump (index, TYPE_MODE (index_type),
11237 convert_modes (TYPE_MODE (index_type),
11238 TYPE_MODE (TREE_TYPE (range)),
11239 expand_normal (range),
11240 TYPE_UNSIGNED (TREE_TYPE (range))),
11241 table_label, default_label, default_probability);
11242 return 1;
11245 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11246 static rtx
11247 const_vector_from_tree (tree exp)
11249 rtvec v;
11250 unsigned i;
11251 int units;
11252 tree elt;
11253 enum machine_mode inner, mode;
11255 mode = TYPE_MODE (TREE_TYPE (exp));
11257 if (initializer_zerop (exp))
11258 return CONST0_RTX (mode);
11260 units = GET_MODE_NUNITS (mode);
11261 inner = GET_MODE_INNER (mode);
11263 v = rtvec_alloc (units);
11265 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11267 elt = VECTOR_CST_ELT (exp, i);
11269 if (TREE_CODE (elt) == REAL_CST)
11270 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11271 inner);
11272 else if (TREE_CODE (elt) == FIXED_CST)
11273 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11274 inner);
11275 else
11276 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11279 return gen_rtx_CONST_VECTOR (mode, v);
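/* Illustrative example (not part of the original source): a V4SI
   VECTOR_CST {1, 2, 3, 4} becomes (const_vector:V4SI [1 2 3 4]), each
   element going through immed_wide_int_const, while an all-zero
   constructor is short-circuited to CONST0_RTX for the vector mode.  */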
11282 /* Build a decl for a personality function given a language prefix. */
11284 tree
11285 build_personality_function (const char *lang)
11287 const char *unwind_and_version;
11288 tree decl, type;
11289 char *name;
11291 switch (targetm_common.except_unwind_info (&global_options))
11293 case UI_NONE:
11294 return NULL;
11295 case UI_SJLJ:
11296 unwind_and_version = "_sj0";
11297 break;
11298 case UI_DWARF2:
11299 case UI_TARGET:
11300 unwind_and_version = "_v0";
11301 break;
11302 case UI_SEH:
11303 unwind_and_version = "_seh0";
11304 break;
11305 default:
11306 gcc_unreachable ();
11309 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11311 type = build_function_type_list (integer_type_node, integer_type_node,
11312 long_long_unsigned_type_node,
11313 ptr_type_node, ptr_type_node, NULL_TREE);
11314 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11315 get_identifier (name), type);
11316 DECL_ARTIFICIAL (decl) = 1;
11317 DECL_EXTERNAL (decl) = 1;
11318 TREE_PUBLIC (decl) = 1;
11320 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11321 are the flags assigned by targetm.encode_section_info. */
11322 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11324 return decl;
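/* Illustrative example (not part of the original source): with DWARF-2
   unwind info, build_personality_function ("gxx") declares
   "__gxx_personality_v0", the usual C++ personality routine, while a
   setjmp/longjmp configuration would yield "__gxx_personality_sj0".  */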
11327 /* Extracts the personality function of DECL and returns the corresponding
11328 libfunc. */
11331 get_personality_function (tree decl)
11333 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11334 enum eh_personality_kind pk;
11336 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11337 if (pk == eh_personality_none)
11338 return NULL;
11340 if (!personality
11341 && pk == eh_personality_any)
11342 personality = lang_hooks.eh_personality ();
11344 if (pk == eh_personality_lang)
11345 gcc_assert (personality != NULL_TREE);
11347 return XEXP (DECL_RTL (personality), 0);
11350 #include "gt-expr.h"