[Patch 1/7] Hookize *_BY_PIECES_P
[official-gcc.git] / gcc / expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "input.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "insn-attr.h"
42 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "expr.h"
44 #include "optabs.h"
45 #include "libfuncs.h"
46 #include "recog.h"
47 #include "reload.h"
48 #include "typeclass.h"
49 #include "toplev.h"
50 #include "langhooks.h"
51 #include "intl.h"
52 #include "tm_p.h"
53 #include "tree-iterator.h"
54 #include "predict.h"
55 #include "dominance.h"
56 #include "cfg.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-expr.h"
61 #include "is-a.h"
62 #include "gimple.h"
63 #include "gimple-ssa.h"
64 #include "hash-map.h"
65 #include "plugin-api.h"
66 #include "ipa-ref.h"
67 #include "cgraph.h"
68 #include "tree-ssanames.h"
69 #include "target.h"
70 #include "common/common-target.h"
71 #include "timevar.h"
72 #include "df.h"
73 #include "diagnostic.h"
74 #include "tree-ssa-live.h"
75 #include "tree-outof-ssa.h"
76 #include "target-globals.h"
77 #include "params.h"
78 #include "tree-ssa-address.h"
79 #include "cfgexpand.h"
80 #include "builtins.h"
82 #ifndef STACK_PUSH_CODE
83 #ifdef STACK_GROWS_DOWNWARD
84 #define STACK_PUSH_CODE PRE_DEC
85 #else
86 #define STACK_PUSH_CODE PRE_INC
87 #endif
88 #endif
91 /* If this is nonzero, we do not bother generating VOLATILE
92 around volatile memory references, and we are willing to
93 output indirect addresses. If cse is to follow, we reject
94 indirect addresses so a useful potential cse is generated;
95 if it is used only once, instruction combination will produce
96 the same indirect address eventually. */
97 int cse_not_expected;
99 /* This structure is used by move_by_pieces to describe the move to
100 be performed. */
101 struct move_by_pieces_d
103 rtx to;
104 rtx to_addr;
105 int autinc_to;
106 int explicit_inc_to;
107 rtx from;
108 rtx from_addr;
109 int autinc_from;
110 int explicit_inc_from;
111 unsigned HOST_WIDE_INT len;
112 HOST_WIDE_INT offset;
113 int reverse;
116 /* This structure is used by store_by_pieces to describe the clear to
117 be performed. */
119 struct store_by_pieces_d
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
125 unsigned HOST_WIDE_INT len;
126 HOST_WIDE_INT offset;
127 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
128 void *constfundata;
129 int reverse;
132 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
133 struct move_by_pieces_d *);
134 static bool block_move_libcall_safe_for_call_parm (void);
135 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
136 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
137 unsigned HOST_WIDE_INT);
138 static tree emit_block_move_libcall_fn (int);
139 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
140 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
141 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
142 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
143 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
144 struct store_by_pieces_d *);
145 static tree clear_storage_libcall_fn (int);
146 static rtx_insn *compress_float_constant (rtx, rtx);
147 static rtx get_subtarget (rtx);
148 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
149 HOST_WIDE_INT, machine_mode,
150 tree, int, alias_set_type);
151 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
152 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
153 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
154 machine_mode, tree, alias_set_type, bool);
156 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
158 static int is_aligning_offset (const_tree, const_tree);
159 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
160 enum expand_modifier);
161 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
162 static rtx do_store_flag (sepops, rtx, machine_mode);
163 #ifdef PUSH_ROUNDING
164 static void emit_single_push_insn (machine_mode, rtx, tree);
165 #endif
166 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
167 static rtx const_vector_from_tree (tree);
168 static void write_complex_part (rtx, rtx, bool);
170 /* This macro is used to determine whether move_by_pieces should be called
171 to perform a structure copy. */
172 #ifndef MOVE_BY_PIECES_P
173 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
174 (targetm.use_by_pieces_infrastructure_p (SIZE, ALIGN, MOVE_BY_PIECES, \
175 optimize_insn_for_speed_p ()))
176 #endif
178 /* This macro is used to determine whether clear_by_pieces should be
179 called to clear storage. */
180 #ifndef CLEAR_BY_PIECES_P
181 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
182 (targetm.use_by_pieces_infrastructure_p (SIZE, ALIGN, CLEAR_BY_PIECES, \
183 optimize_insn_for_speed_p ()))
184 #endif
186 /* This macro is used to determine whether store_by_pieces should be
187 called to "memset" storage with byte values other than zero. */
188 #ifndef SET_BY_PIECES_P
189 #define SET_BY_PIECES_P(SIZE, ALIGN) \
190 (targetm.use_by_pieces_infrastructure_p (SIZE, ALIGN, SET_BY_PIECES, \
191 optimize_insn_for_speed_p ()))
192 #endif
194 /* This macro is used to determine whether store_by_pieces should be
195 called to "memcpy" storage when the source is a constant string. */
196 #ifndef STORE_BY_PIECES_P
197 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
198 (targetm.use_by_pieces_infrastructure_p (SIZE, ALIGN, STORE_BY_PIECES, \
199 optimize_insn_for_speed_p ()))
200 #endif
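
All four of these macros now defer to the targetm.use_by_pieces_infrastructure_p hook, passing the block size, the alignment, the kind of by-pieces operation, and whether the insn is being optimized for speed. Purely as an illustrative sketch (not part of expr.c, not the policy of any real port, and with parameter types assumed to match the hook as it is called above), a target override of that hook might look roughly like this:

static bool
example_use_by_pieces_infrastructure_p (unsigned HOST_WIDE_INT size,
                                        unsigned int align,
                                        enum by_pieces_operation op,
                                        bool speed_p)
{
  /* Invented example policy: allow larger inline expansions when
     optimizing for speed, and require word alignment for moves and
     constant-string stores.  */
  unsigned HOST_WIDE_INT cutoff = speed_p ? 16 : 8;

  if (op == MOVE_BY_PIECES || op == STORE_BY_PIECES)
    return size <= cutoff && align >= BITS_PER_WORD;

  /* CLEAR_BY_PIECES and SET_BY_PIECES.  */
  return size <= cutoff;
}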
202 /* This is run to set up which modes can be used
203 directly in memory and to initialize the block move optab. It is run
204 at the beginning of compilation and when the target is reinitialized. */
206 void
207 init_expr_target (void)
209 rtx insn, pat;
210 machine_mode mode;
211 int num_clobbers;
212 rtx mem, mem1;
213 rtx reg;
215 /* Try indexing by frame ptr and try by stack ptr.
216 It is known that on the Convex the stack ptr isn't a valid index.
217 With luck, one or the other is valid on any machine. */
218 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
219 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
221 /* A scratch register we can modify in-place below to avoid
222 useless RTL allocations. */
223 reg = gen_rtx_REG (VOIDmode, -1);
225 insn = rtx_alloc (INSN);
226 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
227 PATTERN (insn) = pat;
229 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
230 mode = (machine_mode) ((int) mode + 1))
232 int regno;
234 direct_load[(int) mode] = direct_store[(int) mode] = 0;
235 PUT_MODE (mem, mode);
236 PUT_MODE (mem1, mode);
237 PUT_MODE (reg, mode);
239 /* See if there is some register that can be used in this mode and
240 directly loaded or stored from memory. */
242 if (mode != VOIDmode && mode != BLKmode)
243 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
244 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
245 regno++)
247 if (! HARD_REGNO_MODE_OK (regno, mode))
248 continue;
250 SET_REGNO (reg, regno);
252 SET_SRC (pat) = mem;
253 SET_DEST (pat) = reg;
254 if (recog (pat, insn, &num_clobbers) >= 0)
255 direct_load[(int) mode] = 1;
257 SET_SRC (pat) = mem1;
258 SET_DEST (pat) = reg;
259 if (recog (pat, insn, &num_clobbers) >= 0)
260 direct_load[(int) mode] = 1;
262 SET_SRC (pat) = reg;
263 SET_DEST (pat) = mem;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_store[(int) mode] = 1;
267 SET_SRC (pat) = reg;
268 SET_DEST (pat) = mem1;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_store[(int) mode] = 1;
274 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
277 mode = GET_MODE_WIDER_MODE (mode))
279 machine_mode srcmode;
280 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
281 srcmode = GET_MODE_WIDER_MODE (srcmode))
283 enum insn_code ic;
285 ic = can_extend_p (mode, srcmode, 0);
286 if (ic == CODE_FOR_nothing)
287 continue;
289 PUT_MODE (mem, srcmode);
291 if (insn_operand_matches (ic, 1, mem))
292 float_extend_from_mem[mode][srcmode] = true;
297 /* This is run at the start of compiling a function. */
299 void
300 init_expr (void)
302 memset (&crtl->expr, 0, sizeof (crtl->expr));
305 /* Copy data from FROM to TO, where the machine modes are not the same.
306 Both modes may be integer, or both may be floating, or both may be
307 fixed-point.
308 UNSIGNEDP should be nonzero if FROM is an unsigned type.
309 This causes zero-extension instead of sign-extension. */
311 void
312 convert_move (rtx to, rtx from, int unsignedp)
314 machine_mode to_mode = GET_MODE (to);
315 machine_mode from_mode = GET_MODE (from);
316 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
317 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
318 enum insn_code code;
319 rtx libcall;
321 /* rtx code for making an equivalent value. */
322 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
323 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
326 gcc_assert (to_real == from_real);
327 gcc_assert (to_mode != BLKmode);
328 gcc_assert (from_mode != BLKmode);
330 /* If the source and destination are already the same, then there's
331 nothing to do. */
332 if (to == from)
333 return;
335 /* If FROM is a SUBREG that indicates that we have already done at least
336 the required extension, strip it. We don't handle such SUBREGs as
337 TO here. */
339 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
340 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
341 >= GET_MODE_PRECISION (to_mode))
342 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
343 from = gen_lowpart (to_mode, from), from_mode = to_mode;
345 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
347 if (to_mode == from_mode
348 || (from_mode == VOIDmode && CONSTANT_P (from)))
350 emit_move_insn (to, from);
351 return;
354 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
356 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
358 if (VECTOR_MODE_P (to_mode))
359 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
360 else
361 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
363 emit_move_insn (to, from);
364 return;
367 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
369 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
370 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
371 return;
374 if (to_real)
376 rtx value;
377 rtx_insn *insns;
378 convert_optab tab;
380 gcc_assert ((GET_MODE_PRECISION (from_mode)
381 != GET_MODE_PRECISION (to_mode))
382 || (DECIMAL_FLOAT_MODE_P (from_mode)
383 != DECIMAL_FLOAT_MODE_P (to_mode)));
385 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
386 /* Conversion between decimal float and binary float, same size. */
387 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
388 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
389 tab = sext_optab;
390 else
391 tab = trunc_optab;
393 /* Try converting directly if the insn is supported. */
395 code = convert_optab_handler (tab, to_mode, from_mode);
396 if (code != CODE_FOR_nothing)
398 emit_unop_insn (code, to, from,
399 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
400 return;
403 /* Otherwise use a libcall. */
404 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
406 /* Is this conversion implemented yet? */
407 gcc_assert (libcall);
409 start_sequence ();
410 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
411 1, from, from_mode);
412 insns = get_insns ();
413 end_sequence ();
414 emit_libcall_block (insns, to, value,
415 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
416 from)
417 : gen_rtx_FLOAT_EXTEND (to_mode, from));
418 return;
421 /* Handle pointer conversion. */ /* SPEE 900220. */
422 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
424 convert_optab ctab;
426 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
427 ctab = trunc_optab;
428 else if (unsignedp)
429 ctab = zext_optab;
430 else
431 ctab = sext_optab;
433 if (convert_optab_handler (ctab, to_mode, from_mode)
434 != CODE_FOR_nothing)
436 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
437 to, from, UNKNOWN);
438 return;
442 /* Targets are expected to provide conversion insns between PxImode and
443 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
444 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
446 machine_mode full_mode
447 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
449 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
450 != CODE_FOR_nothing);
452 if (full_mode != from_mode)
453 from = convert_to_mode (full_mode, from, unsignedp);
454 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
455 to, from, UNKNOWN);
456 return;
458 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
460 rtx new_from;
461 machine_mode full_mode
462 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
463 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
464 enum insn_code icode;
466 icode = convert_optab_handler (ctab, full_mode, from_mode);
467 gcc_assert (icode != CODE_FOR_nothing);
469 if (to_mode == full_mode)
471 emit_unop_insn (icode, to, from, UNKNOWN);
472 return;
475 new_from = gen_reg_rtx (full_mode);
476 emit_unop_insn (icode, new_from, from, UNKNOWN);
478 /* else proceed to integer conversions below. */
479 from_mode = full_mode;
480 from = new_from;
483 /* Make sure both are fixed-point modes or both are not. */
484 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
485 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
486 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
488 /* If we widen from_mode to to_mode and they are in the same class,
489 we won't saturate the result.
490 Otherwise, always saturate the result to play safe. */
491 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
492 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
493 expand_fixed_convert (to, from, 0, 0);
494 else
495 expand_fixed_convert (to, from, 0, 1);
496 return;
499 /* Now both modes are integers. */
501 /* Handle expanding beyond a word. */
502 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
503 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
505 rtx_insn *insns;
506 rtx lowpart;
507 rtx fill_value;
508 rtx lowfrom;
509 int i;
510 machine_mode lowpart_mode;
511 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
513 /* Try converting directly if the insn is supported. */
514 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
515 != CODE_FOR_nothing)
517 /* If FROM is a SUBREG, put it into a register. Do this
518 so that we always generate the same set of insns for
519 better cse'ing; if an intermediate assignment occurred,
520 we won't be doing the operation directly on the SUBREG. */
521 if (optimize > 0 && GET_CODE (from) == SUBREG)
522 from = force_reg (from_mode, from);
523 emit_unop_insn (code, to, from, equiv_code);
524 return;
526 /* Next, try converting via full word. */
527 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
528 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
529 != CODE_FOR_nothing))
531 rtx word_to = gen_reg_rtx (word_mode);
532 if (REG_P (to))
534 if (reg_overlap_mentioned_p (to, from))
535 from = force_reg (from_mode, from);
536 emit_clobber (to);
538 convert_move (word_to, from, unsignedp);
539 emit_unop_insn (code, to, word_to, equiv_code);
540 return;
543 /* No special multiword conversion insn; do it by hand. */
544 start_sequence ();
546 /* Since we will turn this into a no conflict block, we must ensure that
547 the source does not overlap the target, so force it into an isolated
548 register when it might. Likewise for any MEM input, since the
549 conversion sequence might require several references to it and we
550 must ensure we're getting the same value every time. */
552 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
553 from = force_reg (from_mode, from);
555 /* Get a copy of FROM widened to a word, if necessary. */
556 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
557 lowpart_mode = word_mode;
558 else
559 lowpart_mode = from_mode;
561 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
563 lowpart = gen_lowpart (lowpart_mode, to);
564 emit_move_insn (lowpart, lowfrom);
566 /* Compute the value to put in each remaining word. */
567 if (unsignedp)
568 fill_value = const0_rtx;
569 else
570 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
571 LT, lowfrom, const0_rtx,
572 lowpart_mode, 0, -1);
574 /* Fill the remaining words. */
575 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
577 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
578 rtx subword = operand_subword (to, index, 1, to_mode);
580 gcc_assert (subword);
582 if (fill_value != subword)
583 emit_move_insn (subword, fill_value);
586 insns = get_insns ();
587 end_sequence ();
589 emit_insn (insns);
590 return;
593 /* Truncating multi-word to a word or less. */
594 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
595 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
597 if (!((MEM_P (from)
598 && ! MEM_VOLATILE_P (from)
599 && direct_load[(int) to_mode]
600 && ! mode_dependent_address_p (XEXP (from, 0),
601 MEM_ADDR_SPACE (from)))
602 || REG_P (from)
603 || GET_CODE (from) == SUBREG))
604 from = force_reg (from_mode, from);
605 convert_move (to, gen_lowpart (word_mode, from), 0);
606 return;
609 /* Now follow all the conversions between integers
610 no more than a word long. */
612 /* For truncation, usually we can just refer to FROM in a narrower mode. */
613 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
614 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
616 if (!((MEM_P (from)
617 && ! MEM_VOLATILE_P (from)
618 && direct_load[(int) to_mode]
619 && ! mode_dependent_address_p (XEXP (from, 0),
620 MEM_ADDR_SPACE (from)))
621 || REG_P (from)
622 || GET_CODE (from) == SUBREG))
623 from = force_reg (from_mode, from);
624 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
625 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
626 from = copy_to_reg (from);
627 emit_move_insn (to, gen_lowpart (to_mode, from));
628 return;
631 /* Handle extension. */
632 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
634 /* Convert directly if that works. */
635 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
636 != CODE_FOR_nothing)
638 emit_unop_insn (code, to, from, equiv_code);
639 return;
641 else
643 machine_mode intermediate;
644 rtx tmp;
645 int shift_amount;
647 /* Search for a mode to convert via. */
648 for (intermediate = from_mode; intermediate != VOIDmode;
649 intermediate = GET_MODE_WIDER_MODE (intermediate))
650 if (((can_extend_p (to_mode, intermediate, unsignedp)
651 != CODE_FOR_nothing)
652 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
653 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
654 && (can_extend_p (intermediate, from_mode, unsignedp)
655 != CODE_FOR_nothing))
657 convert_move (to, convert_to_mode (intermediate, from,
658 unsignedp), unsignedp);
659 return;
662 /* No suitable intermediate mode.
663 Generate what we need with shifts. */
664 shift_amount = (GET_MODE_PRECISION (to_mode)
665 - GET_MODE_PRECISION (from_mode));
666 from = gen_lowpart (to_mode, force_reg (from_mode, from));
667 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
668 to, unsignedp);
669 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
670 to, unsignedp);
671 if (tmp != to)
672 emit_move_insn (to, tmp);
673 return;
677 /* Support special truncate insns for certain modes. */
678 if (convert_optab_handler (trunc_optab, to_mode,
679 from_mode) != CODE_FOR_nothing)
681 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
682 to, from, UNKNOWN);
683 return;
686 /* Handle truncation of volatile memrefs, and so on;
687 the things that couldn't be truncated directly,
688 and for which there was no special instruction.
690 ??? Code above formerly short-circuited this, for most integer
691 mode pairs, with a force_reg in from_mode followed by a recursive
692 call to this routine. Appears always to have been wrong. */
693 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
695 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
696 emit_move_insn (to, temp);
697 return;
700 /* Mode combination is not recognized. */
701 gcc_unreachable ();
704 /* Return an rtx for a value that would result
705 from converting X to mode MODE.
706 Both X and MODE may be floating, or both integer.
707 UNSIGNEDP is nonzero if X is an unsigned value.
708 This can be done by referring to a part of X in place
709 or by copying to a new temporary with conversion. */
712 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
714 return convert_modes (mode, VOIDmode, x, unsignedp);
717 /* Return an rtx for a value that would result
718 from converting X from mode OLDMODE to mode MODE.
719 Both modes may be floating, or both integer.
720 UNSIGNEDP is nonzero if X is an unsigned value.
722 This can be done by referring to a part of X in place
723 or by copying to a new temporary with conversion.
725 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
728 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
730 rtx temp;
732 /* If FROM is a SUBREG that indicates that we have already done at least
733 the required extension, strip it. */
735 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
736 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
737 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
738 x = gen_lowpart (mode, SUBREG_REG (x));
740 if (GET_MODE (x) != VOIDmode)
741 oldmode = GET_MODE (x);
743 if (mode == oldmode)
744 return x;
746 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
748 /* If the caller did not tell us the old mode, then there is not
749 much to do with respect to canonicalization. We have to
750 assume that all the bits are significant. */
751 if (GET_MODE_CLASS (oldmode) != MODE_INT)
752 oldmode = MAX_MODE_INT;
753 wide_int w = wide_int::from (std::make_pair (x, oldmode),
754 GET_MODE_PRECISION (mode),
755 unsignedp ? UNSIGNED : SIGNED);
756 return immed_wide_int_const (w, mode);
759 /* We can do this with a gen_lowpart if both desired and current modes
760 are integer, and this is either a constant integer, a register, or a
761 non-volatile MEM. */
762 if (GET_MODE_CLASS (mode) == MODE_INT
763 && GET_MODE_CLASS (oldmode) == MODE_INT
764 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
765 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
766 || (REG_P (x)
767 && (!HARD_REGISTER_P (x)
768 || HARD_REGNO_MODE_OK (REGNO (x), mode))
769 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
771 return gen_lowpart (mode, x);
773 /* Converting an integer constant into MODE is always equivalent to a
774 subreg operation. */
775 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
777 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
778 return simplify_gen_subreg (mode, x, oldmode, 0);
781 temp = gen_reg_rtx (mode);
782 convert_move (temp, x, unsignedp);
783 return temp;
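
As a minimal usage sketch (a hypothetical helper, not part of this file): callers normally go through convert_to_mode or convert_modes and let the routines above choose between referring to part of X in place and copying through a fresh pseudo.

/* Hypothetical example: widen VAL to word_mode, zero- or sign-extending
   according to UNSIGNEDP.  convert_to_mode may return VAL itself, a
   lowpart of it, or a new pseudo holding the converted value.  */
static rtx
example_widen_to_word (rtx val, int unsignedp)
{
  return convert_to_mode (word_mode, val, unsignedp);
}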
786 /* Return the largest alignment we can use for doing a move (or store)
787 of MAX_PIECES. ALIGN is the largest alignment we could use. */
789 static unsigned int
790 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
792 machine_mode tmode;
794 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
795 if (align >= GET_MODE_ALIGNMENT (tmode))
796 align = GET_MODE_ALIGNMENT (tmode);
797 else
799 machine_mode tmode, xmode;
801 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
802 tmode != VOIDmode;
803 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
804 if (GET_MODE_SIZE (tmode) > max_pieces
805 || SLOW_UNALIGNED_ACCESS (tmode, align))
806 break;
808 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
811 return align;
814 /* Return the widest integer mode strictly narrower than SIZE bytes. If no
815 such mode can be found, return VOIDmode. */
817 static machine_mode
818 widest_int_mode_for_size (unsigned int size)
820 machine_mode tmode, mode = VOIDmode;
822 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
823 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
824 if (GET_MODE_SIZE (tmode) < size)
825 mode = tmode;
827 return mode;
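
Because the comparison in the loop above is strict, this returns the widest integer mode strictly narrower than SIZE bytes, which is why the by-pieces routines below call it with MOVE_MAX_PIECES + 1. A worked illustration, assuming the usual 1/2/4/8-byte integer modes exist:

/* Illustration only:
     widest_int_mode_for_size (8 + 1) -> DImode (8 bytes)
     widest_int_mode_for_size (8)     -> SImode (4 bytes)
     widest_int_mode_for_size (1)     -> VOIDmode (nothing narrower).  */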
830 /* Determine whether the LEN bytes can be moved by using several move
831 instructions. Return nonzero if a call to move_by_pieces should
832 succeed. */
835 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
836 unsigned int align ATTRIBUTE_UNUSED)
838 return MOVE_BY_PIECES_P (len, align);
841 /* Generate several move instructions to copy LEN bytes from block FROM to
842 block TO. (These are MEM rtx's with BLKmode).
844 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
845 used to push FROM to the stack.
847 ALIGN is maximum stack alignment we can assume.
849 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
850 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la
851 stpcpy. */
854 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
855 unsigned int align, int endp)
857 struct move_by_pieces_d data;
858 machine_mode to_addr_mode;
859 machine_mode from_addr_mode = get_address_mode (from);
860 rtx to_addr, from_addr = XEXP (from, 0);
861 unsigned int max_size = MOVE_MAX_PIECES + 1;
862 enum insn_code icode;
864 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
866 data.offset = 0;
867 data.from_addr = from_addr;
868 if (to)
870 to_addr_mode = get_address_mode (to);
871 to_addr = XEXP (to, 0);
872 data.to = to;
873 data.autinc_to
874 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
875 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
876 data.reverse
877 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
879 else
881 to_addr_mode = VOIDmode;
882 to_addr = NULL_RTX;
883 data.to = NULL_RTX;
884 data.autinc_to = 1;
885 #ifdef STACK_GROWS_DOWNWARD
886 data.reverse = 1;
887 #else
888 data.reverse = 0;
889 #endif
891 data.to_addr = to_addr;
892 data.from = from;
893 data.autinc_from
894 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
895 || GET_CODE (from_addr) == POST_INC
896 || GET_CODE (from_addr) == POST_DEC);
898 data.explicit_inc_from = 0;
899 data.explicit_inc_to = 0;
900 if (data.reverse) data.offset = len;
901 data.len = len;
903 /* If copying requires more than two move insns,
904 copy addresses to registers (to make displacements shorter)
905 and use post-increment if available. */
906 if (!(data.autinc_from && data.autinc_to)
907 && move_by_pieces_ninsns (len, align, max_size) > 2)
909 /* Find the mode of the largest move...
910 MODE might not be used depending on the definitions of the
911 USE_* macros below. */
912 machine_mode mode ATTRIBUTE_UNUSED
913 = widest_int_mode_for_size (max_size);
915 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
917 data.from_addr = copy_to_mode_reg (from_addr_mode,
918 plus_constant (from_addr_mode,
919 from_addr, len));
920 data.autinc_from = 1;
921 data.explicit_inc_from = -1;
923 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
925 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
926 data.autinc_from = 1;
927 data.explicit_inc_from = 1;
929 if (!data.autinc_from && CONSTANT_P (from_addr))
930 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
931 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
933 data.to_addr = copy_to_mode_reg (to_addr_mode,
934 plus_constant (to_addr_mode,
935 to_addr, len));
936 data.autinc_to = 1;
937 data.explicit_inc_to = -1;
939 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
941 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
942 data.autinc_to = 1;
943 data.explicit_inc_to = 1;
945 if (!data.autinc_to && CONSTANT_P (to_addr))
946 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
949 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
951 /* First move what we can in the largest integer mode, then go to
952 successively smaller modes. */
954 while (max_size > 1 && data.len > 0)
956 machine_mode mode = widest_int_mode_for_size (max_size);
958 if (mode == VOIDmode)
959 break;
961 icode = optab_handler (mov_optab, mode);
962 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
963 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
965 max_size = GET_MODE_SIZE (mode);
968 /* The code above should have handled everything. */
969 gcc_assert (!data.len);
971 if (endp)
973 rtx to1;
975 gcc_assert (!data.reverse);
976 if (data.autinc_to)
978 if (endp == 2)
980 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
981 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
982 else
983 data.to_addr = copy_to_mode_reg (to_addr_mode,
984 plus_constant (to_addr_mode,
985 data.to_addr,
986 -1));
988 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
989 data.offset);
991 else
993 if (endp == 2)
994 --data.offset;
995 to1 = adjust_address (data.to, QImode, data.offset);
997 return to1;
999 else
1000 return data.to;
1003 /* Return number of insns required to move L bytes by pieces.
1004 ALIGN (in bits) is maximum alignment we can assume. */
1006 unsigned HOST_WIDE_INT
1007 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1008 unsigned int max_size)
1010 unsigned HOST_WIDE_INT n_insns = 0;
1012 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1014 while (max_size > 1 && l > 0)
1016 machine_mode mode;
1017 enum insn_code icode;
1019 mode = widest_int_mode_for_size (max_size);
1021 if (mode == VOIDmode)
1022 break;
1024 icode = optab_handler (mov_optab, mode);
1025 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1026 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1028 max_size = GET_MODE_SIZE (mode);
1031 gcc_assert (!l);
1032 return n_insns;
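
To make the greedy decomposition concrete (illustration only, assuming the usual 1/2/4/8-byte integer move insns are available and the alignment permits each of them):

/* 15 bytes decompose as 8 + 4 + 2 + 1, so with MOVE_MAX_PIECES == 8
     move_by_pieces_ninsns (15, align, MOVE_MAX_PIECES + 1) == 4,
   while a 16-byte block counts as two 8-byte moves, i.e. 2 insns.  */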
1035 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1036 with move instructions for mode MODE. GENFUN is the gen_... function
1037 to make a move insn for that mode. DATA has all the other info. */
1039 static void
1040 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1041 struct move_by_pieces_d *data)
1043 unsigned int size = GET_MODE_SIZE (mode);
1044 rtx to1 = NULL_RTX, from1;
1046 while (data->len >= size)
1048 if (data->reverse)
1049 data->offset -= size;
1051 if (data->to)
1053 if (data->autinc_to)
1054 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1055 data->offset);
1056 else
1057 to1 = adjust_address (data->to, mode, data->offset);
1060 if (data->autinc_from)
1061 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1062 data->offset);
1063 else
1064 from1 = adjust_address (data->from, mode, data->offset);
1066 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1067 emit_insn (gen_add2_insn (data->to_addr,
1068 gen_int_mode (-(HOST_WIDE_INT) size,
1069 GET_MODE (data->to_addr))));
1070 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1071 emit_insn (gen_add2_insn (data->from_addr,
1072 gen_int_mode (-(HOST_WIDE_INT) size,
1073 GET_MODE (data->from_addr))));
1075 if (data->to)
1076 emit_insn ((*genfun) (to1, from1));
1077 else
1079 #ifdef PUSH_ROUNDING
1080 emit_single_push_insn (mode, from1, NULL);
1081 #else
1082 gcc_unreachable ();
1083 #endif
1086 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1087 emit_insn (gen_add2_insn (data->to_addr,
1088 gen_int_mode (size,
1089 GET_MODE (data->to_addr))));
1090 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1091 emit_insn (gen_add2_insn (data->from_addr,
1092 gen_int_mode (size,
1093 GET_MODE (data->from_addr))));
1095 if (! data->reverse)
1096 data->offset += size;
1098 data->len -= size;
1102 /* Emit code to move a block Y to a block X. This may be done with
1103 string-move instructions, with multiple scalar move instructions,
1104 or with a library call.
1106 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1107 SIZE is an rtx that says how long they are.
1108 ALIGN is the maximum alignment we can assume they have.
1109 METHOD describes what kind of copy this is, and what mechanisms may be used.
1110 MIN_SIZE is the minimal size of the block to move.
1111 MAX_SIZE is the maximal size of the block to move; if it cannot be represented
1112 in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1114 Return the address of the new block, if memcpy is called and returns it,
1115 0 otherwise. */
1118 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1119 unsigned int expected_align, HOST_WIDE_INT expected_size,
1120 unsigned HOST_WIDE_INT min_size,
1121 unsigned HOST_WIDE_INT max_size,
1122 unsigned HOST_WIDE_INT probable_max_size)
1124 bool may_use_call;
1125 rtx retval = 0;
1126 unsigned int align;
1128 gcc_assert (size);
1129 if (CONST_INT_P (size)
1130 && INTVAL (size) == 0)
1131 return 0;
1133 switch (method)
1135 case BLOCK_OP_NORMAL:
1136 case BLOCK_OP_TAILCALL:
1137 may_use_call = true;
1138 break;
1140 case BLOCK_OP_CALL_PARM:
1141 may_use_call = block_move_libcall_safe_for_call_parm ();
1143 /* Make inhibit_defer_pop nonzero around the library call
1144 to force it to pop the arguments right away. */
1145 NO_DEFER_POP;
1146 break;
1148 case BLOCK_OP_NO_LIBCALL:
1149 may_use_call = false;
1150 break;
1152 default:
1153 gcc_unreachable ();
1156 gcc_assert (MEM_P (x) && MEM_P (y));
1157 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1158 gcc_assert (align >= BITS_PER_UNIT);
1160 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1161 block copy is more efficient for other large modes, e.g. DCmode. */
1162 x = adjust_address (x, BLKmode, 0);
1163 y = adjust_address (y, BLKmode, 0);
1165 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1166 can be incorrect is coming from __builtin_memcpy. */
1167 if (CONST_INT_P (size))
1169 x = shallow_copy_rtx (x);
1170 y = shallow_copy_rtx (y);
1171 set_mem_size (x, INTVAL (size));
1172 set_mem_size (y, INTVAL (size));
1175 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1176 move_by_pieces (x, y, INTVAL (size), align, 0);
1177 else if (emit_block_move_via_movmem (x, y, size, align,
1178 expected_align, expected_size,
1179 min_size, max_size, probable_max_size))
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1185 /* Since x and y are passed to a libcall, mark the corresponding
1186 tree EXPR as addressable. */
1187 tree y_expr = MEM_EXPR (y);
1188 tree x_expr = MEM_EXPR (x);
1189 if (y_expr)
1190 mark_addressable (y_expr);
1191 if (x_expr)
1192 mark_addressable (x_expr);
1193 retval = emit_block_move_via_libcall (x, y, size,
1194 method == BLOCK_OP_TAILCALL);
1197 else
1198 emit_block_move_via_loop (x, y, size, align);
1200 if (method == BLOCK_OP_CALL_PARM)
1201 OK_DEFER_POP;
1203 return retval;
1207 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1209 unsigned HOST_WIDE_INT max, min = 0;
1210 if (GET_CODE (size) == CONST_INT)
1211 min = max = UINTVAL (size);
1212 else
1213 max = GET_MODE_MASK (GET_MODE (size));
1214 return emit_block_move_hints (x, y, size, method, 0, -1,
1215 min, max, max);
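
A minimal caller-side sketch (hypothetical, using only the interfaces shown above): copying a constant number of bytes between two BLKmode MEMs.

/* Hypothetical usage: X and Y are BLKmode MEMs, LEN a byte count.
   emit_block_move lets emit_block_move_hints pick between by-pieces
   moves, a movmem pattern, a memcpy libcall, or a byte loop.  */
static void
example_copy_block (rtx x, rtx y, HOST_WIDE_INT len)
{
  emit_block_move (x, y, GEN_INT (len), BLOCK_OP_NORMAL);
}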
1218 /* A subroutine of emit_block_move. Returns true if calling the
1219 block move libcall will not clobber any parameters which may have
1220 already been placed on the stack. */
1222 static bool
1223 block_move_libcall_safe_for_call_parm (void)
1225 #if defined (REG_PARM_STACK_SPACE)
1226 tree fn;
1227 #endif
1229 /* If arguments are pushed on the stack, then they're safe. */
1230 if (PUSH_ARGS)
1231 return true;
1233 /* If registers go on the stack anyway, any argument is sure to clobber
1234 an outgoing argument. */
1235 #if defined (REG_PARM_STACK_SPACE)
1236 fn = emit_block_move_libcall_fn (false);
1237 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1238 depend on its argument. */
1239 (void) fn;
1240 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1241 && REG_PARM_STACK_SPACE (fn) != 0)
1242 return false;
1243 #endif
1245 /* If any argument goes in memory, then it might clobber an outgoing
1246 argument. */
1248 CUMULATIVE_ARGS args_so_far_v;
1249 cumulative_args_t args_so_far;
1250 tree fn, arg;
1252 fn = emit_block_move_libcall_fn (false);
1253 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1254 args_so_far = pack_cumulative_args (&args_so_far_v);
1256 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1257 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1259 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1260 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1261 NULL_TREE, true);
1262 if (!tmp || !REG_P (tmp))
1263 return false;
1264 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1265 return false;
1266 targetm.calls.function_arg_advance (args_so_far, mode,
1267 NULL_TREE, true);
1270 return true;
1273 /* A subroutine of emit_block_move. Expand a movmem pattern;
1274 return true if successful. */
1276 static bool
1277 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1278 unsigned int expected_align, HOST_WIDE_INT expected_size,
1279 unsigned HOST_WIDE_INT min_size,
1280 unsigned HOST_WIDE_INT max_size,
1281 unsigned HOST_WIDE_INT probable_max_size)
1283 int save_volatile_ok = volatile_ok;
1284 machine_mode mode;
1286 if (expected_align < align)
1287 expected_align = align;
1288 if (expected_size != -1)
1290 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1291 expected_size = probable_max_size;
1292 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1293 expected_size = min_size;
1296 /* Since this is a move insn, we don't care about volatility. */
1297 volatile_ok = 1;
1299 /* Try the most limited insn first, because there's no point
1300 including more than one in the machine description unless
1301 the more limited one has some advantage. */
1303 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1304 mode = GET_MODE_WIDER_MODE (mode))
1306 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1308 if (code != CODE_FOR_nothing
1309 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1310 here because if SIZE is less than the mode mask, as it is
1311 returned by the macro, it will definitely be less than the
1312 actual mode mask. Since SIZE is within the Pmode address
1313 space, we limit MODE to Pmode. */
1314 && ((CONST_INT_P (size)
1315 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1316 <= (GET_MODE_MASK (mode) >> 1)))
1317 || max_size <= (GET_MODE_MASK (mode) >> 1)
1318 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1320 struct expand_operand ops[9];
1321 unsigned int nops;
1323 /* ??? When called via emit_block_move_for_call, it'd be
1324 nice if there were some way to inform the backend, so
1325 that it doesn't fail the expansion because it thinks
1326 emitting the libcall would be more efficient. */
1327 nops = insn_data[(int) code].n_generator_args;
1328 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1330 create_fixed_operand (&ops[0], x);
1331 create_fixed_operand (&ops[1], y);
1332 /* The check above guarantees that this size conversion is valid. */
1333 create_convert_operand_to (&ops[2], size, mode, true);
1334 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1335 if (nops >= 6)
1337 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1338 create_integer_operand (&ops[5], expected_size);
1340 if (nops >= 8)
1342 create_integer_operand (&ops[6], min_size);
1343 /* If we cannot represent the maximal size,
1344 make the parameter NULL. */
1345 if ((HOST_WIDE_INT) max_size != -1)
1346 create_integer_operand (&ops[7], max_size);
1347 else
1348 create_fixed_operand (&ops[7], NULL);
1350 if (nops == 9)
1352 /* If we cannot represent the maximal size,
1353 make the parameter NULL. */
1354 if ((HOST_WIDE_INT) probable_max_size != -1)
1355 create_integer_operand (&ops[8], probable_max_size);
1356 else
1357 create_fixed_operand (&ops[8], NULL);
1359 if (maybe_expand_insn (code, nops, ops))
1361 volatile_ok = save_volatile_ok;
1362 return true;
1367 volatile_ok = save_volatile_ok;
1368 return false;
1371 /* A subroutine of emit_block_move. Expand a call to memcpy.
1372 Return the return value from memcpy, 0 otherwise. */
1375 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1377 rtx dst_addr, src_addr;
1378 tree call_expr, fn, src_tree, dst_tree, size_tree;
1379 machine_mode size_mode;
1380 rtx retval;
1382 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1383 pseudos. We can then place those new pseudos into a VAR_DECL and
1384 use them later. */
1386 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1387 src_addr = copy_addr_to_reg (XEXP (src, 0));
1389 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1390 src_addr = convert_memory_address (ptr_mode, src_addr);
1392 dst_tree = make_tree (ptr_type_node, dst_addr);
1393 src_tree = make_tree (ptr_type_node, src_addr);
1395 size_mode = TYPE_MODE (sizetype);
1397 size = convert_to_mode (size_mode, size, 1);
1398 size = copy_to_mode_reg (size_mode, size);
1400 /* It is incorrect to use the libcall calling conventions to call
1401 memcpy in this context. This could be a user call to memcpy and
1402 the user may wish to examine the return value from memcpy. For
1403 targets where libcalls and normal calls have different conventions
1404 for returning pointers, we could end up generating incorrect code. */
1406 size_tree = make_tree (sizetype, size);
1408 fn = emit_block_move_libcall_fn (true);
1409 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1410 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1412 retval = expand_normal (call_expr);
1414 return retval;
1417 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1418 for the function we use for block copies. */
1420 static GTY(()) tree block_move_fn;
1422 void
1423 init_block_move_fn (const char *asmspec)
1425 if (!block_move_fn)
1427 tree args, fn, attrs, attr_args;
1429 fn = get_identifier ("memcpy");
1430 args = build_function_type_list (ptr_type_node, ptr_type_node,
1431 const_ptr_type_node, sizetype,
1432 NULL_TREE);
1434 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1435 DECL_EXTERNAL (fn) = 1;
1436 TREE_PUBLIC (fn) = 1;
1437 DECL_ARTIFICIAL (fn) = 1;
1438 TREE_NOTHROW (fn) = 1;
1439 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1440 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1442 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1443 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1445 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1447 block_move_fn = fn;
1450 if (asmspec)
1451 set_user_assembler_name (block_move_fn, asmspec);
1454 static tree
1455 emit_block_move_libcall_fn (int for_call)
1457 static bool emitted_extern;
1459 if (!block_move_fn)
1460 init_block_move_fn (NULL);
1462 if (for_call && !emitted_extern)
1464 emitted_extern = true;
1465 make_decl_rtl (block_move_fn);
1468 return block_move_fn;
1471 /* A subroutine of emit_block_move. Copy the data via an explicit
1472 loop. This is used only when libcalls are forbidden. */
1473 /* ??? It'd be nice to copy in hunks larger than QImode. */
1475 static void
1476 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1477 unsigned int align ATTRIBUTE_UNUSED)
1479 rtx_code_label *cmp_label, *top_label;
1480 rtx iter, x_addr, y_addr, tmp;
1481 machine_mode x_addr_mode = get_address_mode (x);
1482 machine_mode y_addr_mode = get_address_mode (y);
1483 machine_mode iter_mode;
1485 iter_mode = GET_MODE (size);
1486 if (iter_mode == VOIDmode)
1487 iter_mode = word_mode;
1489 top_label = gen_label_rtx ();
1490 cmp_label = gen_label_rtx ();
1491 iter = gen_reg_rtx (iter_mode);
1493 emit_move_insn (iter, const0_rtx);
1495 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1496 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1497 do_pending_stack_adjust ();
1499 emit_jump (cmp_label);
1500 emit_label (top_label);
1502 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1503 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1505 if (x_addr_mode != y_addr_mode)
1506 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1507 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1509 x = change_address (x, QImode, x_addr);
1510 y = change_address (y, QImode, y_addr);
1512 emit_move_insn (x, y);
1514 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1515 true, OPTAB_LIB_WIDEN);
1516 if (tmp != iter)
1517 emit_move_insn (iter, tmp);
1519 emit_label (cmp_label);
1521 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1522 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1525 /* Copy all or part of a value X into registers starting at REGNO.
1526 The number of registers to be filled is NREGS. */
1528 void
1529 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1531 int i;
1532 #ifdef HAVE_load_multiple
1533 rtx pat;
1534 rtx_insn *last;
1535 #endif
1537 if (nregs == 0)
1538 return;
1540 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1541 x = validize_mem (force_const_mem (mode, x));
1543 /* See if the machine can do this with a load multiple insn. */
1544 #ifdef HAVE_load_multiple
1545 if (HAVE_load_multiple)
1547 last = get_last_insn ();
1548 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1549 GEN_INT (nregs));
1550 if (pat)
1552 emit_insn (pat);
1553 return;
1555 else
1556 delete_insns_since (last);
1558 #endif
1560 for (i = 0; i < nregs; i++)
1561 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1562 operand_subword_force (x, i, mode));
1565 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1566 The number of registers to be filled is NREGS. */
1568 void
1569 move_block_from_reg (int regno, rtx x, int nregs)
1571 int i;
1573 if (nregs == 0)
1574 return;
1576 /* See if the machine can do this with a store multiple insn. */
1577 #ifdef HAVE_store_multiple
1578 if (HAVE_store_multiple)
1580 rtx_insn *last = get_last_insn ();
1581 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1582 GEN_INT (nregs));
1583 if (pat)
1585 emit_insn (pat);
1586 return;
1588 else
1589 delete_insns_since (last);
1591 #endif
1593 for (i = 0; i < nregs; i++)
1595 rtx tem = operand_subword (x, i, 1, BLKmode);
1597 gcc_assert (tem);
1599 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1603 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1604 ORIG, where ORIG is a non-consecutive group of registers represented by
1605 a PARALLEL. The clone is identical to the original except in that the
1606 original set of registers is replaced by a new set of pseudo registers.
1607 The new set has the same modes as the original set. */
1610 gen_group_rtx (rtx orig)
1612 int i, length;
1613 rtx *tmps;
1615 gcc_assert (GET_CODE (orig) == PARALLEL);
1617 length = XVECLEN (orig, 0);
1618 tmps = XALLOCAVEC (rtx, length);
1620 /* Skip a NULL entry in first slot. */
1621 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1623 if (i)
1624 tmps[0] = 0;
1626 for (; i < length; i++)
1628 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1629 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1631 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1634 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1637 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1638 except that values are placed in TMPS[i], and must later be moved
1639 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1641 static void
1642 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1644 rtx src;
1645 int start, i;
1646 machine_mode m = GET_MODE (orig_src);
1648 gcc_assert (GET_CODE (dst) == PARALLEL);
1650 if (m != VOIDmode
1651 && !SCALAR_INT_MODE_P (m)
1652 && !MEM_P (orig_src)
1653 && GET_CODE (orig_src) != CONCAT)
1655 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1656 if (imode == BLKmode)
1657 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1658 else
1659 src = gen_reg_rtx (imode);
1660 if (imode != BLKmode)
1661 src = gen_lowpart (GET_MODE (orig_src), src);
1662 emit_move_insn (src, orig_src);
1663 /* ...and back again. */
1664 if (imode != BLKmode)
1665 src = gen_lowpart (imode, src);
1666 emit_group_load_1 (tmps, dst, src, type, ssize);
1667 return;
1670 /* Check for a NULL entry, used to indicate that the parameter goes
1671 both on the stack and in registers. */
1672 if (XEXP (XVECEXP (dst, 0, 0), 0))
1673 start = 0;
1674 else
1675 start = 1;
1677 /* Process the pieces. */
1678 for (i = start; i < XVECLEN (dst, 0); i++)
1680 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1681 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1682 unsigned int bytelen = GET_MODE_SIZE (mode);
1683 int shift = 0;
1685 /* Handle trailing fragments that run over the size of the struct. */
1686 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1688 /* Arrange to shift the fragment to where it belongs.
1689 extract_bit_field loads to the lsb of the reg. */
1690 if (
1691 #ifdef BLOCK_REG_PADDING
1692 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1693 == (BYTES_BIG_ENDIAN ? upward : downward)
1694 #else
1695 BYTES_BIG_ENDIAN
1696 #endif
1698 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1699 bytelen = ssize - bytepos;
1700 gcc_assert (bytelen > 0);
1703 /* If we won't be loading directly from memory, protect the real source
1704 from strange tricks we might play; but make sure that the source can
1705 be loaded directly into the destination. */
1706 src = orig_src;
1707 if (!MEM_P (orig_src)
1708 && (!CONSTANT_P (orig_src)
1709 || (GET_MODE (orig_src) != mode
1710 && GET_MODE (orig_src) != VOIDmode)))
1712 if (GET_MODE (orig_src) == VOIDmode)
1713 src = gen_reg_rtx (mode);
1714 else
1715 src = gen_reg_rtx (GET_MODE (orig_src));
1717 emit_move_insn (src, orig_src);
1720 /* Optimize the access just a bit. */
1721 if (MEM_P (src)
1722 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1723 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1724 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1725 && bytelen == GET_MODE_SIZE (mode))
1727 tmps[i] = gen_reg_rtx (mode);
1728 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1730 else if (COMPLEX_MODE_P (mode)
1731 && GET_MODE (src) == mode
1732 && bytelen == GET_MODE_SIZE (mode))
1733 /* Let emit_move_complex do the bulk of the work. */
1734 tmps[i] = src;
1735 else if (GET_CODE (src) == CONCAT)
1737 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1738 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1740 if ((bytepos == 0 && bytelen == slen0)
1741 || (bytepos != 0 && bytepos + bytelen <= slen))
1743 /* The following assumes that the concatenated objects all
1744 have the same size. In this case, a simple calculation
1745 can be used to determine the object and the bit field
1746 to be extracted. */
1747 tmps[i] = XEXP (src, bytepos / slen0);
1748 if (! CONSTANT_P (tmps[i])
1749 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1750 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1751 (bytepos % slen0) * BITS_PER_UNIT,
1752 1, NULL_RTX, mode, mode);
1754 else
1756 rtx mem;
1758 gcc_assert (!bytepos);
1759 mem = assign_stack_temp (GET_MODE (src), slen);
1760 emit_move_insn (mem, src);
1761 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1762 0, 1, NULL_RTX, mode, mode);
1765 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1766 SIMD register, which is currently broken. While we get GCC
1767 to emit proper RTL for these cases, let's dump to memory. */
1768 else if (VECTOR_MODE_P (GET_MODE (dst))
1769 && REG_P (src))
1771 int slen = GET_MODE_SIZE (GET_MODE (src));
1772 rtx mem;
1774 mem = assign_stack_temp (GET_MODE (src), slen);
1775 emit_move_insn (mem, src);
1776 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1778 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1779 && XVECLEN (dst, 0) > 1)
1780 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1781 else if (CONSTANT_P (src))
1783 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1785 if (len == ssize)
1786 tmps[i] = src;
1787 else
1789 rtx first, second;
1791 /* TODO: const_wide_int can have sizes other than this... */
1792 gcc_assert (2 * len == ssize);
1793 split_double (src, &first, &second);
1794 if (i)
1795 tmps[i] = second;
1796 else
1797 tmps[i] = first;
1800 else if (REG_P (src) && GET_MODE (src) == mode)
1801 tmps[i] = src;
1802 else
1803 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1804 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1805 mode, mode);
1807 if (shift)
1808 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1809 shift, tmps[i], 0);
1813 /* Emit code to move a block SRC of type TYPE to a block DST,
1814 where DST is non-consecutive registers represented by a PARALLEL.
1815 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1816 if not known. */
1818 void
1819 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1821 rtx *tmps;
1822 int i;
1824 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1825 emit_group_load_1 (tmps, dst, src, type, ssize);
1827 /* Copy the extracted pieces into the proper (probable) hard regs. */
1828 for (i = 0; i < XVECLEN (dst, 0); i++)
1830 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1831 if (d == NULL)
1832 continue;
1833 emit_move_insn (d, tmps[i]);
1837 /* Similar, but load SRC into new pseudos in a format that looks like
1838 PARALLEL. This can later be fed to emit_group_move to get things
1839 in the right place. */
1842 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1844 rtvec vec;
1845 int i;
1847 vec = rtvec_alloc (XVECLEN (parallel, 0));
1848 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1850 /* Convert the vector to look just like the original PARALLEL, except
1851 with the computed values. */
1852 for (i = 0; i < XVECLEN (parallel, 0); i++)
1854 rtx e = XVECEXP (parallel, 0, i);
1855 rtx d = XEXP (e, 0);
1857 if (d)
1859 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1862 RTVEC_ELT (vec, i) = e;
1865 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1868 /* Emit code to move a block SRC to block DST, where SRC and DST are
1869 non-consecutive groups of registers, each represented by a PARALLEL. */
1871 void
1872 emit_group_move (rtx dst, rtx src)
1874 int i;
1876 gcc_assert (GET_CODE (src) == PARALLEL
1877 && GET_CODE (dst) == PARALLEL
1878 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1880 /* Skip first entry if NULL. */
1881 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1882 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1883 XEXP (XVECEXP (src, 0, i), 0));
1886 /* Move a group of registers represented by a PARALLEL into pseudos. */
1889 emit_group_move_into_temps (rtx src)
1891 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1892 int i;
1894 for (i = 0; i < XVECLEN (src, 0); i++)
1896 rtx e = XVECEXP (src, 0, i);
1897 rtx d = XEXP (e, 0);
1899 if (d)
1900 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1901 RTVEC_ELT (vec, i) = e;
1904 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1907 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1908 where SRC is non-consecutive registers represented by a PARALLEL.
1909 SSIZE represents the total size of block ORIG_DST, or -1 if not
1910 known. */
1912 void
1913 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1915 rtx *tmps, dst;
1916 int start, finish, i;
1917 machine_mode m = GET_MODE (orig_dst);
1919 gcc_assert (GET_CODE (src) == PARALLEL);
1921 if (!SCALAR_INT_MODE_P (m)
1922 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1924 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1925 if (imode == BLKmode)
1926 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1927 else
1928 dst = gen_reg_rtx (imode);
1929 emit_group_store (dst, src, type, ssize);
1930 if (imode != BLKmode)
1931 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1932 emit_move_insn (orig_dst, dst);
1933 return;
1936 /* Check for a NULL entry, used to indicate that the parameter goes
1937 both on the stack and in registers. */
1938 if (XEXP (XVECEXP (src, 0, 0), 0))
1939 start = 0;
1940 else
1941 start = 1;
1942 finish = XVECLEN (src, 0);
1944 tmps = XALLOCAVEC (rtx, finish);
1946 /* Copy the (probable) hard regs into pseudos. */
1947 for (i = start; i < finish; i++)
1949 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1950 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1952 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1953 emit_move_insn (tmps[i], reg);
1955 else
1956 tmps[i] = reg;
1959 /* If we won't be storing directly into memory, protect the real destination
1960 from strange tricks we might play. */
1961 dst = orig_dst;
1962 if (GET_CODE (dst) == PARALLEL)
1964 rtx temp;
1966 /* We can get a PARALLEL dst if there is a conditional expression in
1967 a return statement. In that case, the dst and src are the same,
1968 so no action is necessary. */
1969 if (rtx_equal_p (dst, src))
1970 return;
1972 /* It is unclear if we can ever reach here, but we may as well handle
1973 it. Allocate a temporary, and split this into a store/load to/from
1974 the temporary. */
1975 temp = assign_stack_temp (GET_MODE (dst), ssize);
1976 emit_group_store (temp, src, type, ssize);
1977 emit_group_load (dst, temp, type, ssize);
1978 return;
1980 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1982 machine_mode outer = GET_MODE (dst);
1983 machine_mode inner;
1984 HOST_WIDE_INT bytepos;
1985 bool done = false;
1986 rtx temp;
1988 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 dst = gen_reg_rtx (outer);
1991 /* Make life a bit easier for combine. */
1992 /* If the first element of the vector is the low part
1993 of the destination mode, use a paradoxical subreg to
1994 initialize the destination. */
1995 if (start < finish)
1997 inner = GET_MODE (tmps[start]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[start],
2002 inner, 0);
2003 if (temp)
2005 emit_move_insn (dst, temp);
2006 done = true;
2007 start++;
2012 /* If the first element wasn't the low part, try the last. */
2013 if (!done
2014 && start < finish - 1)
2016 inner = GET_MODE (tmps[finish - 1]);
2017 bytepos = subreg_lowpart_offset (inner, outer);
2018 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2020 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2021 inner, 0);
2022 if (temp)
2024 emit_move_insn (dst, temp);
2025 done = true;
2026 finish--;
2031 /* Otherwise, simply initialize the result to zero. */
2032 if (!done)
2033 emit_move_insn (dst, CONST0_RTX (outer));
2036 /* Process the pieces. */
2037 for (i = start; i < finish; i++)
2039 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040 machine_mode mode = GET_MODE (tmps[i]);
2041 unsigned int bytelen = GET_MODE_SIZE (mode);
2042 unsigned int adj_bytelen;
2043 rtx dest = dst;
2045 /* Handle trailing fragments that run over the size of the struct. */
2046 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 adj_bytelen = ssize - bytepos;
2048 else
2049 adj_bytelen = bytelen;
2051 if (GET_CODE (dst) == CONCAT)
2053 if (bytepos + adj_bytelen
2054 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2055 dest = XEXP (dst, 0);
2056 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2058 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2059 dest = XEXP (dst, 1);
2061 else
2063 machine_mode dest_mode = GET_MODE (dest);
2064 machine_mode tmp_mode = GET_MODE (tmps[i]);
2066 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2068 if (GET_MODE_ALIGNMENT (dest_mode)
2069 >= GET_MODE_ALIGNMENT (tmp_mode))
2071 dest = assign_stack_temp (dest_mode,
2072 GET_MODE_SIZE (dest_mode));
2073 emit_move_insn (adjust_address (dest,
2074 tmp_mode,
2075 bytepos),
2076 tmps[i]);
2077 dst = dest;
2079 else
2081 dest = assign_stack_temp (tmp_mode,
2082 GET_MODE_SIZE (tmp_mode));
2083 emit_move_insn (dest, tmps[i]);
2084 dst = adjust_address (dest, dest_mode, bytepos);
2086 break;
2090 /* Handle trailing fragments that run over the size of the struct. */
2091 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2093 /* store_bit_field always takes its value from the lsb.
2094 Move the fragment to the lsb if it's not already there. */
2095 if (
2096 #ifdef BLOCK_REG_PADDING
2097 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2098 == (BYTES_BIG_ENDIAN ? upward : downward)
2099 #else
2100 BYTES_BIG_ENDIAN
2101 #endif
2104 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2105 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2106 shift, tmps[i], 0);
2109 /* Make sure not to write past the end of the struct. */
2110 store_bit_field (dest,
2111 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2112 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2113 VOIDmode, tmps[i]);
2116 /* Optimize the access just a bit. */
2117 else if (MEM_P (dest)
2118 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2119 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2120 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2121 && bytelen == GET_MODE_SIZE (mode))
2122 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2124 else
2125 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2126 0, 0, mode, tmps[i]);
2129 /* Copy from the pseudo into the (probable) hard reg. */
2130 if (orig_dst != dst)
2131 emit_move_insn (orig_dst, dst);
2134 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2135 of the value stored in X. */
2137 rtx
2138 maybe_emit_group_store (rtx x, tree type)
2140 machine_mode mode = TYPE_MODE (type);
2141 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2142 if (GET_CODE (x) == PARALLEL)
2144 rtx result = gen_reg_rtx (mode);
2145 emit_group_store (result, x, type, int_size_in_bytes (type));
2146 return result;
2148 return x;
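/* A minimal usage sketch, assuming hypothetical variables "val",
   "fntype" and "target": a caller that may receive either a plain
   register or a PARALLEL after expanding a call can flatten the value
   before using it:

       val = maybe_emit_group_store (val, fntype);
       emit_move_insn (target, val);

   If VAL is not a PARALLEL it is returned unchanged.  */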
2151 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2153 This is used on targets that return BLKmode values in registers. */
2155 void
2156 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2158 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2159 rtx src = NULL, dst = NULL;
2160 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2161 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2162 machine_mode mode = GET_MODE (srcreg);
2163 machine_mode tmode = GET_MODE (target);
2164 machine_mode copy_mode;
2166 /* BLKmode registers created in the back-end shouldn't have survived. */
2167 gcc_assert (mode != BLKmode);
2169 /* If the structure doesn't take up a whole number of words, see whether
2170 SRCREG is padded on the left or on the right. If it's on the left,
2171 set PADDING_CORRECTION to the number of bits to skip.
2173 In most ABIs, the structure will be returned at the least significant end of
2174 the register, which translates to right padding on little-endian
2175 targets and left padding on big-endian targets. The opposite
2176 holds if the structure is returned at the most significant
2177 end of the register. */
2178 if (bytes % UNITS_PER_WORD != 0
2179 && (targetm.calls.return_in_msb (type)
2180 ? !BYTES_BIG_ENDIAN
2181 : BYTES_BIG_ENDIAN))
2182 padding_correction
2183 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2185 /* We can use a single move if we have an exact mode for the size. */
2186 else if (MEM_P (target)
2187 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2188 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2189 && bytes == GET_MODE_SIZE (mode))
2191 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2192 return;
2195 /* And if we additionally have the same mode for a register. */
2196 else if (REG_P (target)
2197 && GET_MODE (target) == mode
2198 && bytes == GET_MODE_SIZE (mode))
2200 emit_move_insn (target, srcreg);
2201 return;
2204 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2205 into a new pseudo which is a full word. */
2206 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2208 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2209 mode = word_mode;
2212 /* Copy the structure BITSIZE bits at a time. If the target lives in
2213 memory, take care of not reading/writing past its end by selecting
2214 a copy mode suited to BITSIZE. This should always be possible given
2215 how it is computed.
2217 If the target lives in a register, make sure not to select a copy mode
2218 larger than the mode of the register.
2220 We could probably emit more efficient code for machines which do not use
2221 strict alignment, but it doesn't seem worth the effort at the current
2222 time. */
2224 copy_mode = word_mode;
2225 if (MEM_P (target))
2227 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2228 if (mem_mode != BLKmode)
2229 copy_mode = mem_mode;
2231 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2232 copy_mode = tmode;
2234 for (bitpos = 0, xbitpos = padding_correction;
2235 bitpos < bytes * BITS_PER_UNIT;
2236 bitpos += bitsize, xbitpos += bitsize)
2238 /* We need a new source operand each time xbitpos is on a
2239 word boundary and when xbitpos == padding_correction
2240 (the first time through). */
2241 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2242 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2244 /* We need a new destination operand each time bitpos is on
2245 a word boundary. */
2246 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2247 dst = target;
2248 else if (bitpos % BITS_PER_WORD == 0)
2249 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2251 /* Use xbitpos for the source extraction (right justified) and
2252 bitpos for the destination store (left justified). */
2253 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2254 extract_bit_field (src, bitsize,
2255 xbitpos % BITS_PER_WORD, 1,
2256 NULL_RTX, copy_mode, copy_mode));
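/* Worked example for the padding computation above: for a 6-byte
   structure on a 32-bit big-endian target that does not return values
   in the most significant end, bytes % UNITS_PER_WORD == 2, so
   PADDING_CORRECTION is 32 - 2 * 8 = 16 and the extraction loop skips
   the first 16 bits of the source register.  */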
2260 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2261 register if it contains any data, otherwise return null.
2263 This is used on targets that return BLKmode values in registers. */
2265 rtx
2266 copy_blkmode_to_reg (machine_mode mode, tree src)
2268 int i, n_regs;
2269 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2270 unsigned int bitsize;
2271 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2272 machine_mode dst_mode;
2274 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2276 x = expand_normal (src);
2278 bytes = int_size_in_bytes (TREE_TYPE (src));
2279 if (bytes == 0)
2280 return NULL_RTX;
2282 /* If the structure doesn't take up a whole number of words, see
2283 whether the register value should be padded on the left or on
2284 the right. Set PADDING_CORRECTION to the number of padding
2285 bits needed on the left side.
2287 In most ABIs, the structure will be returned at the least significant end of
2288 the register, which translates to right padding on little-endian
2289 targets and left padding on big-endian targets. The opposite
2290 holds if the structure is returned at the most significant
2291 end of the register. */
2292 if (bytes % UNITS_PER_WORD != 0
2293 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2294 ? !BYTES_BIG_ENDIAN
2295 : BYTES_BIG_ENDIAN))
2296 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2297 * BITS_PER_UNIT));
2299 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2300 dst_words = XALLOCAVEC (rtx, n_regs);
2301 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2303 /* Copy the structure BITSIZE bits at a time. */
2304 for (bitpos = 0, xbitpos = padding_correction;
2305 bitpos < bytes * BITS_PER_UNIT;
2306 bitpos += bitsize, xbitpos += bitsize)
2308 /* We need a new destination pseudo each time xbitpos is
2309 on a word boundary and when xbitpos == padding_correction
2310 (the first time through). */
2311 if (xbitpos % BITS_PER_WORD == 0
2312 || xbitpos == padding_correction)
2314 /* Generate an appropriate register. */
2315 dst_word = gen_reg_rtx (word_mode);
2316 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2318 /* Clear the destination before we move anything into it. */
2319 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2322 /* We need a new source operand each time bitpos is on a word
2323 boundary. */
2324 if (bitpos % BITS_PER_WORD == 0)
2325 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2327 /* Use bitpos for the source extraction (left justified) and
2328 xbitpos for the destination store (right justified). */
2329 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2330 0, 0, word_mode,
2331 extract_bit_field (src_word, bitsize,
2332 bitpos % BITS_PER_WORD, 1,
2333 NULL_RTX, word_mode, word_mode));
2336 if (mode == BLKmode)
2338 /* Find the smallest integer mode large enough to hold the
2339 entire structure. */
2340 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2341 mode != VOIDmode;
2342 mode = GET_MODE_WIDER_MODE (mode))
2343 /* Have we found a large enough mode? */
2344 if (GET_MODE_SIZE (mode) >= bytes)
2345 break;
2347 /* A suitable mode should have been found. */
2348 gcc_assert (mode != VOIDmode);
2351 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2352 dst_mode = word_mode;
2353 else
2354 dst_mode = mode;
2355 dst = gen_reg_rtx (dst_mode);
2357 for (i = 0; i < n_regs; i++)
2358 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2360 if (mode != dst_mode)
2361 dst = gen_lowpart (mode, dst);
2363 return dst;
2366 /* Add a USE expression for REG to the (possibly empty) list pointed
2367 to by CALL_FUSAGE. REG must denote a hard register. */
2369 void
2370 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2372 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2374 *call_fusage
2375 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
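/* For illustration: after use_reg (&call_fusage, di_reg) followed by
   use_reg (&call_fusage, si_reg), CALL_FUSAGE is a chain of the form

       (expr_list (use (reg si_reg))
          (expr_list (use (reg di_reg)) (nil)))

   which is later attached to the call insn as CALL_INSN_FUNCTION_USAGE;
   the register names here are only placeholders.  */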
2378 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2379 to by CALL_FUSAGE. REG must denote a hard register. */
2381 void
2382 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2384 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2386 *call_fusage
2387 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2390 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2391 starting at REGNO. All of these registers must be hard registers. */
2393 void
2394 use_regs (rtx *call_fusage, int regno, int nregs)
2396 int i;
2398 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2400 for (i = 0; i < nregs; i++)
2401 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2404 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2405 PARALLEL REGS. This is for calls that pass values in multiple
2406 non-contiguous locations. The Irix 6 ABI has examples of this. */
2408 void
2409 use_group_regs (rtx *call_fusage, rtx regs)
2411 int i;
2413 for (i = 0; i < XVECLEN (regs, 0); i++)
2415 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2417 /* A NULL entry means the parameter goes both on the stack and in
2418 registers. This can also be a MEM for targets that pass values
2419 partially on the stack and partially in registers. */
2420 if (reg != 0 && REG_P (reg))
2421 use_reg (call_fusage, reg);
2425 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2426 assignment and the code of the expression on the RHS is CODE. Return
2427 NULL otherwise. */
2429 static gimple
2430 get_def_for_expr (tree name, enum tree_code code)
2432 gimple def_stmt;
2434 if (TREE_CODE (name) != SSA_NAME)
2435 return NULL;
2437 def_stmt = get_gimple_for_ssa_name (name);
2438 if (!def_stmt
2439 || gimple_assign_rhs_code (def_stmt) != code)
2440 return NULL;
2442 return def_stmt;
2445 #ifdef HAVE_conditional_move
2446 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2447 assignment and the class of the expression on the RHS is TCLASS. Return
2448 NULL otherwise. */
2450 static gimple
2451 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2453 gimple def_stmt;
2455 if (TREE_CODE (name) != SSA_NAME)
2456 return NULL;
2458 def_stmt = get_gimple_for_ssa_name (name);
2459 if (!def_stmt
2460 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2461 return NULL;
2463 return def_stmt;
2465 #endif
2468 /* Determine whether the LEN bytes generated by CONSTFUN can be
2469 stored to memory using several move instructions. CONSTFUNDATA is
2470 a pointer which will be passed as argument in every CONSTFUN call.
2471 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2472 a memset operation and false if it's a copy of a constant string.
2473 Return nonzero if a call to store_by_pieces should succeed. */
2475 int
2476 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2477 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2478 void *constfundata, unsigned int align, bool memsetp)
2480 unsigned HOST_WIDE_INT l;
2481 unsigned int max_size;
2482 HOST_WIDE_INT offset = 0;
2483 machine_mode mode;
2484 enum insn_code icode;
2485 int reverse;
2486 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2487 rtx cst ATTRIBUTE_UNUSED;
2489 if (len == 0)
2490 return 1;
2492 if (! (memsetp
2493 ? SET_BY_PIECES_P (len, align)
2494 : STORE_BY_PIECES_P (len, align)))
2495 return 0;
2497 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2499 /* We would first store what we can in the largest integer mode, then go to
2500 successively smaller modes. */
2502 for (reverse = 0;
2503 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2504 reverse++)
2506 l = len;
2507 max_size = STORE_MAX_PIECES + 1;
2508 while (max_size > 1 && l > 0)
2510 mode = widest_int_mode_for_size (max_size);
2512 if (mode == VOIDmode)
2513 break;
2515 icode = optab_handler (mov_optab, mode);
2516 if (icode != CODE_FOR_nothing
2517 && align >= GET_MODE_ALIGNMENT (mode))
2519 unsigned int size = GET_MODE_SIZE (mode);
2521 while (l >= size)
2523 if (reverse)
2524 offset -= size;
2526 cst = (*constfun) (constfundata, offset, mode);
2527 if (!targetm.legitimate_constant_p (mode, cst))
2528 return 0;
2530 if (!reverse)
2531 offset += size;
2533 l -= size;
2537 max_size = GET_MODE_SIZE (mode);
2540 /* The code above should have handled everything. */
2541 gcc_assert (!l);
2544 return 1;
2547 /* Generate several move instructions to store LEN bytes generated by
2548 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2549 pointer which will be passed as argument in every CONSTFUN call.
2550 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2551 a memset operation and false if it's a copy of a constant string.
2552 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2553 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2554 stpcpy. */
2556 rtx
2557 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2558 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2559 void *constfundata, unsigned int align, bool memsetp, int endp)
2561 machine_mode to_addr_mode = get_address_mode (to);
2562 struct store_by_pieces_d data;
2564 if (len == 0)
2566 gcc_assert (endp != 2);
2567 return to;
2570 gcc_assert (memsetp
2571 ? SET_BY_PIECES_P (len, align)
2572 : STORE_BY_PIECES_P (len, align));
2573 data.constfun = constfun;
2574 data.constfundata = constfundata;
2575 data.len = len;
2576 data.to = to;
2577 store_by_pieces_1 (&data, align);
2578 if (endp)
2580 rtx to1;
2582 gcc_assert (!data.reverse);
2583 if (data.autinc_to)
2585 if (endp == 2)
2587 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2588 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2589 else
2590 data.to_addr = copy_to_mode_reg (to_addr_mode,
2591 plus_constant (to_addr_mode,
2592 data.to_addr,
2593 -1));
2595 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2596 data.offset);
2598 else
2600 if (endp == 2)
2601 --data.offset;
2602 to1 = adjust_address (data.to, QImode, data.offset);
2604 return to1;
2606 else
2607 return data.to;
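/* A minimal sketch of a CONSTFUN callback for a memset-style use of
   store_by_pieces, along the lines of the callbacks in builtins.c; the
   function name is hypothetical and MODE is assumed to fit within a
   HOST_WIDE_INT.  It ignores OFFSET and replicates the byte pointed to
   by CONSTFUNDATA across MODE:

       static rtx
       example_memset_constfun (void *data,
                                HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                                machine_mode mode)
       {
         unsigned char c = *(unsigned char *) data;
         unsigned HOST_WIDE_INT val = 0;
         unsigned int i;

         for (i = 0; i < GET_MODE_SIZE (mode); i++)
           val = (val << 8) | c;
         return gen_int_mode (val, mode);
       }  */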
2610 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2611 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2613 static void
2614 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2616 struct store_by_pieces_d data;
2618 if (len == 0)
2619 return;
2621 data.constfun = clear_by_pieces_1;
2622 data.constfundata = NULL;
2623 data.len = len;
2624 data.to = to;
2625 store_by_pieces_1 (&data, align);
2628 /* Callback routine for clear_by_pieces.
2629 Return const0_rtx unconditionally. */
2631 static rtx
2632 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2633 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2634 machine_mode mode ATTRIBUTE_UNUSED)
2636 return const0_rtx;
2639 /* Subroutine of clear_by_pieces and store_by_pieces.
2640 Generate several move instructions to store LEN bytes of block TO. (A MEM
2641 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2643 static void
2644 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2645 unsigned int align ATTRIBUTE_UNUSED)
2647 machine_mode to_addr_mode = get_address_mode (data->to);
2648 rtx to_addr = XEXP (data->to, 0);
2649 unsigned int max_size = STORE_MAX_PIECES + 1;
2650 enum insn_code icode;
2652 data->offset = 0;
2653 data->to_addr = to_addr;
2654 data->autinc_to
2655 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2656 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2658 data->explicit_inc_to = 0;
2659 data->reverse
2660 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2661 if (data->reverse)
2662 data->offset = data->len;
2664 /* If storing requires more than two move insns,
2665 copy addresses to registers (to make displacements shorter)
2666 and use post-increment if available. */
2667 if (!data->autinc_to
2668 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2670 /* Determine the main mode we'll be using.
2671 MODE might not be used depending on the definitions of the
2672 USE_* macros below. */
2673 machine_mode mode ATTRIBUTE_UNUSED
2674 = widest_int_mode_for_size (max_size);
2676 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2678 data->to_addr = copy_to_mode_reg (to_addr_mode,
2679 plus_constant (to_addr_mode,
2680 to_addr,
2681 data->len));
2682 data->autinc_to = 1;
2683 data->explicit_inc_to = -1;
2686 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2687 && ! data->autinc_to)
2689 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2690 data->autinc_to = 1;
2691 data->explicit_inc_to = 1;
2694 if ( !data->autinc_to && CONSTANT_P (to_addr))
2695 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2698 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2700 /* First store what we can in the largest integer mode, then go to
2701 successively smaller modes. */
2703 while (max_size > 1 && data->len > 0)
2705 machine_mode mode = widest_int_mode_for_size (max_size);
2707 if (mode == VOIDmode)
2708 break;
2710 icode = optab_handler (mov_optab, mode);
2711 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2712 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2714 max_size = GET_MODE_SIZE (mode);
2717 /* The code above should have handled everything. */
2718 gcc_assert (!data->len);
2721 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2722 with move instructions for mode MODE. GENFUN is the gen_... function
2723 to make a move insn for that mode. DATA has all the other info. */
2725 static void
2726 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2727 struct store_by_pieces_d *data)
2729 unsigned int size = GET_MODE_SIZE (mode);
2730 rtx to1, cst;
2732 while (data->len >= size)
2734 if (data->reverse)
2735 data->offset -= size;
2737 if (data->autinc_to)
2738 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2739 data->offset);
2740 else
2741 to1 = adjust_address (data->to, mode, data->offset);
2743 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2744 emit_insn (gen_add2_insn (data->to_addr,
2745 gen_int_mode (-(HOST_WIDE_INT) size,
2746 GET_MODE (data->to_addr))));
2748 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2749 emit_insn ((*genfun) (to1, cst));
2751 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2752 emit_insn (gen_add2_insn (data->to_addr,
2753 gen_int_mode (size,
2754 GET_MODE (data->to_addr))));
2756 if (! data->reverse)
2757 data->offset += size;
2759 data->len -= size;
2763 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2764 its length in bytes. */
2766 rtx
2767 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2768 unsigned int expected_align, HOST_WIDE_INT expected_size,
2769 unsigned HOST_WIDE_INT min_size,
2770 unsigned HOST_WIDE_INT max_size,
2771 unsigned HOST_WIDE_INT probable_max_size)
2773 machine_mode mode = GET_MODE (object);
2774 unsigned int align;
2776 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2778 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2779 just move a zero. Otherwise, do this a piece at a time. */
2780 if (mode != BLKmode
2781 && CONST_INT_P (size)
2782 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2784 rtx zero = CONST0_RTX (mode);
2785 if (zero != NULL)
2787 emit_move_insn (object, zero);
2788 return NULL;
2791 if (COMPLEX_MODE_P (mode))
2793 zero = CONST0_RTX (GET_MODE_INNER (mode));
2794 if (zero != NULL)
2796 write_complex_part (object, zero, 0);
2797 write_complex_part (object, zero, 1);
2798 return NULL;
2803 if (size == const0_rtx)
2804 return NULL;
2806 align = MEM_ALIGN (object);
2808 if (CONST_INT_P (size)
2809 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2810 clear_by_pieces (object, INTVAL (size), align);
2811 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2812 expected_align, expected_size,
2813 min_size, max_size, probable_max_size))
2815 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2816 return set_storage_via_libcall (object, size, const0_rtx,
2817 method == BLOCK_OP_TAILCALL);
2818 else
2819 gcc_unreachable ();
2821 return NULL;
2824 rtx
2825 clear_storage (rtx object, rtx size, enum block_op_methods method)
2827 unsigned HOST_WIDE_INT max, min = 0;
2828 if (GET_CODE (size) == CONST_INT)
2829 min = max = UINTVAL (size);
2830 else
2831 max = GET_MODE_MASK (GET_MODE (size));
2832 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
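/* A minimal usage sketch, assuming a hypothetical BLKmode MEM
   "dest_mem" of byte size "size": zeroing it goes through

       clear_storage (dest_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   The _hints variant above is for callers that can also bound the
   expected, minimal and maximal sizes, e.g. from profile feedback.  */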
2836 /* A subroutine of clear_storage. Expand a call to memset.
2837 Return the return value of memset, 0 otherwise. */
2839 rtx
2840 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2842 tree call_expr, fn, object_tree, size_tree, val_tree;
2843 machine_mode size_mode;
2844 rtx retval;
2846 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2847 place those new pseudos into a VAR_DECL and use them later. */
2849 object = copy_addr_to_reg (XEXP (object, 0));
2851 size_mode = TYPE_MODE (sizetype);
2852 size = convert_to_mode (size_mode, size, 1);
2853 size = copy_to_mode_reg (size_mode, size);
2855 /* It is incorrect to use the libcall calling conventions to call
2856 memset in this context. This could be a user call to memset and
2857 the user may wish to examine the return value from memset. For
2858 targets where libcalls and normal calls have different conventions
2859 for returning pointers, we could end up generating incorrect code. */
2861 object_tree = make_tree (ptr_type_node, object);
2862 if (!CONST_INT_P (val))
2863 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2864 size_tree = make_tree (sizetype, size);
2865 val_tree = make_tree (integer_type_node, val);
2867 fn = clear_storage_libcall_fn (true);
2868 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2869 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2871 retval = expand_normal (call_expr);
2873 return retval;
2876 /* A subroutine of set_storage_via_libcall. Create the tree node
2877 for the function we use for block clears. */
2879 tree block_clear_fn;
2881 void
2882 init_block_clear_fn (const char *asmspec)
2884 if (!block_clear_fn)
2886 tree fn, args;
2888 fn = get_identifier ("memset");
2889 args = build_function_type_list (ptr_type_node, ptr_type_node,
2890 integer_type_node, sizetype,
2891 NULL_TREE);
2893 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2894 DECL_EXTERNAL (fn) = 1;
2895 TREE_PUBLIC (fn) = 1;
2896 DECL_ARTIFICIAL (fn) = 1;
2897 TREE_NOTHROW (fn) = 1;
2898 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2899 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2901 block_clear_fn = fn;
2904 if (asmspec)
2905 set_user_assembler_name (block_clear_fn, asmspec);
2908 static tree
2909 clear_storage_libcall_fn (int for_call)
2911 static bool emitted_extern;
2913 if (!block_clear_fn)
2914 init_block_clear_fn (NULL);
2916 if (for_call && !emitted_extern)
2918 emitted_extern = true;
2919 make_decl_rtl (block_clear_fn);
2922 return block_clear_fn;
2925 /* Expand a setmem pattern; return true if successful. */
2927 bool
2928 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2929 unsigned int expected_align, HOST_WIDE_INT expected_size,
2930 unsigned HOST_WIDE_INT min_size,
2931 unsigned HOST_WIDE_INT max_size,
2932 unsigned HOST_WIDE_INT probable_max_size)
2934 /* Try the most limited insn first, because there's no point
2935 including more than one in the machine description unless
2936 the more limited one has some advantage. */
2938 machine_mode mode;
2940 if (expected_align < align)
2941 expected_align = align;
2942 if (expected_size != -1)
2944 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2945 expected_size = max_size;
2946 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2947 expected_size = min_size;
2950 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2951 mode = GET_MODE_WIDER_MODE (mode))
2953 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2955 if (code != CODE_FOR_nothing
2956 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2957 here because if SIZE is less than the mode mask, as it is
2958 returned by the macro, it will definitely be less than the
2959 actual mode mask. Since SIZE is within the Pmode address
2960 space, we limit MODE to Pmode. */
2961 && ((CONST_INT_P (size)
2962 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2963 <= (GET_MODE_MASK (mode) >> 1)))
2964 || max_size <= (GET_MODE_MASK (mode) >> 1)
2965 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2967 struct expand_operand ops[9];
2968 unsigned int nops;
2970 nops = insn_data[(int) code].n_generator_args;
2971 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2973 create_fixed_operand (&ops[0], object);
2974 /* The check above guarantees that this size conversion is valid. */
2975 create_convert_operand_to (&ops[1], size, mode, true);
2976 create_convert_operand_from (&ops[2], val, byte_mode, true);
2977 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2978 if (nops >= 6)
2980 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2981 create_integer_operand (&ops[5], expected_size);
2983 if (nops >= 8)
2985 create_integer_operand (&ops[6], min_size);
2986 /* If we cannot represent the maximal size,
2987 make the parameter NULL. */
2988 if ((HOST_WIDE_INT) max_size != -1)
2989 create_integer_operand (&ops[7], max_size);
2990 else
2991 create_fixed_operand (&ops[7], NULL);
2993 if (nops == 9)
2995 /* If we cannot represent the maximal size,
2996 make the parameter NULL. */
2997 if ((HOST_WIDE_INT) probable_max_size != -1)
2998 create_integer_operand (&ops[8], probable_max_size);
2999 else
3000 create_fixed_operand (&ops[8], NULL);
3002 if (maybe_expand_insn (code, nops, ops))
3003 return true;
3007 return false;
3011 /* Write to one of the components of the complex value CPLX. Write VAL to
3012 the real part if IMAG_P is false, and the imaginary part if it's true. */
3014 static void
3015 write_complex_part (rtx cplx, rtx val, bool imag_p)
3017 machine_mode cmode;
3018 machine_mode imode;
3019 unsigned ibitsize;
3021 if (GET_CODE (cplx) == CONCAT)
3023 emit_move_insn (XEXP (cplx, imag_p), val);
3024 return;
3027 cmode = GET_MODE (cplx);
3028 imode = GET_MODE_INNER (cmode);
3029 ibitsize = GET_MODE_BITSIZE (imode);
3031 /* For MEMs simplify_gen_subreg may generate an invalid new address
3032 because, e.g., the original address is considered mode-dependent
3033 by the target, which restricts simplify_subreg from invoking
3034 adjust_address_nv. Instead of preparing fallback support for an
3035 invalid address, we call adjust_address_nv directly. */
3036 if (MEM_P (cplx))
3038 emit_move_insn (adjust_address_nv (cplx, imode,
3039 imag_p ? GET_MODE_SIZE (imode) : 0),
3040 val);
3041 return;
3044 /* If the sub-object is at least word sized, then we know that subregging
3045 will work. This special case is important, since store_bit_field
3046 wants to operate on integer modes, and there's rarely an OImode to
3047 correspond to TCmode. */
3048 if (ibitsize >= BITS_PER_WORD
3049 /* For hard regs we have exact predicates. Assume we can split
3050 the original object if it spans an even number of hard regs.
3051 This special case is important for SCmode on 64-bit platforms
3052 where the natural size of floating-point regs is 32-bit. */
3053 || (REG_P (cplx)
3054 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3055 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3057 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3058 imag_p ? GET_MODE_SIZE (imode) : 0);
3059 if (part)
3061 emit_move_insn (part, val);
3062 return;
3064 else
3065 /* simplify_gen_subreg may fail for sub-word MEMs. */
3066 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3069 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
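/* For illustration: when CPLX is a CONCAT such as
   (concat:SC (reg:SF r1) (reg:SF r2)), the function reduces to a single
   emit_move_insn into XEXP (cplx, IMAG_P); the store_bit_field fallback
   above is only reached for sub-word parts that cannot be accessed via
   a subreg.  The register names are only placeholders.  */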
3072 /* Extract one of the components of the complex value CPLX. Extract the
3073 real part if IMAG_P is false, and the imaginary part if it's true. */
3075 static rtx
3076 read_complex_part (rtx cplx, bool imag_p)
3078 machine_mode cmode, imode;
3079 unsigned ibitsize;
3081 if (GET_CODE (cplx) == CONCAT)
3082 return XEXP (cplx, imag_p);
3084 cmode = GET_MODE (cplx);
3085 imode = GET_MODE_INNER (cmode);
3086 ibitsize = GET_MODE_BITSIZE (imode);
3088 /* Special case reads from complex constants that got spilled to memory. */
3089 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3091 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3092 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3094 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3095 if (CONSTANT_CLASS_P (part))
3096 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3100 /* For MEMs simplify_gen_subreg may generate an invalid new address
3101 because, e.g., the original address is considered mode-dependent
3102 by the target, which restricts simplify_subreg from invoking
3103 adjust_address_nv. Instead of preparing fallback support for an
3104 invalid address, we call adjust_address_nv directly. */
3105 if (MEM_P (cplx))
3106 return adjust_address_nv (cplx, imode,
3107 imag_p ? GET_MODE_SIZE (imode) : 0);
3109 /* If the sub-object is at least word sized, then we know that subregging
3110 will work. This special case is important, since extract_bit_field
3111 wants to operate on integer modes, and there's rarely an OImode to
3112 correspond to TCmode. */
3113 if (ibitsize >= BITS_PER_WORD
3114 /* For hard regs we have exact predicates. Assume we can split
3115 the original object if it spans an even number of hard regs.
3116 This special case is important for SCmode on 64-bit platforms
3117 where the natural size of floating-point regs is 32-bit. */
3118 || (REG_P (cplx)
3119 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3120 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3122 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3123 imag_p ? GET_MODE_SIZE (imode) : 0);
3124 if (ret)
3125 return ret;
3126 else
3127 /* simplify_gen_subreg may fail for sub-word MEMs. */
3128 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3131 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3132 true, NULL_RTX, imode, imode);
3135 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3136 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3137 represented in NEW_MODE. If FORCE is true, this will never happen, as
3138 we'll force-create a SUBREG if needed. */
3140 static rtx
3141 emit_move_change_mode (machine_mode new_mode,
3142 machine_mode old_mode, rtx x, bool force)
3144 rtx ret;
3146 if (push_operand (x, GET_MODE (x)))
3148 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3149 MEM_COPY_ATTRIBUTES (ret, x);
3151 else if (MEM_P (x))
3153 /* We don't have to worry about changing the address since the
3154 size in bytes is supposed to be the same. */
3155 if (reload_in_progress)
3157 /* Copy the MEM to change the mode and move any
3158 substitutions from the old MEM to the new one. */
3159 ret = adjust_address_nv (x, new_mode, 0);
3160 copy_replacements (x, ret);
3162 else
3163 ret = adjust_address (x, new_mode, 0);
3165 else
3167 /* Note that we do want simplify_subreg's behavior of validating
3168 that the new mode is ok for a hard register. If we were to use
3169 simplify_gen_subreg, we would create the subreg, but would
3170 probably run into the target not being able to implement it. */
3171 /* Except, of course, when FORCE is true, when this is exactly what
3172 we want. Which is needed for CCmodes on some targets. */
3173 if (force)
3174 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3175 else
3176 ret = simplify_subreg (new_mode, x, old_mode, 0);
3179 return ret;
3182 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3183 an integer mode of the same size as MODE. Returns the instruction
3184 emitted, or NULL if such a move could not be generated. */
3186 static rtx_insn *
3187 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3189 machine_mode imode;
3190 enum insn_code code;
3192 /* There must exist a mode of the exact size we require. */
3193 imode = int_mode_for_mode (mode);
3194 if (imode == BLKmode)
3195 return NULL;
3197 /* The target must support moves in this mode. */
3198 code = optab_handler (mov_optab, imode);
3199 if (code == CODE_FOR_nothing)
3200 return NULL;
3202 x = emit_move_change_mode (imode, mode, x, force);
3203 if (x == NULL_RTX)
3204 return NULL;
3205 y = emit_move_change_mode (imode, mode, y, force);
3206 if (y == NULL_RTX)
3207 return NULL;
3208 return emit_insn (GEN_FCN (code) (x, y));
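/* For illustration: this path lets a target without, say, a movsf
   pattern still move SFmode values; the operands are recast as SImode
   via emit_move_change_mode and the SImode move pattern is used
   instead.  */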
3211 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3212 Return an equivalent MEM that does not use an auto-increment. */
3214 rtx
3215 emit_move_resolve_push (machine_mode mode, rtx x)
3217 enum rtx_code code = GET_CODE (XEXP (x, 0));
3218 HOST_WIDE_INT adjust;
3219 rtx temp;
3221 adjust = GET_MODE_SIZE (mode);
3222 #ifdef PUSH_ROUNDING
3223 adjust = PUSH_ROUNDING (adjust);
3224 #endif
3225 if (code == PRE_DEC || code == POST_DEC)
3226 adjust = -adjust;
3227 else if (code == PRE_MODIFY || code == POST_MODIFY)
3229 rtx expr = XEXP (XEXP (x, 0), 1);
3230 HOST_WIDE_INT val;
3232 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3233 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3234 val = INTVAL (XEXP (expr, 1));
3235 if (GET_CODE (expr) == MINUS)
3236 val = -val;
3237 gcc_assert (adjust == val || adjust == -val);
3238 adjust = val;
3241 /* Do not use anti_adjust_stack, since we don't want to update
3242 stack_pointer_delta. */
3243 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3244 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3245 0, OPTAB_LIB_WIDEN);
3246 if (temp != stack_pointer_rtx)
3247 emit_move_insn (stack_pointer_rtx, temp);
3249 switch (code)
3251 case PRE_INC:
3252 case PRE_DEC:
3253 case PRE_MODIFY:
3254 temp = stack_pointer_rtx;
3255 break;
3256 case POST_INC:
3257 case POST_DEC:
3258 case POST_MODIFY:
3259 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3260 break;
3261 default:
3262 gcc_unreachable ();
3265 return replace_equiv_address (x, temp);
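/* Worked example: for a push such as (mem:SI (pre_dec:SI (reg sp))) on
   a target where PUSH_ROUNDING is the identity, ADJUST becomes -4; the
   stack pointer is adjusted explicitly above and the returned MEM
   simply addresses the new top of stack, so the caller can emit an
   ordinary move.  */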
3268 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3269 X is known to satisfy push_operand, and MODE is known to be complex.
3270 Returns the last instruction emitted. */
3272 rtx_insn *
3273 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3275 machine_mode submode = GET_MODE_INNER (mode);
3276 bool imag_first;
3278 #ifdef PUSH_ROUNDING
3279 unsigned int submodesize = GET_MODE_SIZE (submode);
3281 /* In case we output to the stack, but the size is smaller than the
3282 machine can push exactly, we need to use move instructions. */
3283 if (PUSH_ROUNDING (submodesize) != submodesize)
3285 x = emit_move_resolve_push (mode, x);
3286 return emit_move_insn (x, y);
3288 #endif
3290 /* Note that the real part always precedes the imag part in memory
3291 regardless of machine's endianness. */
3292 switch (GET_CODE (XEXP (x, 0)))
3294 case PRE_DEC:
3295 case POST_DEC:
3296 imag_first = true;
3297 break;
3298 case PRE_INC:
3299 case POST_INC:
3300 imag_first = false;
3301 break;
3302 default:
3303 gcc_unreachable ();
3306 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3307 read_complex_part (y, imag_first));
3308 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3309 read_complex_part (y, !imag_first));
3312 /* A subroutine of emit_move_complex. Perform the move from Y to X
3313 via two moves of the parts. Returns the last instruction emitted. */
3315 rtx_insn *
3316 emit_move_complex_parts (rtx x, rtx y)
3318 /* Show the output dies here. This is necessary for SUBREGs
3319 of pseudos since we cannot track their lifetimes correctly;
3320 hard regs shouldn't appear here except as return values. */
3321 if (!reload_completed && !reload_in_progress
3322 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3323 emit_clobber (x);
3325 write_complex_part (x, read_complex_part (y, false), false);
3326 write_complex_part (x, read_complex_part (y, true), true);
3328 return get_last_insn ();
3331 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3332 MODE is known to be complex. Returns the last instruction emitted. */
3334 static rtx_insn *
3335 emit_move_complex (machine_mode mode, rtx x, rtx y)
3337 bool try_int;
3339 /* Need to take special care for pushes, to maintain proper ordering
3340 of the data, and possibly extra padding. */
3341 if (push_operand (x, mode))
3342 return emit_move_complex_push (mode, x, y);
3344 /* See if we can coerce the target into moving both values at once, except
3345 for floating point where we favor moving as parts if this is easy. */
3346 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3347 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3348 && !(REG_P (x)
3349 && HARD_REGISTER_P (x)
3350 && hard_regno_nregs[REGNO (x)][mode] == 1)
3351 && !(REG_P (y)
3352 && HARD_REGISTER_P (y)
3353 && hard_regno_nregs[REGNO (y)][mode] == 1))
3354 try_int = false;
3355 /* Not possible if the values are inherently not adjacent. */
3356 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3357 try_int = false;
3358 /* Is possible if both are registers (or subregs of registers). */
3359 else if (register_operand (x, mode) && register_operand (y, mode))
3360 try_int = true;
3361 /* If one of the operands is a memory, and alignment constraints
3362 are friendly enough, we may be able to do combined memory operations.
3363 We do not attempt this if Y is a constant because that combination is
3364 usually handled better by the by-parts copy below. */
3365 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3366 && (!STRICT_ALIGNMENT
3367 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3368 try_int = true;
3369 else
3370 try_int = false;
3372 if (try_int)
3374 rtx_insn *ret;
3376 /* For memory to memory moves, optimal behavior can be had with the
3377 existing block move logic. */
3378 if (MEM_P (x) && MEM_P (y))
3380 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3381 BLOCK_OP_NO_LIBCALL);
3382 return get_last_insn ();
3385 ret = emit_move_via_integer (mode, x, y, true);
3386 if (ret)
3387 return ret;
3390 return emit_move_complex_parts (x, y);
3393 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3394 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3396 static rtx_insn *
3397 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3399 rtx_insn *ret;
3401 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3402 if (mode != CCmode)
3404 enum insn_code code = optab_handler (mov_optab, CCmode);
3405 if (code != CODE_FOR_nothing)
3407 x = emit_move_change_mode (CCmode, mode, x, true);
3408 y = emit_move_change_mode (CCmode, mode, y, true);
3409 return emit_insn (GEN_FCN (code) (x, y));
3413 /* Otherwise, find the MODE_INT mode of the same width. */
3414 ret = emit_move_via_integer (mode, x, y, false);
3415 gcc_assert (ret != NULL);
3416 return ret;
3419 /* Return true if word I of OP lies entirely in the
3420 undefined bits of a paradoxical subreg. */
3422 static bool
3423 undefined_operand_subword_p (const_rtx op, int i)
3425 machine_mode innermode, innermostmode;
3426 int offset;
3427 if (GET_CODE (op) != SUBREG)
3428 return false;
3429 innermode = GET_MODE (op);
3430 innermostmode = GET_MODE (SUBREG_REG (op));
3431 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3432 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3433 memory, except for a paradoxical subreg where we define
3434 SUBREG_BYTE to be 0; undo this exception as in
3435 simplify_subreg. */
3436 if (SUBREG_BYTE (op) == 0
3437 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3439 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3440 if (WORDS_BIG_ENDIAN)
3441 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3442 if (BYTES_BIG_ENDIAN)
3443 offset += difference % UNITS_PER_WORD;
3445 if (offset >= GET_MODE_SIZE (innermostmode)
3446 || offset <= -GET_MODE_SIZE (word_mode))
3447 return true;
3448 return false;
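/* For illustration: for a paradoxical subreg such as
   (subreg:TI (reg:DI x) 0) on a little-endian 64-bit target, word 0
   comes from the DImode register while word 1 lies entirely in the
   undefined upper half, so emit_move_multi_word below can skip the
   move for that word.  */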
3451 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3452 MODE is any multi-word or full-word mode that lacks a move_insn
3453 pattern. Note that you will get better code if you define such
3454 patterns, even if they must turn into multiple assembler instructions. */
3456 static rtx_insn *
3457 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3459 rtx_insn *last_insn = 0;
3460 rtx_insn *seq;
3461 rtx inner;
3462 bool need_clobber;
3463 int i;
3465 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3467 /* If X is a push on the stack, do the push now and replace
3468 X with a reference to the stack pointer. */
3469 if (push_operand (x, mode))
3470 x = emit_move_resolve_push (mode, x);
3472 /* If we are in reload, see if either operand is a MEM whose address
3473 is scheduled for replacement. */
3474 if (reload_in_progress && MEM_P (x)
3475 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3476 x = replace_equiv_address_nv (x, inner);
3477 if (reload_in_progress && MEM_P (y)
3478 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3479 y = replace_equiv_address_nv (y, inner);
3481 start_sequence ();
3483 need_clobber = false;
3484 for (i = 0;
3485 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3486 i++)
3488 rtx xpart = operand_subword (x, i, 1, mode);
3489 rtx ypart;
3491 /* Do not generate code for a move if it would come entirely
3492 from the undefined bits of a paradoxical subreg. */
3493 if (undefined_operand_subword_p (y, i))
3494 continue;
3496 ypart = operand_subword (y, i, 1, mode);
3498 /* If we can't get a part of Y, put Y into memory if it is a
3499 constant. Otherwise, force it into a register. Then we must
3500 be able to get a part of Y. */
3501 if (ypart == 0 && CONSTANT_P (y))
3503 y = use_anchored_address (force_const_mem (mode, y));
3504 ypart = operand_subword (y, i, 1, mode);
3506 else if (ypart == 0)
3507 ypart = operand_subword_force (y, i, mode);
3509 gcc_assert (xpart && ypart);
3511 need_clobber |= (GET_CODE (xpart) == SUBREG);
3513 last_insn = emit_move_insn (xpart, ypart);
3516 seq = get_insns ();
3517 end_sequence ();
3519 /* Show the output dies here. This is necessary for SUBREGs
3520 of pseudos since we cannot track their lifetimes correctly;
3521 hard regs shouldn't appear here except as return values.
3522 We never want to emit such a clobber after reload. */
3523 if (x != y
3524 && ! (reload_in_progress || reload_completed)
3525 && need_clobber != 0)
3526 emit_clobber (x);
3528 emit_insn (seq);
3530 return last_insn;
3533 /* Low level part of emit_move_insn.
3534 Called just like emit_move_insn, but assumes X and Y
3535 are basically valid. */
3537 rtx_insn *
3538 emit_move_insn_1 (rtx x, rtx y)
3540 machine_mode mode = GET_MODE (x);
3541 enum insn_code code;
3543 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3545 code = optab_handler (mov_optab, mode);
3546 if (code != CODE_FOR_nothing)
3547 return emit_insn (GEN_FCN (code) (x, y));
3549 /* Expand complex moves by moving real part and imag part. */
3550 if (COMPLEX_MODE_P (mode))
3551 return emit_move_complex (mode, x, y);
3553 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3554 || ALL_FIXED_POINT_MODE_P (mode))
3556 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3558 /* If we can't find an integer mode, use multi words. */
3559 if (result)
3560 return result;
3561 else
3562 return emit_move_multi_word (mode, x, y);
3565 if (GET_MODE_CLASS (mode) == MODE_CC)
3566 return emit_move_ccmode (mode, x, y);
3568 /* Try using a move pattern for the corresponding integer mode. This is
3569 only safe when simplify_subreg can convert MODE constants into integer
3570 constants. At present, it can only do this reliably if the value
3571 fits within a HOST_WIDE_INT. */
3572 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3574 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3576 if (ret)
3578 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3579 return ret;
3583 return emit_move_multi_word (mode, x, y);
3586 /* Generate code to copy Y into X.
3587 Both Y and X must have the same mode, except that
3588 Y can be a constant with VOIDmode.
3589 This mode cannot be BLKmode; use emit_block_move for that.
3591 Return the last instruction emitted. */
3593 rtx_insn *
3594 emit_move_insn (rtx x, rtx y)
3596 machine_mode mode = GET_MODE (x);
3597 rtx y_cst = NULL_RTX;
3598 rtx_insn *last_insn;
3599 rtx set;
3601 gcc_assert (mode != BLKmode
3602 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3604 if (CONSTANT_P (y))
3606 if (optimize
3607 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3608 && (last_insn = compress_float_constant (x, y)))
3609 return last_insn;
3611 y_cst = y;
3613 if (!targetm.legitimate_constant_p (mode, y))
3615 y = force_const_mem (mode, y);
3617 /* If the target's cannot_force_const_mem prevented the spill,
3618 assume that the target's move expanders will also take care
3619 of the non-legitimate constant. */
3620 if (!y)
3621 y = y_cst;
3622 else
3623 y = use_anchored_address (y);
3627 /* If X or Y are memory references, verify that their addresses are valid
3628 for the machine. */
3629 if (MEM_P (x)
3630 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3631 MEM_ADDR_SPACE (x))
3632 && ! push_operand (x, GET_MODE (x))))
3633 x = validize_mem (x);
3635 if (MEM_P (y)
3636 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3637 MEM_ADDR_SPACE (y)))
3638 y = validize_mem (y);
3640 gcc_assert (mode != BLKmode);
3642 last_insn = emit_move_insn_1 (x, y);
3644 if (y_cst && REG_P (x)
3645 && (set = single_set (last_insn)) != NULL_RTX
3646 && SET_DEST (set) == x
3647 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3648 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3650 return last_insn;
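/* A minimal usage sketch: copying a constant into a fresh pseudo goes
   through this routine, e.g.

       rtx tmp = gen_reg_rtx (SImode);
       emit_move_insn (tmp, GEN_INT (42));

   Constants that are not legitimate for the mode are forced into the
   constant pool above before the move is emitted.  */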
3653 /* If Y is representable exactly in a narrower mode, and the target can
3654 perform the extension directly from constant or memory, then emit the
3655 move as an extension. */
3657 static rtx_insn *
3658 compress_float_constant (rtx x, rtx y)
3660 machine_mode dstmode = GET_MODE (x);
3661 machine_mode orig_srcmode = GET_MODE (y);
3662 machine_mode srcmode;
3663 REAL_VALUE_TYPE r;
3664 int oldcost, newcost;
3665 bool speed = optimize_insn_for_speed_p ();
3667 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3669 if (targetm.legitimate_constant_p (dstmode, y))
3670 oldcost = set_src_cost (y, speed);
3671 else
3672 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3674 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3675 srcmode != orig_srcmode;
3676 srcmode = GET_MODE_WIDER_MODE (srcmode))
3678 enum insn_code ic;
3679 rtx trunc_y;
3680 rtx_insn *last_insn;
3682 /* Skip if the target can't extend this way. */
3683 ic = can_extend_p (dstmode, srcmode, 0);
3684 if (ic == CODE_FOR_nothing)
3685 continue;
3687 /* Skip if the narrowed value isn't exact. */
3688 if (! exact_real_truncate (srcmode, &r))
3689 continue;
3691 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3693 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3695 /* Skip if the target needs extra instructions to perform
3696 the extension. */
3697 if (!insn_operand_matches (ic, 1, trunc_y))
3698 continue;
3699 /* This is valid, but may not be cheaper than the original. */
3700 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3701 speed);
3702 if (oldcost < newcost)
3703 continue;
3705 else if (float_extend_from_mem[dstmode][srcmode])
3707 trunc_y = force_const_mem (srcmode, trunc_y);
3708 /* This is valid, but may not be cheaper than the original. */
3709 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3710 speed);
3711 if (oldcost < newcost)
3712 continue;
3713 trunc_y = validize_mem (trunc_y);
3715 else
3716 continue;
3718 /* For CSE's benefit, force the compressed constant pool entry
3719 into a new pseudo. This constant may be used in different modes,
3720 and if not, combine will put things back together for us. */
3721 trunc_y = force_reg (srcmode, trunc_y);
3723 /* If x is a hard register, perform the extension into a pseudo,
3724 so that e.g. stack realignment code is aware of it. */
3725 rtx target = x;
3726 if (REG_P (x) && HARD_REGISTER_P (x))
3727 target = gen_reg_rtx (dstmode);
3729 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3730 last_insn = get_last_insn ();
3732 if (REG_P (target))
3733 set_unique_reg_note (last_insn, REG_EQUAL, y);
3735 if (target != x)
3736 return emit_move_insn (x, target);
3737 return last_insn;
3740 return NULL;
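/* Worked example: moving the DFmode constant 1.0 on a target that can
   float_extend directly from memory is compressed here: 1.0 is exact
   in SFmode, so the constant is forced into an SFmode constant-pool
   entry and loaded with a single extending move, which is usually
   cheaper than loading a DFmode pool entry.  */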
3743 /* Pushing data onto the stack. */
3745 /* Push a block of length SIZE (perhaps variable)
3746 and return an rtx to address the beginning of the block.
3747 The value may be virtual_outgoing_args_rtx.
3749 EXTRA is the number of bytes of padding to push in addition to SIZE.
3750 BELOW nonzero means this padding comes at low addresses;
3751 otherwise, the padding comes at high addresses. */
3753 rtx
3754 push_block (rtx size, int extra, int below)
3756 rtx temp;
3758 size = convert_modes (Pmode, ptr_mode, size, 1);
3759 if (CONSTANT_P (size))
3760 anti_adjust_stack (plus_constant (Pmode, size, extra));
3761 else if (REG_P (size) && extra == 0)
3762 anti_adjust_stack (size);
3763 else
3765 temp = copy_to_mode_reg (Pmode, size);
3766 if (extra != 0)
3767 temp = expand_binop (Pmode, add_optab, temp,
3768 gen_int_mode (extra, Pmode),
3769 temp, 0, OPTAB_LIB_WIDEN);
3770 anti_adjust_stack (temp);
3773 #ifndef STACK_GROWS_DOWNWARD
3774 if (0)
3775 #else
3776 if (1)
3777 #endif
3779 temp = virtual_outgoing_args_rtx;
3780 if (extra != 0 && below)
3781 temp = plus_constant (Pmode, temp, extra);
3783 else
3785 if (CONST_INT_P (size))
3786 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3787 -INTVAL (size) - (below ? 0 : extra));
3788 else if (extra != 0 && !below)
3789 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3790 negate_rtx (Pmode, plus_constant (Pmode, size,
3791 extra)));
3792 else
3793 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3794 negate_rtx (Pmode, size));
3797 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
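/* For instance, push_block (GEN_INT (16), 4, 0) anti-adjusts the stack by
20 bytes; on a STACK_GROWS_DOWNWARD target the returned address is simply
virtual_outgoing_args_rtx (with BELOW zero the padding lies beyond the
block), while on an upward-growing stack it is virtual_outgoing_args_rtx
minus 20.  Illustrative only; the exact form depends on whether SIZE is a
CONST_INT.  */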
3800 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3802 static rtx
3803 mem_autoinc_base (rtx mem)
3805 if (MEM_P (mem))
3807 rtx addr = XEXP (mem, 0);
3808 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3809 return XEXP (addr, 0);
3811 return NULL;
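/* E.g. for (mem:SI (pre_dec (reg sp))) this returns the stack pointer
register, whereas for a plain (mem:SI (reg sp)) or for a non-MEM it
returns NULL.  */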
3814 /* A utility routine used here, in reload, and in try_split. The insns
3815 after PREV up to and including LAST are known to adjust the stack,
3816 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3817 placing notes as appropriate. PREV may be NULL, indicating the
3818 entire insn sequence prior to LAST should be scanned.
3820 The set of allowed stack pointer modifications is small:
3821 (1) One or more auto-inc style memory references (aka pushes),
3822 (2) One or more addition/subtraction with the SP as destination,
3823 (3) A single move insn with the SP as destination,
3824 (4) A call_pop insn,
3825 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3827 Insns in the sequence that do not modify the SP are ignored,
3828 except for noreturn calls.
3830 The return value is the amount of adjustment that can be trivially
3831 verified, via immediate operand or auto-inc. If the adjustment
3832 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3834 HOST_WIDE_INT
3835 find_args_size_adjust (rtx_insn *insn)
3837 rtx dest, set, pat;
3838 int i;
3840 pat = PATTERN (insn);
3841 set = NULL;
3843 /* Look for a call_pop pattern. */
3844 if (CALL_P (insn))
3846 /* We have to allow non-call_pop patterns for the case
3847 of emit_single_push_insn of a TLS address. */
3848 if (GET_CODE (pat) != PARALLEL)
3849 return 0;
3851 /* All call_pop have a stack pointer adjust in the parallel.
3852 The call itself is always first, and the stack adjust is
3853 usually last, so search from the end. */
3854 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3856 set = XVECEXP (pat, 0, i);
3857 if (GET_CODE (set) != SET)
3858 continue;
3859 dest = SET_DEST (set);
3860 if (dest == stack_pointer_rtx)
3861 break;
3863 /* We'd better have found the stack pointer adjust. */
3864 if (i == 0)
3865 return 0;
3866 /* Fall through to process the extracted SET and DEST
3867 as if it were a standalone insn. */
3869 else if (GET_CODE (pat) == SET)
3870 set = pat;
3871 else if ((set = single_set (insn)) != NULL)
3872 ;
3873 else if (GET_CODE (pat) == PARALLEL)
3875 /* ??? Some older ports use a parallel with a stack adjust
3876 and a store for a PUSH_ROUNDING pattern, rather than a
3877 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3878 /* ??? See h8300 and m68k, pushqi1. */
3879 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3881 set = XVECEXP (pat, 0, i);
3882 if (GET_CODE (set) != SET)
3883 continue;
3884 dest = SET_DEST (set);
3885 if (dest == stack_pointer_rtx)
3886 break;
3888 /* We do not expect an auto-inc of the sp in the parallel. */
3889 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3890 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3891 != stack_pointer_rtx);
3893 if (i < 0)
3894 return 0;
3896 else
3897 return 0;
3899 dest = SET_DEST (set);
3901 /* Look for direct modifications of the stack pointer. */
3902 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3904 /* Look for a trivial adjustment, otherwise assume nothing. */
3905 /* Note that the SPU restore_stack_block pattern refers to
3906 the stack pointer in V4SImode. Consider that non-trivial. */
3907 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3908 && GET_CODE (SET_SRC (set)) == PLUS
3909 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3910 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3911 return INTVAL (XEXP (SET_SRC (set), 1));
3912 /* ??? Reload can generate no-op moves, which will be cleaned
3913 up later. Recognize it and continue searching. */
3914 else if (rtx_equal_p (dest, SET_SRC (set)))
3915 return 0;
3916 else
3917 return HOST_WIDE_INT_MIN;
3919 else
3921 rtx mem, addr;
3923 /* Otherwise only think about autoinc patterns. */
3924 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3926 mem = dest;
3927 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3928 != stack_pointer_rtx);
3930 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3931 mem = SET_SRC (set);
3932 else
3933 return 0;
3935 addr = XEXP (mem, 0);
3936 switch (GET_CODE (addr))
3938 case PRE_INC:
3939 case POST_INC:
3940 return GET_MODE_SIZE (GET_MODE (mem));
3941 case PRE_DEC:
3942 case POST_DEC:
3943 return -GET_MODE_SIZE (GET_MODE (mem));
3944 case PRE_MODIFY:
3945 case POST_MODIFY:
3946 addr = XEXP (addr, 1);
3947 gcc_assert (GET_CODE (addr) == PLUS);
3948 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3949 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3950 return INTVAL (XEXP (addr, 1));
3951 default:
3952 gcc_unreachable ();
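/* Some illustrative return values for the cases handled above: an SImode
push through (mem:SI (pre_dec (reg sp))) yields -4, an insn doing
(set (reg sp) (plus (reg sp) (const_int -16))) yields -16, and a move of
some unrelated register into the stack pointer yields HOST_WIDE_INT_MIN,
since no adjustment can be extracted from such a pattern.  */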
3957 int
3958 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3960 int args_size = end_args_size;
3961 bool saw_unknown = false;
3962 rtx_insn *insn;
3964 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3966 HOST_WIDE_INT this_delta;
3968 if (!NONDEBUG_INSN_P (insn))
3969 continue;
3971 this_delta = find_args_size_adjust (insn);
3972 if (this_delta == 0)
3974 if (!CALL_P (insn)
3975 || ACCUMULATE_OUTGOING_ARGS
3976 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3977 continue;
3980 gcc_assert (!saw_unknown);
3981 if (this_delta == HOST_WIDE_INT_MIN)
3982 saw_unknown = true;
3984 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3985 #ifdef STACK_GROWS_DOWNWARD
3986 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3987 #endif
3988 args_size -= this_delta;
3991 return saw_unknown ? INT_MIN : args_size;
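/* As a sketch: if PREV..LAST contains two 4-byte pushes and END_ARGS_SIZE
is 8, the backward walk above attaches REG_ARGS_SIZE 8 to the second push
and REG_ARGS_SIZE 4 to the first, i.e. each note records the args size in
effect after its insn.  */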
3994 #ifdef PUSH_ROUNDING
3995 /* Emit single push insn. */
3997 static void
3998 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4000 rtx dest_addr;
4001 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4002 rtx dest;
4003 enum insn_code icode;
4005 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4006 /* If there is a push pattern, use it. Otherwise try the old way of
4007 throwing a MEM representing the push operation to the move expander. */
4008 icode = optab_handler (push_optab, mode);
4009 if (icode != CODE_FOR_nothing)
4011 struct expand_operand ops[1];
4013 create_input_operand (&ops[0], x, mode);
4014 if (maybe_expand_insn (icode, 1, ops))
4015 return;
4017 if (GET_MODE_SIZE (mode) == rounded_size)
4018 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4019 /* If we are to pad downward, adjust the stack pointer first and
4020 then store X into the stack location using an offset. This is
4021 because emit_move_insn does not know how to pad; it does not have
4022 access to type. */
4023 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4025 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4026 HOST_WIDE_INT offset;
4028 emit_move_insn (stack_pointer_rtx,
4029 expand_binop (Pmode,
4030 #ifdef STACK_GROWS_DOWNWARD
4031 sub_optab,
4032 #else
4033 add_optab,
4034 #endif
4035 stack_pointer_rtx,
4036 gen_int_mode (rounded_size, Pmode),
4037 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4039 offset = (HOST_WIDE_INT) padding_size;
4040 #ifdef STACK_GROWS_DOWNWARD
4041 if (STACK_PUSH_CODE == POST_DEC)
4042 /* We have already decremented the stack pointer, so get the
4043 previous value. */
4044 offset += (HOST_WIDE_INT) rounded_size;
4045 #else
4046 if (STACK_PUSH_CODE == POST_INC)
4047 /* We have already incremented the stack pointer, so get the
4048 previous value. */
4049 offset -= (HOST_WIDE_INT) rounded_size;
4050 #endif
4051 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4052 gen_int_mode (offset, Pmode));
4054 else
4056 #ifdef STACK_GROWS_DOWNWARD
4057 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4058 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4059 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4060 Pmode));
4061 #else
4062 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4063 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4064 gen_int_mode (rounded_size, Pmode));
4065 #endif
4066 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4069 dest = gen_rtx_MEM (mode, dest_addr);
4071 if (type != 0)
4073 set_mem_attributes (dest, type, 1);
4075 if (cfun->tail_call_marked)
4076 /* Function incoming arguments may overlap with sibling call
4077 outgoing arguments and we cannot allow reordering of reads
4078 from function arguments with stores to outgoing arguments
4079 of sibling calls. */
4080 set_mem_alias_set (dest, 0);
4082 emit_move_insn (dest, x);
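/* A rough example of the downward-padding path above: pushing an HImode
value whose PUSH_ROUNDING is 4 on a STACK_GROWS_DOWNWARD target first
drops the stack pointer by 4 and then stores the 2-byte value at sp + 2
(sp + 6 when STACK_PUSH_CODE is POST_DEC), leaving the padding at the
lower addresses as the downward FUNCTION_ARG_PADDING requested.  */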
4085 /* Emit and annotate a single push insn. */
4087 static void
4088 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4090 int delta, old_delta = stack_pointer_delta;
4091 rtx_insn *prev = get_last_insn ();
4092 rtx_insn *last;
4094 emit_single_push_insn_1 (mode, x, type);
4096 last = get_last_insn ();
4098 /* Notice the common case where we emitted exactly one insn. */
4099 if (PREV_INSN (last) == prev)
4101 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4102 return;
4105 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4106 gcc_assert (delta == INT_MIN || delta == old_delta);
4108 #endif
4110 /* Generate code to push X onto the stack, assuming it has mode MODE and
4111 type TYPE.
4112 MODE is redundant except when X is a CONST_INT (since they don't
4113 carry mode info).
4114 SIZE is an rtx for the size of data to be copied (in bytes),
4115 needed only if X is BLKmode.
4117 ALIGN (in bits) is maximum alignment we can assume.
4119 If PARTIAL and REG are both nonzero, then copy that many of the first
4120 bytes of X into registers starting with REG, and push the rest of X.
4121 The amount of space pushed is decreased by PARTIAL bytes.
4122 REG must be a hard register in this case.
4123 If REG is zero but PARTIAL is not, take all other actions for an
4124 argument partially in registers, but do not actually load any
4125 registers.
4127 EXTRA is the amount in bytes of extra space to leave next to this arg.
4128 This is ignored if an argument block has already been allocated.
4130 On a machine that lacks real push insns, ARGS_ADDR is the address of
4131 the bottom of the argument block for this call. We use indexing off there
4132 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4133 argument block has not been preallocated.
4135 ARGS_SO_FAR is the size of args previously pushed for this call.
4137 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4138 for arguments passed in registers. If nonzero, it will be the number
4139 of bytes required. */
4141 void
4142 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4143 unsigned int align, int partial, rtx reg, int extra,
4144 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4145 rtx alignment_pad)
4147 rtx xinner;
4148 enum direction stack_direction
4149 #ifdef STACK_GROWS_DOWNWARD
4150 = downward;
4151 #else
4152 = upward;
4153 #endif
4155 /* Decide where to pad the argument: `downward' for below,
4156 `upward' for above, or `none' for don't pad it.
4157 Default is below for small data on big-endian machines; else above. */
4158 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4160 /* Invert direction if stack is post-decrement.
4161 FIXME: why? */
4162 if (STACK_PUSH_CODE == POST_DEC)
4163 if (where_pad != none)
4164 where_pad = (where_pad == downward ? upward : downward);
4166 xinner = x;
4168 if (mode == BLKmode
4169 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4171 /* Copy a block into the stack, entirely or partially. */
4173 rtx temp;
4174 int used;
4175 int offset;
4176 int skip;
4178 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4179 used = partial - offset;
4181 if (mode != BLKmode)
4183 /* A value is to be stored in an insufficiently aligned
4184 stack slot; copy via a suitably aligned slot if
4185 necessary. */
4186 size = GEN_INT (GET_MODE_SIZE (mode));
4187 if (!MEM_P (xinner))
4189 temp = assign_temp (type, 1, 1);
4190 emit_move_insn (temp, xinner);
4191 xinner = temp;
4195 gcc_assert (size);
4197 /* USED is now the # of bytes we need not copy to the stack
4198 because registers will take care of them. */
4200 if (partial != 0)
4201 xinner = adjust_address (xinner, BLKmode, used);
4203 /* If the partial register-part of the arg counts in its stack size,
4204 skip the part of stack space corresponding to the registers.
4205 Otherwise, start copying to the beginning of the stack space,
4206 by setting SKIP to 0. */
4207 skip = (reg_parm_stack_space == 0) ? 0 : used;
4209 #ifdef PUSH_ROUNDING
4210 /* Do it with several push insns if that doesn't take lots of insns
4211 and if there is no difficulty with push insns that skip bytes
4212 on the stack for alignment purposes. */
4213 if (args_addr == 0
4214 && PUSH_ARGS
4215 && CONST_INT_P (size)
4216 && skip == 0
4217 && MEM_ALIGN (xinner) >= align
4218 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4219 /* Here we avoid the case of a structure whose weak alignment
4220 forces many pushes of a small amount of data,
4221 and such small pushes do rounding that causes trouble. */
4222 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4223 || align >= BIGGEST_ALIGNMENT
4224 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4225 == (align / BITS_PER_UNIT)))
4226 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4228 /* Push padding now if padding above and stack grows down,
4229 or if padding below and stack grows up.
4230 But if space already allocated, this has already been done. */
4231 if (extra && args_addr == 0
4232 && where_pad != none && where_pad != stack_direction)
4233 anti_adjust_stack (GEN_INT (extra));
4235 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4237 else
4238 #endif /* PUSH_ROUNDING */
4240 rtx target;
4242 /* Otherwise make space on the stack and copy the data
4243 to the address of that space. */
4245 /* Deduct words put into registers from the size we must copy. */
4246 if (partial != 0)
4248 if (CONST_INT_P (size))
4249 size = GEN_INT (INTVAL (size) - used);
4250 else
4251 size = expand_binop (GET_MODE (size), sub_optab, size,
4252 gen_int_mode (used, GET_MODE (size)),
4253 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4256 /* Get the address of the stack space.
4257 In this case, we do not deal with EXTRA separately.
4258 A single stack adjust will do. */
4259 if (! args_addr)
4261 temp = push_block (size, extra, where_pad == downward);
4262 extra = 0;
4264 else if (CONST_INT_P (args_so_far))
4265 temp = memory_address (BLKmode,
4266 plus_constant (Pmode, args_addr,
4267 skip + INTVAL (args_so_far)));
4268 else
4269 temp = memory_address (BLKmode,
4270 plus_constant (Pmode,
4271 gen_rtx_PLUS (Pmode,
4272 args_addr,
4273 args_so_far),
4274 skip));
4276 if (!ACCUMULATE_OUTGOING_ARGS)
4278 /* If the source is referenced relative to the stack pointer,
4279 copy it to another register to stabilize it. We do not need
4280 to do this if we know that we won't be changing sp. */
4282 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4283 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4284 temp = copy_to_reg (temp);
4287 target = gen_rtx_MEM (BLKmode, temp);
4289 /* We do *not* set_mem_attributes here, because incoming arguments
4290 may overlap with sibling call outgoing arguments and we cannot
4291 allow reordering of reads from function arguments with stores
4292 to outgoing arguments of sibling calls. We do, however, want
4293 to record the alignment of the stack slot. */
4294 /* ALIGN may well be better aligned than TYPE, e.g. due to
4295 PARM_BOUNDARY. Assume the caller isn't lying. */
4296 set_mem_align (target, align);
4298 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4301 else if (partial > 0)
4303 /* Scalar partly in registers. */
4305 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4306 int i;
4307 int not_stack;
4308 /* # bytes of start of argument
4309 that we must make space for but need not store. */
4310 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4311 int args_offset = INTVAL (args_so_far);
4312 int skip;
4314 /* Push padding now if padding above and stack grows down,
4315 or if padding below and stack grows up.
4316 But if space already allocated, this has already been done. */
4317 if (extra && args_addr == 0
4318 && where_pad != none && where_pad != stack_direction)
4319 anti_adjust_stack (GEN_INT (extra));
4321 /* If we make space by pushing it, we might as well push
4322 the real data. Otherwise, we can leave OFFSET nonzero
4323 and leave the space uninitialized. */
4324 if (args_addr == 0)
4325 offset = 0;
4327 /* Now NOT_STACK gets the number of words that we don't need to
4328 allocate on the stack. Convert OFFSET to words too. */
4329 not_stack = (partial - offset) / UNITS_PER_WORD;
4330 offset /= UNITS_PER_WORD;
4332 /* If the partial register-part of the arg counts in its stack size,
4333 skip the part of stack space corresponding to the registers.
4334 Otherwise, start copying to the beginning of the stack space,
4335 by setting SKIP to 0. */
4336 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4338 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4339 x = validize_mem (force_const_mem (mode, x));
4341 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4342 SUBREGs of such registers are not allowed. */
4343 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4344 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4345 x = copy_to_reg (x);
4347 /* Loop over all the words allocated on the stack for this arg. */
4348 /* We can do it by words, because any scalar bigger than a word
4349 has a size that is a multiple of a word. */
4350 for (i = size - 1; i >= not_stack; i--)
4351 if (i >= not_stack + offset)
4352 emit_push_insn (operand_subword_force (x, i, mode),
4353 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4354 0, args_addr,
4355 GEN_INT (args_offset + ((i - not_stack + skip)
4356 * UNITS_PER_WORD)),
4357 reg_parm_stack_space, alignment_pad);
4359 else
4361 rtx addr;
4362 rtx dest;
4364 /* Push padding now if padding above and stack grows down,
4365 or if padding below and stack grows up.
4366 But if space already allocated, this has already been done. */
4367 if (extra && args_addr == 0
4368 && where_pad != none && where_pad != stack_direction)
4369 anti_adjust_stack (GEN_INT (extra));
4371 #ifdef PUSH_ROUNDING
4372 if (args_addr == 0 && PUSH_ARGS)
4373 emit_single_push_insn (mode, x, type);
4374 else
4375 #endif
4377 if (CONST_INT_P (args_so_far))
4378 addr
4379 = memory_address (mode,
4380 plus_constant (Pmode, args_addr,
4381 INTVAL (args_so_far)));
4382 else
4383 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4384 args_so_far));
4385 dest = gen_rtx_MEM (mode, addr);
4387 /* We do *not* set_mem_attributes here, because incoming arguments
4388 may overlap with sibling call outgoing arguments and we cannot
4389 allow reordering of reads from function arguments with stores
4390 to outgoing arguments of sibling calls. We do, however, want
4391 to record the alignment of the stack slot. */
4392 /* ALIGN may well be better aligned than TYPE, e.g. due to
4393 PARM_BOUNDARY. Assume the caller isn't lying. */
4394 set_mem_align (dest, align);
4396 emit_move_insn (dest, x);
4400 /* If part should go in registers, copy that part
4401 into the appropriate registers. Do this now, at the end,
4402 since mem-to-mem copies above may do function calls. */
4403 if (partial > 0 && reg != 0)
4405 /* Handle calls that pass values in multiple non-contiguous locations.
4406 The Irix 6 ABI has examples of this. */
4407 if (GET_CODE (reg) == PARALLEL)
4408 emit_group_load (reg, x, type, -1);
4409 else
4411 gcc_assert (partial % UNITS_PER_WORD == 0);
4412 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4416 if (extra && args_addr == 0 && where_pad == stack_direction)
4417 anti_adjust_stack (GEN_INT (extra));
4419 if (alignment_pad && args_addr == 0)
4420 anti_adjust_stack (alignment_pad);
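/* Simplified example of the PARTIAL/REG case, assuming 4-byte words: for a
12-byte argument whose first 4 bytes the ABI passes in hard register REG,
the caller passes PARTIAL == 4; the code above pushes only the remaining
8 bytes (skipping stack space for the register part when
REG_PARM_STACK_SPACE is nonzero) and, at the end, copies the first 4 bytes
into REG via move_block_to_reg or emit_group_load.  */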
4423 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4424 operations. */
4426 static rtx
4427 get_subtarget (rtx x)
4429 return (optimize
4430 || x == 0
4431 /* Only registers can be subtargets. */
4432 || !REG_P (x)
4433 /* Don't use hard regs to avoid extending their life. */
4434 || REGNO (x) < FIRST_PSEUDO_REGISTER
4435 ? 0 : x);
4438 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4439 FIELD is a bitfield. Returns true if the optimization was successful,
4440 and there's nothing else to do. */
4442 static bool
4443 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4444 unsigned HOST_WIDE_INT bitpos,
4445 unsigned HOST_WIDE_INT bitregion_start,
4446 unsigned HOST_WIDE_INT bitregion_end,
4447 machine_mode mode1, rtx str_rtx,
4448 tree to, tree src)
4450 machine_mode str_mode = GET_MODE (str_rtx);
4451 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4452 tree op0, op1;
4453 rtx value, result;
4454 optab binop;
4455 gimple srcstmt;
4456 enum tree_code code;
4458 if (mode1 != VOIDmode
4459 || bitsize >= BITS_PER_WORD
4460 || str_bitsize > BITS_PER_WORD
4461 || TREE_SIDE_EFFECTS (to)
4462 || TREE_THIS_VOLATILE (to))
4463 return false;
4465 STRIP_NOPS (src);
4466 if (TREE_CODE (src) != SSA_NAME)
4467 return false;
4468 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4469 return false;
4471 srcstmt = get_gimple_for_ssa_name (src);
4472 if (!srcstmt
4473 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4474 return false;
4476 code = gimple_assign_rhs_code (srcstmt);
4478 op0 = gimple_assign_rhs1 (srcstmt);
4480 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4481 to find its initialization. Hopefully the initialization will
4482 be from a bitfield load. */
4483 if (TREE_CODE (op0) == SSA_NAME)
4485 gimple op0stmt = get_gimple_for_ssa_name (op0);
4487 /* We want to eventually have OP0 be the same as TO, which
4488 should be a bitfield. */
4489 if (!op0stmt
4490 || !is_gimple_assign (op0stmt)
4491 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4492 return false;
4493 op0 = gimple_assign_rhs1 (op0stmt);
4496 op1 = gimple_assign_rhs2 (srcstmt);
4498 if (!operand_equal_p (to, op0, 0))
4499 return false;
4501 if (MEM_P (str_rtx))
4503 unsigned HOST_WIDE_INT offset1;
4505 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4506 str_mode = word_mode;
4507 str_mode = get_best_mode (bitsize, bitpos,
4508 bitregion_start, bitregion_end,
4509 MEM_ALIGN (str_rtx), str_mode, 0);
4510 if (str_mode == VOIDmode)
4511 return false;
4512 str_bitsize = GET_MODE_BITSIZE (str_mode);
4514 offset1 = bitpos;
4515 bitpos %= str_bitsize;
4516 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4517 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4519 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4520 return false;
4522 /* If the bit field covers the whole REG/MEM, store_field
4523 will likely generate better code. */
4524 if (bitsize >= str_bitsize)
4525 return false;
4527 /* We can't handle fields split across multiple entities. */
4528 if (bitpos + bitsize > str_bitsize)
4529 return false;
4531 if (BYTES_BIG_ENDIAN)
4532 bitpos = str_bitsize - bitpos - bitsize;
4534 switch (code)
4536 case PLUS_EXPR:
4537 case MINUS_EXPR:
4538 /* For now, just optimize the case of the topmost bitfield
4539 where we don't need to do any masking and also
4540 1-bit bitfields where xor can be used.
4541 We might win by one instruction for the other bitfields
4542 too if insv/extv instructions aren't used, so that
4543 can be added later. */
4544 if (bitpos + bitsize != str_bitsize
4545 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4546 break;
4548 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4549 value = convert_modes (str_mode,
4550 TYPE_MODE (TREE_TYPE (op1)), value,
4551 TYPE_UNSIGNED (TREE_TYPE (op1)));
4553 /* We may be accessing data outside the field, which means
4554 we can alias adjacent data. */
4555 if (MEM_P (str_rtx))
4557 str_rtx = shallow_copy_rtx (str_rtx);
4558 set_mem_alias_set (str_rtx, 0);
4559 set_mem_expr (str_rtx, 0);
4562 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4563 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4565 value = expand_and (str_mode, value, const1_rtx, NULL);
4566 binop = xor_optab;
4568 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4569 result = expand_binop (str_mode, binop, str_rtx,
4570 value, str_rtx, 1, OPTAB_WIDEN);
4571 if (result != str_rtx)
4572 emit_move_insn (str_rtx, result);
4573 return true;
4575 case BIT_IOR_EXPR:
4576 case BIT_XOR_EXPR:
4577 if (TREE_CODE (op1) != INTEGER_CST)
4578 break;
4579 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4580 value = convert_modes (str_mode,
4581 TYPE_MODE (TREE_TYPE (op1)), value,
4582 TYPE_UNSIGNED (TREE_TYPE (op1)));
4584 /* We may be accessing data outside the field, which means
4585 we can alias adjacent data. */
4586 if (MEM_P (str_rtx))
4588 str_rtx = shallow_copy_rtx (str_rtx);
4589 set_mem_alias_set (str_rtx, 0);
4590 set_mem_expr (str_rtx, 0);
4593 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4594 if (bitpos + bitsize != str_bitsize)
4596 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4597 str_mode);
4598 value = expand_and (str_mode, value, mask, NULL_RTX);
4600 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4601 result = expand_binop (str_mode, binop, str_rtx,
4602 value, str_rtx, 1, OPTAB_WIDEN);
4603 if (result != str_rtx)
4604 emit_move_insn (str_rtx, result);
4605 return true;
4607 default:
4608 break;
4611 return false;
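/* A sketch of what this buys us: given
struct { unsigned low : 24; unsigned top : 8; } s;
s.top += 1;
on a little-endian target TOP occupies the most significant bits of its
32-bit word, so the PLUS_EXPR case above shifts the constant into place
and adds it straight into the containing word instead of extracting,
adding and re-inserting the field.  The struct and field names are purely
illustrative.  */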
4614 /* In the C++ memory model, consecutive bit fields in a structure are
4615 considered one memory location.
4617 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4618 returns the bit range of consecutive bits in which this COMPONENT_REF
4619 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4620 and *OFFSET may be adjusted in the process.
4622 If the access does not need to be restricted, 0 is returned in both
4623 *BITSTART and *BITEND. */
4625 static void
4626 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4627 unsigned HOST_WIDE_INT *bitend,
4628 tree exp,
4629 HOST_WIDE_INT *bitpos,
4630 tree *offset)
4632 HOST_WIDE_INT bitoffset;
4633 tree field, repr;
4635 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4637 field = TREE_OPERAND (exp, 1);
4638 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4639 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4640 need to limit the range we can access. */
4641 if (!repr)
4643 *bitstart = *bitend = 0;
4644 return;
4647 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4648 part of a larger bit field, then the representative does not serve any
4649 useful purpose. This can occur in Ada. */
4650 if (handled_component_p (TREE_OPERAND (exp, 0)))
4652 machine_mode rmode;
4653 HOST_WIDE_INT rbitsize, rbitpos;
4654 tree roffset;
4655 int unsignedp;
4656 int volatilep = 0;
4657 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4658 &roffset, &rmode, &unsignedp, &volatilep, false);
4659 if ((rbitpos % BITS_PER_UNIT) != 0)
4661 *bitstart = *bitend = 0;
4662 return;
4666 /* Compute the adjustment to bitpos from the offset of the field
4667 relative to the representative. DECL_FIELD_OFFSET of field and
4668 repr are the same by construction if they are not constants,
4669 see finish_bitfield_layout. */
4670 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4671 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4672 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4673 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4674 else
4675 bitoffset = 0;
4676 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4677 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4679 /* If the adjustment is larger than bitpos, we would have a negative bit
4680 position for the lower bound and this may wreak havoc later. Adjust
4681 offset and bitpos to make the lower bound non-negative in that case. */
4682 if (bitoffset > *bitpos)
4684 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4685 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4687 *bitpos += adjust;
4688 if (*offset == NULL_TREE)
4689 *offset = size_int (-adjust / BITS_PER_UNIT);
4690 else
4691 *offset
4692 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4693 *bitstart = 0;
4695 else
4696 *bitstart = *bitpos - bitoffset;
4698 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
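/* Concretely, with an illustrative layout such as
struct { char a; int b : 3; int c : 5; char d; } x;
B and C typically share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to
x.b gets a bit range covering b and c only; the C++ memory model then
permits a read-modify-write of that region but never of the separately
addressable members A and D.  */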
4701 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4702 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4703 DECL_RTL was not set yet, return NORTL. */
4705 static inline bool
4706 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4708 if (TREE_CODE (addr) != ADDR_EXPR)
4709 return false;
4711 tree base = TREE_OPERAND (addr, 0);
4713 if (!DECL_P (base)
4714 || TREE_ADDRESSABLE (base)
4715 || DECL_MODE (base) == BLKmode)
4716 return false;
4718 if (!DECL_RTL_SET_P (base))
4719 return nortl;
4721 return (!MEM_P (DECL_RTL (base)));
4724 /* Returns true if the MEM_REF REF refers to an object that does not
4725 reside in memory and has non-BLKmode. */
4727 static inline bool
4728 mem_ref_refers_to_non_mem_p (tree ref)
4730 tree base = TREE_OPERAND (ref, 0);
4731 return addr_expr_of_non_mem_decl_p_1 (base, false);
4734 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4735 is true, try generating a nontemporal store. */
4737 void
4738 expand_assignment (tree to, tree from, bool nontemporal)
4740 rtx to_rtx = 0;
4741 rtx result;
4742 machine_mode mode;
4743 unsigned int align;
4744 enum insn_code icode;
4746 /* Don't crash if the lhs of the assignment was erroneous. */
4747 if (TREE_CODE (to) == ERROR_MARK)
4749 expand_normal (from);
4750 return;
4753 /* Optimize away no-op moves without side-effects. */
4754 if (operand_equal_p (to, from, 0))
4755 return;
4757 /* Handle misaligned stores. */
4758 mode = TYPE_MODE (TREE_TYPE (to));
4759 if ((TREE_CODE (to) == MEM_REF
4760 || TREE_CODE (to) == TARGET_MEM_REF)
4761 && mode != BLKmode
4762 && !mem_ref_refers_to_non_mem_p (to)
4763 && ((align = get_object_alignment (to))
4764 < GET_MODE_ALIGNMENT (mode))
4765 && (((icode = optab_handler (movmisalign_optab, mode))
4766 != CODE_FOR_nothing)
4767 || SLOW_UNALIGNED_ACCESS (mode, align)))
4769 rtx reg, mem;
4771 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4772 reg = force_not_mem (reg);
4773 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4775 if (icode != CODE_FOR_nothing)
4777 struct expand_operand ops[2];
4779 create_fixed_operand (&ops[0], mem);
4780 create_input_operand (&ops[1], reg, mode);
4781 /* The movmisalign<mode> pattern cannot fail, else the assignment
4782 would silently be omitted. */
4783 expand_insn (icode, 2, ops);
4785 else
4786 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4787 return;
4790 /* Assignment of a structure component needs special treatment
4791 if the structure component's rtx is not simply a MEM.
4792 Assignment of an array element at a constant index, and assignment of
4793 an array element in an unaligned packed structure field, has the same
4794 problem. Same for (partially) storing into a non-memory object. */
4795 if (handled_component_p (to)
4796 || (TREE_CODE (to) == MEM_REF
4797 && mem_ref_refers_to_non_mem_p (to))
4798 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4800 machine_mode mode1;
4801 HOST_WIDE_INT bitsize, bitpos;
4802 unsigned HOST_WIDE_INT bitregion_start = 0;
4803 unsigned HOST_WIDE_INT bitregion_end = 0;
4804 tree offset;
4805 int unsignedp;
4806 int volatilep = 0;
4807 tree tem;
4809 push_temp_slots ();
4810 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4811 &unsignedp, &volatilep, true);
4813 /* Make sure bitpos is not negative, it can wreak havoc later. */
4814 if (bitpos < 0)
4816 gcc_assert (offset == NULL_TREE);
4817 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4818 ? 3 : exact_log2 (BITS_PER_UNIT)));
4819 bitpos &= BITS_PER_UNIT - 1;
4822 if (TREE_CODE (to) == COMPONENT_REF
4823 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4824 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4825 /* The C++ memory model naturally applies to byte-aligned fields.
4826 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4827 BITSIZE are not byte-aligned, there is no need to limit the range
4828 we can access. This can occur with packed structures in Ada. */
4829 else if (bitsize > 0
4830 && bitsize % BITS_PER_UNIT == 0
4831 && bitpos % BITS_PER_UNIT == 0)
4833 bitregion_start = bitpos;
4834 bitregion_end = bitpos + bitsize - 1;
4837 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4839 /* If the field has a mode, we want to access it in the
4840 field's mode, not the computed mode.
4841 If a MEM has VOIDmode (external with incomplete type),
4842 use BLKmode for it instead. */
4843 if (MEM_P (to_rtx))
4845 if (mode1 != VOIDmode)
4846 to_rtx = adjust_address (to_rtx, mode1, 0);
4847 else if (GET_MODE (to_rtx) == VOIDmode)
4848 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4851 if (offset != 0)
4853 machine_mode address_mode;
4854 rtx offset_rtx;
4856 if (!MEM_P (to_rtx))
4858 /* We can get constant negative offsets into arrays with broken
4859 user code. Translate this to a trap instead of ICEing. */
4860 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4861 expand_builtin_trap ();
4862 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4865 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4866 address_mode = get_address_mode (to_rtx);
4867 if (GET_MODE (offset_rtx) != address_mode)
4868 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4870 /* If we have an expression in OFFSET_RTX and a non-zero
4871 byte offset in BITPOS, adding the byte offset before the
4872 OFFSET_RTX results in better intermediate code, which makes
4873 later rtl optimization passes perform better.
4875 We prefer intermediate code like this:
4877 r124:DI=r123:DI+0x18
4878 [r124:DI]=r121:DI
4880 ... instead of ...
4882 r124:DI=r123:DI+0x10
4883 [r124:DI+0x8]=r121:DI
4885 This is only done for aligned data values, as these can
4886 be expected to result in single move instructions. */
4887 if (mode1 != VOIDmode
4888 && bitpos != 0
4889 && bitsize > 0
4890 && (bitpos % bitsize) == 0
4891 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4892 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4894 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4895 bitregion_start = 0;
4896 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4897 bitregion_end -= bitpos;
4898 bitpos = 0;
4901 to_rtx = offset_address (to_rtx, offset_rtx,
4902 highest_pow2_factor_for_target (to,
4903 offset));
4906 /* No action is needed if the target is not a memory and the field
4907 lies completely outside that target. This can occur if the source
4908 code contains an out-of-bounds access to a small array. */
4909 if (!MEM_P (to_rtx)
4910 && GET_MODE (to_rtx) != BLKmode
4911 && (unsigned HOST_WIDE_INT) bitpos
4912 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4914 expand_normal (from);
4915 result = NULL;
4917 /* Handle expand_expr of a complex value returning a CONCAT. */
4918 else if (GET_CODE (to_rtx) == CONCAT)
4920 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4921 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4922 && bitpos == 0
4923 && bitsize == mode_bitsize)
4924 result = store_expr (from, to_rtx, false, nontemporal);
4925 else if (bitsize == mode_bitsize / 2
4926 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4927 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4928 nontemporal);
4929 else if (bitpos + bitsize <= mode_bitsize / 2)
4930 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4931 bitregion_start, bitregion_end,
4932 mode1, from,
4933 get_alias_set (to), nontemporal);
4934 else if (bitpos >= mode_bitsize / 2)
4935 result = store_field (XEXP (to_rtx, 1), bitsize,
4936 bitpos - mode_bitsize / 2,
4937 bitregion_start, bitregion_end,
4938 mode1, from,
4939 get_alias_set (to), nontemporal);
4940 else if (bitpos == 0 && bitsize == mode_bitsize)
4942 rtx from_rtx;
4943 result = expand_normal (from);
4944 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4945 TYPE_MODE (TREE_TYPE (from)), 0);
4946 emit_move_insn (XEXP (to_rtx, 0),
4947 read_complex_part (from_rtx, false));
4948 emit_move_insn (XEXP (to_rtx, 1),
4949 read_complex_part (from_rtx, true));
4951 else
4953 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4954 GET_MODE_SIZE (GET_MODE (to_rtx)));
4955 write_complex_part (temp, XEXP (to_rtx, 0), false);
4956 write_complex_part (temp, XEXP (to_rtx, 1), true);
4957 result = store_field (temp, bitsize, bitpos,
4958 bitregion_start, bitregion_end,
4959 mode1, from,
4960 get_alias_set (to), nontemporal);
4961 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4962 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4965 else
4967 if (MEM_P (to_rtx))
4969 /* If the field is at offset zero, we could have been given the
4970 DECL_RTX of the parent struct. Don't munge it. */
4971 to_rtx = shallow_copy_rtx (to_rtx);
4972 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4973 if (volatilep)
4974 MEM_VOLATILE_P (to_rtx) = 1;
4977 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4978 bitregion_start, bitregion_end,
4979 mode1,
4980 to_rtx, to, from))
4981 result = NULL;
4982 else
4983 result = store_field (to_rtx, bitsize, bitpos,
4984 bitregion_start, bitregion_end,
4985 mode1, from,
4986 get_alias_set (to), nontemporal);
4989 if (result)
4990 preserve_temp_slots (result);
4991 pop_temp_slots ();
4992 return;
4995 /* If the rhs is a function call and its value is not an aggregate,
4996 call the function before we start to compute the lhs.
4997 This is needed for correct code for cases such as
4998 val = setjmp (buf) on machines where reference to val
4999 requires loading up part of an address in a separate insn.
5001 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5002 since it might be a promoted variable where the zero- or sign- extension
5003 needs to be done. Handling this in the normal way is safe because no
5004 computation is done before the call. The same is true for SSA names. */
5005 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5006 && COMPLETE_TYPE_P (TREE_TYPE (from))
5007 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5008 && ! (((TREE_CODE (to) == VAR_DECL
5009 || TREE_CODE (to) == PARM_DECL
5010 || TREE_CODE (to) == RESULT_DECL)
5011 && REG_P (DECL_RTL (to)))
5012 || TREE_CODE (to) == SSA_NAME))
5014 rtx value;
5016 push_temp_slots ();
5017 value = expand_normal (from);
5018 if (to_rtx == 0)
5019 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5021 /* Handle calls that return values in multiple non-contiguous locations.
5022 The Irix 6 ABI has examples of this. */
5023 if (GET_CODE (to_rtx) == PARALLEL)
5025 if (GET_CODE (value) == PARALLEL)
5026 emit_group_move (to_rtx, value);
5027 else
5028 emit_group_load (to_rtx, value, TREE_TYPE (from),
5029 int_size_in_bytes (TREE_TYPE (from)));
5031 else if (GET_CODE (value) == PARALLEL)
5032 emit_group_store (to_rtx, value, TREE_TYPE (from),
5033 int_size_in_bytes (TREE_TYPE (from)));
5034 else if (GET_MODE (to_rtx) == BLKmode)
5036 /* Handle calls that return BLKmode values in registers. */
5037 if (REG_P (value))
5038 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5039 else
5040 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5042 else
5044 if (POINTER_TYPE_P (TREE_TYPE (to)))
5045 value = convert_memory_address_addr_space
5046 (GET_MODE (to_rtx), value,
5047 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5049 emit_move_insn (to_rtx, value);
5051 preserve_temp_slots (to_rtx);
5052 pop_temp_slots ();
5053 return;
5056 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5057 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5059 /* Don't move directly into a return register. */
5060 if (TREE_CODE (to) == RESULT_DECL
5061 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5063 rtx temp;
5065 push_temp_slots ();
5067 /* If the source is itself a return value, it still is in a pseudo at
5068 this point so we can move it back to the return register directly. */
5069 if (REG_P (to_rtx)
5070 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5071 && TREE_CODE (from) != CALL_EXPR)
5072 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5073 else
5074 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5076 /* Handle calls that return values in multiple non-contiguous locations.
5077 The Irix 6 ABI has examples of this. */
5078 if (GET_CODE (to_rtx) == PARALLEL)
5080 if (GET_CODE (temp) == PARALLEL)
5081 emit_group_move (to_rtx, temp);
5082 else
5083 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5084 int_size_in_bytes (TREE_TYPE (from)));
5086 else if (temp)
5087 emit_move_insn (to_rtx, temp);
5089 preserve_temp_slots (to_rtx);
5090 pop_temp_slots ();
5091 return;
5094 /* In case we are returning the contents of an object which overlaps
5095 the place the value is being stored, use a safe function when copying
5096 a value through a pointer into a structure value return block. */
5097 if (TREE_CODE (to) == RESULT_DECL
5098 && TREE_CODE (from) == INDIRECT_REF
5099 && ADDR_SPACE_GENERIC_P
5100 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5101 && refs_may_alias_p (to, from)
5102 && cfun->returns_struct
5103 && !cfun->returns_pcc_struct)
5105 rtx from_rtx, size;
5107 push_temp_slots ();
5108 size = expr_size (from);
5109 from_rtx = expand_normal (from);
5111 emit_library_call (memmove_libfunc, LCT_NORMAL,
5112 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5113 XEXP (from_rtx, 0), Pmode,
5114 convert_to_mode (TYPE_MODE (sizetype),
5115 size, TYPE_UNSIGNED (sizetype)),
5116 TYPE_MODE (sizetype));
5118 preserve_temp_slots (to_rtx);
5119 pop_temp_slots ();
5120 return;
5123 /* Compute FROM and store the value in the rtx we got. */
5125 push_temp_slots ();
5126 result = store_expr (from, to_rtx, 0, nontemporal);
5127 preserve_temp_slots (result);
5128 pop_temp_slots ();
5129 return;
5132 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5133 succeeded, false otherwise. */
5135 bool
5136 emit_storent_insn (rtx to, rtx from)
5138 struct expand_operand ops[2];
5139 machine_mode mode = GET_MODE (to);
5140 enum insn_code code = optab_handler (storent_optab, mode);
5142 if (code == CODE_FOR_nothing)
5143 return false;
5145 create_fixed_operand (&ops[0], to);
5146 create_input_operand (&ops[1], from, mode);
5147 return maybe_expand_insn (code, 2, ops);
5150 /* Generate code for computing expression EXP,
5151 and storing the value into TARGET.
5153 If the mode is BLKmode then we may return TARGET itself.
5154 It turns out that in BLKmode it doesn't cause a problem,
5155 because C has no operators that could combine two different
5156 assignments into the same BLKmode object with different values
5157 with no sequence point. Will other languages need this to
5158 be more thorough?
5160 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5161 stack, and block moves may need to be treated specially.
5163 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5165 rtx
5166 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5168 rtx temp;
5169 rtx alt_rtl = NULL_RTX;
5170 location_t loc = curr_insn_location ();
5172 if (VOID_TYPE_P (TREE_TYPE (exp)))
5174 /* C++ can generate ?: expressions with a throw expression in one
5175 branch and an rvalue in the other. Here, we resolve attempts to
5176 store the throw expression's nonexistent result. */
5177 gcc_assert (!call_param_p);
5178 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5179 return NULL_RTX;
5181 if (TREE_CODE (exp) == COMPOUND_EXPR)
5183 /* Perform first part of compound expression, then assign from second
5184 part. */
5185 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5186 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5187 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5188 nontemporal);
5190 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5192 /* For conditional expression, get safe form of the target. Then
5193 test the condition, doing the appropriate assignment on either
5194 side. This avoids the creation of unnecessary temporaries.
5195 For non-BLKmode, it is more efficient not to do this. */
5197 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5199 do_pending_stack_adjust ();
5200 NO_DEFER_POP;
5201 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5202 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5203 nontemporal);
5204 emit_jump_insn (gen_jump (lab2));
5205 emit_barrier ();
5206 emit_label (lab1);
5207 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5208 nontemporal);
5209 emit_label (lab2);
5210 OK_DEFER_POP;
5212 return NULL_RTX;
5214 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5215 /* If this is a scalar in a register that is stored in a wider mode
5216 than the declared mode, compute the result into its declared mode
5217 and then convert to the wider mode. Our value is the computed
5218 expression. */
5220 rtx inner_target = 0;
5222 /* We can do the conversion inside EXP, which will often result
5223 in some optimizations. Do the conversion in two steps: first
5224 change the signedness, if needed, then the extend. But don't
5225 do this if the type of EXP is a subtype of something else
5226 since then the conversion might involve more than just
5227 converting modes. */
5228 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5229 && TREE_TYPE (TREE_TYPE (exp)) == 0
5230 && GET_MODE_PRECISION (GET_MODE (target))
5231 == TYPE_PRECISION (TREE_TYPE (exp)))
5233 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5234 TYPE_UNSIGNED (TREE_TYPE (exp))))
5236 /* Some types, e.g. Fortran's logical*4, won't have a signed
5237 version, so use the mode instead. */
5238 tree ntype
5239 = (signed_or_unsigned_type_for
5240 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5241 if (ntype == NULL)
5242 ntype = lang_hooks.types.type_for_mode
5243 (TYPE_MODE (TREE_TYPE (exp)),
5244 SUBREG_PROMOTED_SIGN (target));
5246 exp = fold_convert_loc (loc, ntype, exp);
5249 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5250 (GET_MODE (SUBREG_REG (target)),
5251 SUBREG_PROMOTED_SIGN (target)),
5252 exp);
5254 inner_target = SUBREG_REG (target);
5257 temp = expand_expr (exp, inner_target, VOIDmode,
5258 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5260 /* If TEMP is a VOIDmode constant, use convert_modes to make
5261 sure that we properly convert it. */
5262 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5264 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5265 temp, SUBREG_PROMOTED_SIGN (target));
5266 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5267 GET_MODE (target), temp,
5268 SUBREG_PROMOTED_SIGN (target));
5271 convert_move (SUBREG_REG (target), temp,
5272 SUBREG_PROMOTED_SIGN (target));
5274 return NULL_RTX;
5276 else if ((TREE_CODE (exp) == STRING_CST
5277 || (TREE_CODE (exp) == MEM_REF
5278 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5279 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5280 == STRING_CST
5281 && integer_zerop (TREE_OPERAND (exp, 1))))
5282 && !nontemporal && !call_param_p
5283 && MEM_P (target))
5285 /* Optimize initialization of an array with a STRING_CST. */
5286 HOST_WIDE_INT exp_len, str_copy_len;
5287 rtx dest_mem;
5288 tree str = TREE_CODE (exp) == STRING_CST
5289 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5291 exp_len = int_expr_size (exp);
5292 if (exp_len <= 0)
5293 goto normal_expr;
5295 if (TREE_STRING_LENGTH (str) <= 0)
5296 goto normal_expr;
5298 str_copy_len = strlen (TREE_STRING_POINTER (str));
5299 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5300 goto normal_expr;
5302 str_copy_len = TREE_STRING_LENGTH (str);
5303 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5304 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5306 str_copy_len += STORE_MAX_PIECES - 1;
5307 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5309 str_copy_len = MIN (str_copy_len, exp_len);
5310 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5311 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5312 MEM_ALIGN (target), false))
5313 goto normal_expr;
5315 dest_mem = target;
5317 dest_mem = store_by_pieces (dest_mem,
5318 str_copy_len, builtin_strncpy_read_str,
5319 CONST_CAST (char *,
5320 TREE_STRING_POINTER (str)),
5321 MEM_ALIGN (target), false,
5322 exp_len > str_copy_len ? 1 : 0);
5323 if (exp_len > str_copy_len)
5324 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5325 GEN_INT (exp_len - str_copy_len),
5326 BLOCK_OP_NORMAL);
5327 return NULL_RTX;
5329 else
5331 rtx tmp_target;
5333 normal_expr:
5334 /* If we want to use a nontemporal store, force the value to
5335 register first. */
5336 tmp_target = nontemporal ? NULL_RTX : target;
5337 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5338 (call_param_p
5339 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5340 &alt_rtl, false);
5343 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5344 the same as that of TARGET, adjust the constant. This is needed, for
5345 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5346 only a word-sized value. */
5347 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5348 && TREE_CODE (exp) != ERROR_MARK
5349 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5350 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5351 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5353 /* If value was not generated in the target, store it there.
5354 Convert the value to TARGET's type first if necessary and emit the
5355 pending increments that have been queued when expanding EXP.
5356 Note that we cannot emit the whole queue blindly because this will
5357 effectively disable the POST_INC optimization later.
5359 If TEMP and TARGET compare equal according to rtx_equal_p, but
5360 one or both of them are volatile memory refs, we have to distinguish
5361 two cases:
5362 - expand_expr has used TARGET. In this case, we must not generate
5363 another copy. This can be detected by TARGET being equal according
5364 to == .
5365 - expand_expr has not used TARGET - that means that the source just
5366 happens to have the same RTX form. Since temp will have been created
5367 by expand_expr, it will compare unequal according to == .
5368 We must generate a copy in this case, to reach the correct number
5369 of volatile memory references. */
5371 if ((! rtx_equal_p (temp, target)
5372 || (temp != target && (side_effects_p (temp)
5373 || side_effects_p (target))))
5374 && TREE_CODE (exp) != ERROR_MARK
5375 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5376 but TARGET is not valid memory reference, TEMP will differ
5377 from TARGET although it is really the same location. */
5378 && !(alt_rtl
5379 && rtx_equal_p (alt_rtl, target)
5380 && !side_effects_p (alt_rtl)
5381 && !side_effects_p (target))
5382 /* If there's nothing to copy, don't bother. Don't call
5383 expr_size unless necessary, because some front ends' (e.g. C++)
5384 expr_size hook must not be given objects that are not
5385 supposed to be bit-copied or bit-initialized. */
5386 && expr_size (exp) != const0_rtx)
5388 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5390 if (GET_MODE (target) == BLKmode)
5392 /* Handle calls that return BLKmode values in registers. */
5393 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5394 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5395 else
5396 store_bit_field (target,
5397 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5398 0, 0, 0, GET_MODE (temp), temp);
5400 else
5401 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5404 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5406 /* Handle copying a string constant into an array. The string
5407 constant may be shorter than the array. So copy just the string's
5408 actual length, and clear the rest. First get the size of the data
5409 type of the string, which is actually the size of the target. */
5410 rtx size = expr_size (exp);
5412 if (CONST_INT_P (size)
5413 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5414 emit_block_move (target, temp, size,
5415 (call_param_p
5416 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5417 else
5419 machine_mode pointer_mode
5420 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5421 machine_mode address_mode = get_address_mode (target);
5423 /* Compute the size of the data to copy from the string. */
5424 tree copy_size
5425 = size_binop_loc (loc, MIN_EXPR,
5426 make_tree (sizetype, size),
5427 size_int (TREE_STRING_LENGTH (exp)));
5428 rtx copy_size_rtx
5429 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5430 (call_param_p
5431 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5432 rtx_code_label *label = 0;
5434 /* Copy that much. */
5435 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5436 TYPE_UNSIGNED (sizetype));
5437 emit_block_move (target, temp, copy_size_rtx,
5438 (call_param_p
5439 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5441 /* Figure out how much is left in TARGET that we have to clear.
5442 Do all calculations in pointer_mode. */
5443 if (CONST_INT_P (copy_size_rtx))
5445 size = plus_constant (address_mode, size,
5446 -INTVAL (copy_size_rtx));
5447 target = adjust_address (target, BLKmode,
5448 INTVAL (copy_size_rtx));
5450 else
5452 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5453 copy_size_rtx, NULL_RTX, 0,
5454 OPTAB_LIB_WIDEN);
5456 if (GET_MODE (copy_size_rtx) != address_mode)
5457 copy_size_rtx = convert_to_mode (address_mode,
5458 copy_size_rtx,
5459 TYPE_UNSIGNED (sizetype));
5461 target = offset_address (target, copy_size_rtx,
5462 highest_pow2_factor (copy_size));
5463 label = gen_label_rtx ();
5464 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5465 GET_MODE (size), 0, label);
5468 if (size != const0_rtx)
5469 clear_storage (target, size, BLOCK_OP_NORMAL);
5471 if (label)
5472 emit_label (label);
5475 /* Handle calls that return values in multiple non-contiguous locations.
5476 The Irix 6 ABI has examples of this. */
5477 else if (GET_CODE (target) == PARALLEL)
5479 if (GET_CODE (temp) == PARALLEL)
5480 emit_group_move (target, temp);
5481 else
5482 emit_group_load (target, temp, TREE_TYPE (exp),
5483 int_size_in_bytes (TREE_TYPE (exp)));
5485 else if (GET_CODE (temp) == PARALLEL)
5486 emit_group_store (target, temp, TREE_TYPE (exp),
5487 int_size_in_bytes (TREE_TYPE (exp)));
5488 else if (GET_MODE (temp) == BLKmode)
5489 emit_block_move (target, temp, expr_size (exp),
5490 (call_param_p
5491 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5492 /* If we emit a nontemporal store, there is nothing else to do. */
5493 else if (nontemporal && emit_storent_insn (target, temp))
5495 else
5497 temp = force_operand (temp, target);
5498 if (temp != target)
5499 emit_move_insn (target, temp);
5503 return NULL_RTX;
5506 /* Return true if field F of structure TYPE is a flexible array. */
5508 static bool
5509 flexible_array_member_p (const_tree f, const_tree type)
5511 const_tree tf;
5513 tf = TREE_TYPE (f);
5514 return (DECL_CHAIN (f) == NULL
5515 && TREE_CODE (tf) == ARRAY_TYPE
5516 && TYPE_DOMAIN (tf)
5517 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5518 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5519 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5520 && int_size_in_bytes (type) >= 0);
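/* E.g. the DATA member of "struct s { int n; int data[]; };" satisfies all
of the checks above, whereas an array with a known upper bound, or one
followed by further fields, does not.  */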
5523 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5524 must have in order for it to completely initialize a value of type TYPE.
5525 Return -1 if the number isn't known.
5527 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5529 static HOST_WIDE_INT
5530 count_type_elements (const_tree type, bool for_ctor_p)
5532 switch (TREE_CODE (type))
5534 case ARRAY_TYPE:
5536 tree nelts;
5538 nelts = array_type_nelts (type);
5539 if (nelts && tree_fits_uhwi_p (nelts))
5541 unsigned HOST_WIDE_INT n;
5543 n = tree_to_uhwi (nelts) + 1;
5544 if (n == 0 || for_ctor_p)
5545 return n;
5546 else
5547 return n * count_type_elements (TREE_TYPE (type), false);
5549 return for_ctor_p ? -1 : 1;
5552 case RECORD_TYPE:
5554 unsigned HOST_WIDE_INT n;
5555 tree f;
5557 n = 0;
5558 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5559 if (TREE_CODE (f) == FIELD_DECL)
5561 if (!for_ctor_p)
5562 n += count_type_elements (TREE_TYPE (f), false);
5563 else if (!flexible_array_member_p (f, type))
5564 /* Don't count flexible arrays, which are not supposed
5565 to be initialized. */
5566 n += 1;
5569 return n;
5572 case UNION_TYPE:
5573 case QUAL_UNION_TYPE:
5575 tree f;
5576 HOST_WIDE_INT n, m;
5578 gcc_assert (!for_ctor_p);
5579 /* Estimate the number of scalars in each field and pick the
5580 maximum. Other estimates would do instead; the idea is simply
5581 to make sure that the estimate is not sensitive to the ordering
5582 of the fields. */
5583 n = 1;
5584 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5585 if (TREE_CODE (f) == FIELD_DECL)
5587 m = count_type_elements (TREE_TYPE (f), false);
5588 /* If the field doesn't span the whole union, add an extra
5589 scalar for the rest. */
5590 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5591 TYPE_SIZE (type)) != 1)
5592 m++;
5593 if (n < m)
5594 n = m;
5596 return n;
5599 case COMPLEX_TYPE:
5600 return 2;
5602 case VECTOR_TYPE:
5603 return TYPE_VECTOR_SUBPARTS (type);
5605 case INTEGER_TYPE:
5606 case REAL_TYPE:
5607 case FIXED_POINT_TYPE:
5608 case ENUMERAL_TYPE:
5609 case BOOLEAN_TYPE:
5610 case POINTER_TYPE:
5611 case OFFSET_TYPE:
5612 case REFERENCE_TYPE:
5613 case NULLPTR_TYPE:
5614 return 1;
5616 case ERROR_MARK:
5617 return 0;
5619 case VOID_TYPE:
5620 case METHOD_TYPE:
5621 case FUNCTION_TYPE:
5622 case LANG_TYPE:
5623 default:
5624 gcc_unreachable ();
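/* A small worked example for count_type_elements (types invented):
   given

       struct pt { int x; double d[4]; };

   count_type_elements (pt, true) is 2, one per top-level field a
   constructor must provide, while count_type_elements (pt, false)
   estimates 5 scalars: 1 for X plus 4 for the elements of D.  */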
5628 /* Helper for categorize_ctor_elements. Identical interface. */
5630 static bool
5631 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5632 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5634 unsigned HOST_WIDE_INT idx;
5635 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5636 tree value, purpose, elt_type;
5638 /* Whether CTOR is a valid constant initializer, in accordance with what
5639 initializer_constant_valid_p does. If inferred from the constructor
5640 elements, true until proven otherwise. */
5641 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5642 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5644 nz_elts = 0;
5645 init_elts = 0;
5646 num_fields = 0;
5647 elt_type = NULL_TREE;
5649 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5651 HOST_WIDE_INT mult = 1;
5653 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5655 tree lo_index = TREE_OPERAND (purpose, 0);
5656 tree hi_index = TREE_OPERAND (purpose, 1);
5658 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5659 mult = (tree_to_uhwi (hi_index)
5660 - tree_to_uhwi (lo_index) + 1);
5662 num_fields += mult;
5663 elt_type = TREE_TYPE (value);
5665 switch (TREE_CODE (value))
5667 case CONSTRUCTOR:
5669 HOST_WIDE_INT nz = 0, ic = 0;
5671 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5672 p_complete);
5674 nz_elts += mult * nz;
5675 init_elts += mult * ic;
5677 if (const_from_elts_p && const_p)
5678 const_p = const_elt_p;
5680 break;
5682 case INTEGER_CST:
5683 case REAL_CST:
5684 case FIXED_CST:
5685 if (!initializer_zerop (value))
5686 nz_elts += mult;
5687 init_elts += mult;
5688 break;
5690 case STRING_CST:
5691 nz_elts += mult * TREE_STRING_LENGTH (value);
5692 init_elts += mult * TREE_STRING_LENGTH (value);
5693 break;
5695 case COMPLEX_CST:
5696 if (!initializer_zerop (TREE_REALPART (value)))
5697 nz_elts += mult;
5698 if (!initializer_zerop (TREE_IMAGPART (value)))
5699 nz_elts += mult;
5700 init_elts += mult;
5701 break;
5703 case VECTOR_CST:
5705 unsigned i;
5706 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5708 tree v = VECTOR_CST_ELT (value, i);
5709 if (!initializer_zerop (v))
5710 nz_elts += mult;
5711 init_elts += mult;
5714 break;
5716 default:
5718 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5719 nz_elts += mult * tc;
5720 init_elts += mult * tc;
5722 if (const_from_elts_p && const_p)
5723 const_p = initializer_constant_valid_p (value, elt_type)
5724 != NULL_TREE;
5726 break;
5730 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5731 num_fields, elt_type))
5732 *p_complete = false;
5734 *p_nz_elts += nz_elts;
5735 *p_init_elts += init_elts;
5737 return const_p;
5740 /* Examine CTOR to discover:
5741 * how many scalar fields are set to nonzero values,
5742 and place it in *P_NZ_ELTS;
5743 * how many scalar fields in total are in CTOR,
5744 and place it in *P_INIT_ELTS.
5745 * whether the constructor is complete -- in the sense that every
5746 meaningful byte is explicitly given a value --
5747 and place it in *P_COMPLETE.
5749 Return whether or not CTOR is a valid static constant initializer, the same
5750 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5752 bool
5753 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5754 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5756 *p_nz_elts = 0;
5757 *p_init_elts = 0;
5758 *p_complete = true;
5760 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
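/* Example of the bookkeeping above (a sketch, assuming the front end
   records exactly the explicit elements): for

       int v[6] = { 3, 5, 7 };

   both *P_NZ_ELTS and *P_INIT_ELTS end up as 3, and *P_COMPLETE
   becomes false because a complete initializer for int[6] needs six
   top-level elements (see complete_ctor_at_level_p below).  */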
5763 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5764 of which had type LAST_TYPE. Each element was itself a complete
5765 initializer, in the sense that every meaningful byte was explicitly
5766 given a value. Return true if the same is true for the constructor
5767 as a whole. */
5769 bool
5770 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5771 const_tree last_type)
5773 if (TREE_CODE (type) == UNION_TYPE
5774 || TREE_CODE (type) == QUAL_UNION_TYPE)
5776 if (num_elts == 0)
5777 return false;
5779 gcc_assert (num_elts == 1 && last_type);
5781 /* ??? We could look at each element of the union, and find the
5782 largest element. Which would avoid comparing the size of the
5783 initialized element against any tail padding in the union.
5784 Doesn't seem worth the effort... */
5785 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5788 return count_type_elements (type, true) == num_elts;
5791 /* Return 1 if EXP contains mostly (3/4) zeros. */
5793 static int
5794 mostly_zeros_p (const_tree exp)
5796 if (TREE_CODE (exp) == CONSTRUCTOR)
5798 HOST_WIDE_INT nz_elts, init_elts;
5799 bool complete_p;
5801 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5802 return !complete_p || nz_elts < init_elts / 4;
5805 return initializer_zerop (exp);
5808 /* Return 1 if EXP contains all zeros. */
5810 static int
5811 all_zeros_p (const_tree exp)
5813 if (TREE_CODE (exp) == CONSTRUCTOR)
5815 HOST_WIDE_INT nz_elts, init_elts;
5816 bool complete_p;
5818 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5819 return nz_elts == 0;
5822 return initializer_zerop (exp);
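/* Quick intuition for the two predicates above (illustrative): with

       int v[8] = { [2] = 1 };

   the constructor does not cover all eight elements, so
   mostly_zeros_p would normally return true, while all_zeros_p
   returns false because one element is nonzero.  */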
5825 /* Helper function for store_constructor.
5826 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5827 CLEARED is as for store_constructor.
5828 ALIAS_SET is the alias set to use for any stores.
5830 This provides a recursive shortcut back to store_constructor when it isn't
5831 necessary to go through store_field. This is so that we can pass through
5832 the cleared field to let store_constructor know that we may not have to
5833 clear a substructure if the outer structure has already been cleared. */
5835 static void
5836 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5837 HOST_WIDE_INT bitpos, machine_mode mode,
5838 tree exp, int cleared, alias_set_type alias_set)
5840 if (TREE_CODE (exp) == CONSTRUCTOR
5841 /* We can only call store_constructor recursively if the size and
5842 bit position are on a byte boundary. */
5843 && bitpos % BITS_PER_UNIT == 0
5844 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5845 /* If we have a nonzero bitpos for a register target, then we just
5846 let store_field do the bitfield handling. This is unlikely to
5847 generate unnecessary clear instructions anyway. */
5848 && (bitpos == 0 || MEM_P (target)))
5850 if (MEM_P (target))
5851 target
5852 = adjust_address (target,
5853 GET_MODE (target) == BLKmode
5854 || 0 != (bitpos
5855 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5856 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5859 /* Update the alias set, if required. */
5860 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5861 && MEM_ALIAS_SET (target) != 0)
5863 target = copy_rtx (target);
5864 set_mem_alias_set (target, alias_set);
5867 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5869 else
5870 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5874 /* Returns the number of FIELD_DECLs in TYPE. */
5876 static int
5877 fields_length (const_tree type)
5879 tree t = TYPE_FIELDS (type);
5880 int count = 0;
5882 for (; t; t = DECL_CHAIN (t))
5883 if (TREE_CODE (t) == FIELD_DECL)
5884 ++count;
5886 return count;
5890 /* Store the value of constructor EXP into the rtx TARGET.
5891 TARGET is either a REG or a MEM; we know it cannot conflict, since
5892 safe_from_p has been called.
5893 CLEARED is true if TARGET is known to have been zero'd.
5894 SIZE is the number of bytes of TARGET we are allowed to modify: this
5895 may not be the same as the size of EXP if we are assigning to a field
5896 which has been packed to exclude padding bits. */
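/* A sketch of the clearing strategy used below: when the constructor
   leaves gaps or is mostly zero, it is usually cheaper to clear the
   whole object once and then store only the interesting elements.
   For instance (assuming 4-byte int),

       int a[100] = { [3] = 1, [97] = 2 };

   would typically expand to one block clear of 400 bytes followed by
   two scalar stores, rather than 100 individual element stores.  */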
5898 static void
5899 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5901 tree type = TREE_TYPE (exp);
5902 #ifdef WORD_REGISTER_OPERATIONS
5903 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5904 #endif
5906 switch (TREE_CODE (type))
5908 case RECORD_TYPE:
5909 case UNION_TYPE:
5910 case QUAL_UNION_TYPE:
5912 unsigned HOST_WIDE_INT idx;
5913 tree field, value;
5915 /* If size is zero or the target is already cleared, do nothing. */
5916 if (size == 0 || cleared)
5917 cleared = 1;
5918 /* We either clear the aggregate or indicate the value is dead. */
5919 else if ((TREE_CODE (type) == UNION_TYPE
5920 || TREE_CODE (type) == QUAL_UNION_TYPE)
5921 && ! CONSTRUCTOR_ELTS (exp))
5922 /* If the constructor is empty, clear the union. */
5924 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5925 cleared = 1;
5928 /* If we are building a static constructor into a register,
5929 set the initial value as zero so we can fold the value into
5930 a constant. But if more than one register is involved,
5931 this probably loses. */
5932 else if (REG_P (target) && TREE_STATIC (exp)
5933 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5935 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5936 cleared = 1;
5939 /* If the constructor has fewer fields than the structure or
5940 if we are initializing the structure to mostly zeros, clear
5941 the whole structure first. Don't do this if TARGET is a
5942 register whose mode size isn't equal to SIZE since
5943 clear_storage can't handle this case. */
5944 else if (size > 0
5945 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5946 != fields_length (type))
5947 || mostly_zeros_p (exp))
5948 && (!REG_P (target)
5949 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5950 == size)))
5952 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5953 cleared = 1;
5956 if (REG_P (target) && !cleared)
5957 emit_clobber (target);
5959 /* Store each element of the constructor into the
5960 corresponding field of TARGET. */
5961 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5963 machine_mode mode;
5964 HOST_WIDE_INT bitsize;
5965 HOST_WIDE_INT bitpos = 0;
5966 tree offset;
5967 rtx to_rtx = target;
5969 /* Just ignore missing fields. We cleared the whole
5970 structure, above, if any fields are missing. */
5971 if (field == 0)
5972 continue;
5974 if (cleared && initializer_zerop (value))
5975 continue;
5977 if (tree_fits_uhwi_p (DECL_SIZE (field)))
5978 bitsize = tree_to_uhwi (DECL_SIZE (field));
5979 else
5980 bitsize = -1;
5982 mode = DECL_MODE (field);
5983 if (DECL_BIT_FIELD (field))
5984 mode = VOIDmode;
5986 offset = DECL_FIELD_OFFSET (field);
5987 if (tree_fits_shwi_p (offset)
5988 && tree_fits_shwi_p (bit_position (field)))
5990 bitpos = int_bit_position (field);
5991 offset = 0;
5993 else
5994 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
5996 if (offset)
5998 machine_mode address_mode;
5999 rtx offset_rtx;
6001 offset
6002 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6003 make_tree (TREE_TYPE (exp),
6004 target));
6006 offset_rtx = expand_normal (offset);
6007 gcc_assert (MEM_P (to_rtx));
6009 address_mode = get_address_mode (to_rtx);
6010 if (GET_MODE (offset_rtx) != address_mode)
6011 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6013 to_rtx = offset_address (to_rtx, offset_rtx,
6014 highest_pow2_factor (offset));
6017 #ifdef WORD_REGISTER_OPERATIONS
6018 /* If this initializes a field that is smaller than a
6019 word, at the start of a word, try to widen it to a full
6020 word. This special case allows us to output C++ member
6021 function initializations in a form that the optimizers
6022 can understand. */
6023 if (REG_P (target)
6024 && bitsize < BITS_PER_WORD
6025 && bitpos % BITS_PER_WORD == 0
6026 && GET_MODE_CLASS (mode) == MODE_INT
6027 && TREE_CODE (value) == INTEGER_CST
6028 && exp_size >= 0
6029 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6031 tree type = TREE_TYPE (value);
6033 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6035 type = lang_hooks.types.type_for_mode
6036 (word_mode, TYPE_UNSIGNED (type));
6037 value = fold_convert (type, value);
6040 if (BYTES_BIG_ENDIAN)
6041 value
6042 = fold_build2 (LSHIFT_EXPR, type, value,
6043 build_int_cst (type,
6044 BITS_PER_WORD - bitsize));
6045 bitsize = BITS_PER_WORD;
6046 mode = word_mode;
6048 #endif
6050 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6051 && DECL_NONADDRESSABLE_P (field))
6053 to_rtx = copy_rtx (to_rtx);
6054 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6057 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6058 value, cleared,
6059 get_alias_set (TREE_TYPE (field)));
6061 break;
6063 case ARRAY_TYPE:
6065 tree value, index;
6066 unsigned HOST_WIDE_INT i;
6067 int need_to_clear;
6068 tree domain;
6069 tree elttype = TREE_TYPE (type);
6070 int const_bounds_p;
6071 HOST_WIDE_INT minelt = 0;
6072 HOST_WIDE_INT maxelt = 0;
6074 domain = TYPE_DOMAIN (type);
6075 const_bounds_p = (TYPE_MIN_VALUE (domain)
6076 && TYPE_MAX_VALUE (domain)
6077 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6078 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6080 /* If we have constant bounds for the range of the type, get them. */
6081 if (const_bounds_p)
6083 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6084 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6087 /* If the constructor has fewer elements than the array, clear
6088 the whole array first. Similarly if this is a static
6089 constructor of a non-BLKmode object. */
6090 if (cleared)
6091 need_to_clear = 0;
6092 else if (REG_P (target) && TREE_STATIC (exp))
6093 need_to_clear = 1;
6094 else
6096 unsigned HOST_WIDE_INT idx;
6097 tree index, value;
6098 HOST_WIDE_INT count = 0, zero_count = 0;
6099 need_to_clear = ! const_bounds_p;
6101 /* This loop is a more accurate version of the loop in
6102 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6103 is also needed to check for missing elements. */
6104 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6106 HOST_WIDE_INT this_node_count;
6108 if (need_to_clear)
6109 break;
6111 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6113 tree lo_index = TREE_OPERAND (index, 0);
6114 tree hi_index = TREE_OPERAND (index, 1);
6116 if (! tree_fits_uhwi_p (lo_index)
6117 || ! tree_fits_uhwi_p (hi_index))
6119 need_to_clear = 1;
6120 break;
6123 this_node_count = (tree_to_uhwi (hi_index)
6124 - tree_to_uhwi (lo_index) + 1);
6126 else
6127 this_node_count = 1;
6129 count += this_node_count;
6130 if (mostly_zeros_p (value))
6131 zero_count += this_node_count;
6134 /* Clear the entire array first if there are any missing
6135 elements, or if the incidence of zero elements is >=
6136 75%. */
6137 if (! need_to_clear
6138 && (count < maxelt - minelt + 1
6139 || 4 * zero_count >= 3 * count))
6140 need_to_clear = 1;
6143 if (need_to_clear && size > 0)
6145 if (REG_P (target))
6146 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6147 else
6148 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6149 cleared = 1;
6152 if (!cleared && REG_P (target))
6153 /* Inform later passes that the old value is dead. */
6154 emit_clobber (target);
6156 /* Store each element of the constructor into the
6157 corresponding element of TARGET, determined by counting the
6158 elements. */
6159 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6161 machine_mode mode;
6162 HOST_WIDE_INT bitsize;
6163 HOST_WIDE_INT bitpos;
6164 rtx xtarget = target;
6166 if (cleared && initializer_zerop (value))
6167 continue;
6169 mode = TYPE_MODE (elttype);
6170 if (mode == BLKmode)
6171 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6172 ? tree_to_uhwi (TYPE_SIZE (elttype))
6173 : -1);
6174 else
6175 bitsize = GET_MODE_BITSIZE (mode);
6177 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6179 tree lo_index = TREE_OPERAND (index, 0);
6180 tree hi_index = TREE_OPERAND (index, 1);
6181 rtx index_r, pos_rtx;
6182 HOST_WIDE_INT lo, hi, count;
6183 tree position;
6185 /* If the range is constant and "small", unroll the loop. */
6186 if (const_bounds_p
6187 && tree_fits_shwi_p (lo_index)
6188 && tree_fits_shwi_p (hi_index)
6189 && (lo = tree_to_shwi (lo_index),
6190 hi = tree_to_shwi (hi_index),
6191 count = hi - lo + 1,
6192 (!MEM_P (target)
6193 || count <= 2
6194 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6195 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6196 <= 40 * 8)))))
6198 lo -= minelt; hi -= minelt;
6199 for (; lo <= hi; lo++)
6201 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6203 if (MEM_P (target)
6204 && !MEM_KEEP_ALIAS_SET_P (target)
6205 && TREE_CODE (type) == ARRAY_TYPE
6206 && TYPE_NONALIASED_COMPONENT (type))
6208 target = copy_rtx (target);
6209 MEM_KEEP_ALIAS_SET_P (target) = 1;
6212 store_constructor_field
6213 (target, bitsize, bitpos, mode, value, cleared,
6214 get_alias_set (elttype));
6217 else
6219 rtx_code_label *loop_start = gen_label_rtx ();
6220 rtx_code_label *loop_end = gen_label_rtx ();
6221 tree exit_cond;
6223 expand_normal (hi_index);
6225 index = build_decl (EXPR_LOCATION (exp),
6226 VAR_DECL, NULL_TREE, domain);
6227 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6228 SET_DECL_RTL (index, index_r);
6229 store_expr (lo_index, index_r, 0, false);
6231 /* Build the head of the loop. */
6232 do_pending_stack_adjust ();
6233 emit_label (loop_start);
6235 /* Assign value to element index. */
6236 position =
6237 fold_convert (ssizetype,
6238 fold_build2 (MINUS_EXPR,
6239 TREE_TYPE (index),
6240 index,
6241 TYPE_MIN_VALUE (domain)));
6243 position =
6244 size_binop (MULT_EXPR, position,
6245 fold_convert (ssizetype,
6246 TYPE_SIZE_UNIT (elttype)));
6248 pos_rtx = expand_normal (position);
6249 xtarget = offset_address (target, pos_rtx,
6250 highest_pow2_factor (position));
6251 xtarget = adjust_address (xtarget, mode, 0);
6252 if (TREE_CODE (value) == CONSTRUCTOR)
6253 store_constructor (value, xtarget, cleared,
6254 bitsize / BITS_PER_UNIT);
6255 else
6256 store_expr (value, xtarget, 0, false);
6258 /* Generate a conditional jump to exit the loop. */
6259 exit_cond = build2 (LT_EXPR, integer_type_node,
6260 index, hi_index);
6261 jumpif (exit_cond, loop_end, -1);
6263 /* Update the loop counter, and jump to the head of
6264 the loop. */
6265 expand_assignment (index,
6266 build2 (PLUS_EXPR, TREE_TYPE (index),
6267 index, integer_one_node),
6268 false);
6270 emit_jump (loop_start);
6272 /* Build the end of the loop. */
6273 emit_label (loop_end);
6276 else if ((index != 0 && ! tree_fits_shwi_p (index))
6277 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6279 tree position;
6281 if (index == 0)
6282 index = ssize_int (1);
6284 if (minelt)
6285 index = fold_convert (ssizetype,
6286 fold_build2 (MINUS_EXPR,
6287 TREE_TYPE (index),
6288 index,
6289 TYPE_MIN_VALUE (domain)));
6291 position =
6292 size_binop (MULT_EXPR, index,
6293 fold_convert (ssizetype,
6294 TYPE_SIZE_UNIT (elttype)));
6295 xtarget = offset_address (target,
6296 expand_normal (position),
6297 highest_pow2_factor (position));
6298 xtarget = adjust_address (xtarget, mode, 0);
6299 store_expr (value, xtarget, 0, false);
6301 else
6303 if (index != 0)
6304 bitpos = ((tree_to_shwi (index) - minelt)
6305 * tree_to_uhwi (TYPE_SIZE (elttype)));
6306 else
6307 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6309 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6310 && TREE_CODE (type) == ARRAY_TYPE
6311 && TYPE_NONALIASED_COMPONENT (type))
6313 target = copy_rtx (target);
6314 MEM_KEEP_ALIAS_SET_P (target) = 1;
6316 store_constructor_field (target, bitsize, bitpos, mode, value,
6317 cleared, get_alias_set (elttype));
6320 break;
6323 case VECTOR_TYPE:
6325 unsigned HOST_WIDE_INT idx;
6326 constructor_elt *ce;
6327 int i;
6328 int need_to_clear;
6329 int icode = CODE_FOR_nothing;
6330 tree elttype = TREE_TYPE (type);
6331 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6332 machine_mode eltmode = TYPE_MODE (elttype);
6333 HOST_WIDE_INT bitsize;
6334 HOST_WIDE_INT bitpos;
6335 rtvec vector = NULL;
6336 unsigned n_elts;
6337 alias_set_type alias;
6339 gcc_assert (eltmode != BLKmode);
6341 n_elts = TYPE_VECTOR_SUBPARTS (type);
6342 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6344 machine_mode mode = GET_MODE (target);
6346 icode = (int) optab_handler (vec_init_optab, mode);
6347 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6348 if (icode != CODE_FOR_nothing)
6350 tree value;
6352 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6353 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6355 icode = CODE_FOR_nothing;
6356 break;
6359 if (icode != CODE_FOR_nothing)
6361 unsigned int i;
6363 vector = rtvec_alloc (n_elts);
6364 for (i = 0; i < n_elts; i++)
6365 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6369 /* If the constructor has fewer elements than the vector,
6370 clear the whole array first. Similarly if this is a static
6371 constructor of a non-BLKmode object. */
6372 if (cleared)
6373 need_to_clear = 0;
6374 else if (REG_P (target) && TREE_STATIC (exp))
6375 need_to_clear = 1;
6376 else
6378 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6379 tree value;
6381 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6383 int n_elts_here = tree_to_uhwi
6384 (int_const_binop (TRUNC_DIV_EXPR,
6385 TYPE_SIZE (TREE_TYPE (value)),
6386 TYPE_SIZE (elttype)));
6388 count += n_elts_here;
6389 if (mostly_zeros_p (value))
6390 zero_count += n_elts_here;
6393 /* Clear the entire vector first if there are any missing elements,
6394 or if the incidence of zero elements is >= 75%. */
6395 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6398 if (need_to_clear && size > 0 && !vector)
6400 if (REG_P (target))
6401 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6402 else
6403 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6404 cleared = 1;
6407 /* Inform later passes that the old value is dead. */
6408 if (!cleared && !vector && REG_P (target))
6409 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6411 if (MEM_P (target))
6412 alias = MEM_ALIAS_SET (target);
6413 else
6414 alias = get_alias_set (elttype);
6416 /* Store each element of the constructor into the corresponding
6417 element of TARGET, determined by counting the elements. */
6418 for (idx = 0, i = 0;
6419 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6420 idx++, i += bitsize / elt_size)
6422 HOST_WIDE_INT eltpos;
6423 tree value = ce->value;
6425 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6426 if (cleared && initializer_zerop (value))
6427 continue;
6429 if (ce->index)
6430 eltpos = tree_to_uhwi (ce->index);
6431 else
6432 eltpos = i;
6434 if (vector)
6436 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6437 elements. */
6438 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6439 RTVEC_ELT (vector, eltpos)
6440 = expand_normal (value);
6442 else
6444 machine_mode value_mode =
6445 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6446 ? TYPE_MODE (TREE_TYPE (value))
6447 : eltmode;
6448 bitpos = eltpos * elt_size;
6449 store_constructor_field (target, bitsize, bitpos, value_mode,
6450 value, cleared, alias);
6454 if (vector)
6455 emit_insn (GEN_FCN (icode)
6456 (target,
6457 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6458 break;
6461 default:
6462 gcc_unreachable ();
6466 /* Store the value of EXP (an expression tree)
6467 into a subfield of TARGET which has mode MODE and occupies
6468 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6469 If MODE is VOIDmode, it means that we are storing into a bit-field.
6471 BITREGION_START is the bitpos of the first bitfield in this region.
6472 BITREGION_END is the bitpos of the ending bitfield in this region.
6473 These two fields are 0, if the C++ memory model does not apply,
6474 or we are not interested in keeping track of bitfield regions.
6476 Always return const0_rtx unless we have something particular to
6477 return.
6479 ALIAS_SET is the alias set for the destination. This value will
6480 (in general) be different from that for TARGET, since TARGET is a
6481 reference to the containing structure.
6483 If NONTEMPORAL is true, try generating a nontemporal store. */
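/* Rough example of the parameters above (layout numbers depend on
   the target): storing to B in

       struct { char a; int b : 12; int c : 4; } s;

   might arrive here with BITSIZE 12, BITPOS 8 and MODE VOIDmode
   (B is a bit-field), with BITREGION_START/BITREGION_END delimiting
   the bytes that B and C share so the store does not touch storage
   outside that region under the C++ memory model.  */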
6485 static rtx
6486 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6487 unsigned HOST_WIDE_INT bitregion_start,
6488 unsigned HOST_WIDE_INT bitregion_end,
6489 machine_mode mode, tree exp,
6490 alias_set_type alias_set, bool nontemporal)
6492 if (TREE_CODE (exp) == ERROR_MARK)
6493 return const0_rtx;
6495 /* If we have nothing to store, do nothing unless the expression has
6496 side-effects. */
6497 if (bitsize == 0)
6498 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6500 if (GET_CODE (target) == CONCAT)
6502 /* We're storing into a struct containing a single __complex. */
6504 gcc_assert (!bitpos);
6505 return store_expr (exp, target, 0, nontemporal);
6508 /* If the structure is in a register or if the component
6509 is a bit field, we cannot use addressing to access it.
6510 Use bit-field techniques or SUBREG to store in it. */
6512 if (mode == VOIDmode
6513 || (mode != BLKmode && ! direct_store[(int) mode]
6514 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6515 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6516 || REG_P (target)
6517 || GET_CODE (target) == SUBREG
6518 /* If the field isn't aligned enough to store as an ordinary memref,
6519 store it as a bit field. */
6520 || (mode != BLKmode
6521 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6522 || bitpos % GET_MODE_ALIGNMENT (mode))
6523 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6524 || (bitpos % BITS_PER_UNIT != 0)))
6525 || (bitsize >= 0 && mode != BLKmode
6526 && GET_MODE_BITSIZE (mode) > bitsize)
6527 /* If the RHS and field are a constant size and the size of the
6528 RHS isn't the same size as the bitfield, we must use bitfield
6529 operations. */
6530 || (bitsize >= 0
6531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6532 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6533 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6534 decl we must use bitfield operations. */
6535 || (bitsize >= 0
6536 && TREE_CODE (exp) == MEM_REF
6537 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6538 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6539 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6540 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6542 rtx temp;
6543 gimple nop_def;
6545 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6546 implies a mask operation. If the precision is the same size as
6547 the field we're storing into, that mask is redundant. This is
6548 particularly common with bit field assignments generated by the
6549 C front end. */
6550 nop_def = get_def_for_expr (exp, NOP_EXPR);
6551 if (nop_def)
6553 tree type = TREE_TYPE (exp);
6554 if (INTEGRAL_TYPE_P (type)
6555 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6556 && bitsize == TYPE_PRECISION (type))
6558 tree op = gimple_assign_rhs1 (nop_def);
6559 type = TREE_TYPE (op);
6560 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6561 exp = op;
6565 temp = expand_normal (exp);
6567 /* If BITSIZE is narrower than the size of the type of EXP
6568 we will be narrowing TEMP. Normally, what's wanted are the
6569 low-order bits. However, if EXP's type is a record and this is a
6570 big-endian machine, we want the upper BITSIZE bits. */
6571 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6572 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6573 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6574 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6575 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6576 NULL_RTX, 1);
6578 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6579 if (mode != VOIDmode && mode != BLKmode
6580 && mode != TYPE_MODE (TREE_TYPE (exp)))
6581 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6583 /* If the modes of TEMP and TARGET are both BLKmode, both
6584 must be in memory and BITPOS must be aligned on a byte
6585 boundary. If so, we simply do a block copy. Likewise
6586 for a BLKmode-like TARGET. */
6587 if (GET_MODE (temp) == BLKmode
6588 && (GET_MODE (target) == BLKmode
6589 || (MEM_P (target)
6590 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6591 && (bitpos % BITS_PER_UNIT) == 0
6592 && (bitsize % BITS_PER_UNIT) == 0)))
6594 gcc_assert (MEM_P (target) && MEM_P (temp)
6595 && (bitpos % BITS_PER_UNIT) == 0);
6597 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6598 emit_block_move (target, temp,
6599 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6600 / BITS_PER_UNIT),
6601 BLOCK_OP_NORMAL);
6603 return const0_rtx;
6606 /* Handle calls that return values in multiple non-contiguous locations.
6607 The Irix 6 ABI has examples of this. */
6608 if (GET_CODE (temp) == PARALLEL)
6610 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6611 rtx temp_target;
6612 if (mode == BLKmode || mode == VOIDmode)
6613 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6614 temp_target = gen_reg_rtx (mode);
6615 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6616 temp = temp_target;
6618 else if (mode == BLKmode)
6620 /* Handle calls that return BLKmode values in registers. */
6621 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6623 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6624 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6625 temp = temp_target;
6627 else
6629 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6630 rtx temp_target;
6631 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6632 temp_target = gen_reg_rtx (mode);
6633 temp_target
6634 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6635 temp_target, mode, mode);
6636 temp = temp_target;
6640 /* Store the value in the bitfield. */
6641 store_bit_field (target, bitsize, bitpos,
6642 bitregion_start, bitregion_end,
6643 mode, temp);
6645 return const0_rtx;
6647 else
6649 /* Now build a reference to just the desired component. */
6650 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6652 if (to_rtx == target)
6653 to_rtx = copy_rtx (to_rtx);
6655 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6656 set_mem_alias_set (to_rtx, alias_set);
6658 return store_expr (exp, to_rtx, 0, nontemporal);
6662 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6663 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6664 codes and find the ultimate containing object, which we return.
6666 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6667 bit position, and *PUNSIGNEDP to the signedness of the field.
6668 If the position of the field is variable, we store a tree
6669 giving the variable offset (in units) in *POFFSET.
6670 This offset is in addition to the bit position.
6671 If the position is not variable, we store 0 in *POFFSET.
6673 If any of the extraction expressions is volatile,
6674 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6676 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6677 Otherwise, it is a mode that can be used to access the field.
6679 If the field describes a variable-sized object, *PMODE is set to
6680 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6681 this case, but the address of the object can be found.
6683 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6684 look through nodes that serve as markers of a greater alignment than
6685 the one that can be deduced from the expression. These nodes make it
6686 possible for front-ends to prevent temporaries from being created by
6687 the middle-end on alignment considerations. For that purpose, the
6688 normal operating mode at the high level is to always pass FALSE so that
6689 the ultimate containing object is really returned; moreover, the
6690 associated predicate handled_component_p will always return TRUE
6691 on these nodes, thus indicating that they are essentially handled
6692 by get_inner_reference. TRUE should only be passed when the caller
6693 is scanning the expression in order to build another representation
6694 and specifically knows how to handle these nodes; as such, this is
6695 the normal operating mode in the RTL expanders. */
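/* For intuition (a sketch; exact values depend on the layout): given
   a reference like P->a[i].b where B is a 16-bit field, this routine
   walks the COMPONENT_REF and ARRAY_REF nodes, returns the innermost
   base object, sets *PBITSIZE to 16, folds the constant part of the
   displacement into *PBITPOS, and leaves the variable part (I times
   the element size) in *POFFSET.  */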
6697 tree
6698 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6699 HOST_WIDE_INT *pbitpos, tree *poffset,
6700 machine_mode *pmode, int *punsignedp,
6701 int *pvolatilep, bool keep_aligning)
6703 tree size_tree = 0;
6704 machine_mode mode = VOIDmode;
6705 bool blkmode_bitfield = false;
6706 tree offset = size_zero_node;
6707 offset_int bit_offset = 0;
6709 /* First get the mode, signedness, and size. We do this from just the
6710 outermost expression. */
6711 *pbitsize = -1;
6712 if (TREE_CODE (exp) == COMPONENT_REF)
6714 tree field = TREE_OPERAND (exp, 1);
6715 size_tree = DECL_SIZE (field);
6716 if (flag_strict_volatile_bitfields > 0
6717 && TREE_THIS_VOLATILE (exp)
6718 && DECL_BIT_FIELD_TYPE (field)
6719 && DECL_MODE (field) != BLKmode)
6720 /* Volatile bitfields should be accessed in the mode of the
6721 field's type, not the mode computed based on the bit
6722 size. */
6723 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6724 else if (!DECL_BIT_FIELD (field))
6725 mode = DECL_MODE (field);
6726 else if (DECL_MODE (field) == BLKmode)
6727 blkmode_bitfield = true;
6729 *punsignedp = DECL_UNSIGNED (field);
6731 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6733 size_tree = TREE_OPERAND (exp, 1);
6734 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6735 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6737 /* For vector types, with the correct size of access, use the mode of
6738 inner type. */
6739 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6740 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6741 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6742 mode = TYPE_MODE (TREE_TYPE (exp));
6744 else
6746 mode = TYPE_MODE (TREE_TYPE (exp));
6747 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6749 if (mode == BLKmode)
6750 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6751 else
6752 *pbitsize = GET_MODE_BITSIZE (mode);
6755 if (size_tree != 0)
6757 if (! tree_fits_uhwi_p (size_tree))
6758 mode = BLKmode, *pbitsize = -1;
6759 else
6760 *pbitsize = tree_to_uhwi (size_tree);
6763 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6764 and find the ultimate containing object. */
6765 while (1)
6767 switch (TREE_CODE (exp))
6769 case BIT_FIELD_REF:
6770 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6771 break;
6773 case COMPONENT_REF:
6775 tree field = TREE_OPERAND (exp, 1);
6776 tree this_offset = component_ref_field_offset (exp);
6778 /* If this field hasn't been filled in yet, don't go past it.
6779 This should only happen when folding expressions made during
6780 type construction. */
6781 if (this_offset == 0)
6782 break;
6784 offset = size_binop (PLUS_EXPR, offset, this_offset);
6785 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6787 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6789 break;
6791 case ARRAY_REF:
6792 case ARRAY_RANGE_REF:
6794 tree index = TREE_OPERAND (exp, 1);
6795 tree low_bound = array_ref_low_bound (exp);
6796 tree unit_size = array_ref_element_size (exp);
6798 /* We assume all arrays have sizes that are a multiple of a byte.
6799 First subtract the lower bound, if any, in the type of the
6800 index, then convert to sizetype and multiply by the size of
6801 the array element. */
6802 if (! integer_zerop (low_bound))
6803 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6804 index, low_bound);
6806 offset = size_binop (PLUS_EXPR, offset,
6807 size_binop (MULT_EXPR,
6808 fold_convert (sizetype, index),
6809 unit_size));
6811 break;
6813 case REALPART_EXPR:
6814 break;
6816 case IMAGPART_EXPR:
6817 bit_offset += *pbitsize;
6818 break;
6820 case VIEW_CONVERT_EXPR:
6821 if (keep_aligning && STRICT_ALIGNMENT
6822 && (TYPE_ALIGN (TREE_TYPE (exp))
6823 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6824 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6825 < BIGGEST_ALIGNMENT)
6826 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6827 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6828 goto done;
6829 break;
6831 case MEM_REF:
6832 /* Hand back the decl for MEM[&decl, off]. */
6833 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6835 tree off = TREE_OPERAND (exp, 1);
6836 if (!integer_zerop (off))
6838 offset_int boff, coff = mem_ref_offset (exp);
6839 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6840 bit_offset += boff;
6842 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6844 goto done;
6846 default:
6847 goto done;
6850 /* If any reference in the chain is volatile, the effect is volatile. */
6851 if (TREE_THIS_VOLATILE (exp))
6852 *pvolatilep = 1;
6854 exp = TREE_OPERAND (exp, 0);
6856 done:
6858 /* If OFFSET is constant, see if we can return the whole thing as a
6859 constant bit position. Make sure to handle overflow during
6860 this conversion. */
6861 if (TREE_CODE (offset) == INTEGER_CST)
6863 offset_int tem = wi::sext (wi::to_offset (offset),
6864 TYPE_PRECISION (sizetype));
6865 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6866 tem += bit_offset;
6867 if (wi::fits_shwi_p (tem))
6869 *pbitpos = tem.to_shwi ();
6870 *poffset = offset = NULL_TREE;
6874 /* Otherwise, split it up. */
6875 if (offset)
6877 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6878 if (wi::neg_p (bit_offset))
6880 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6881 offset_int tem = bit_offset.and_not (mask);
6882 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6883 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6884 bit_offset -= tem;
6885 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6886 offset = size_binop (PLUS_EXPR, offset,
6887 wide_int_to_tree (sizetype, tem));
6890 *pbitpos = bit_offset.to_shwi ();
6891 *poffset = offset;
6894 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6895 if (mode == VOIDmode
6896 && blkmode_bitfield
6897 && (*pbitpos % BITS_PER_UNIT) == 0
6898 && (*pbitsize % BITS_PER_UNIT) == 0)
6899 *pmode = BLKmode;
6900 else
6901 *pmode = mode;
6903 return exp;
6906 /* Return a tree of sizetype representing the size, in bytes, of the element
6907 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6909 tree
6910 array_ref_element_size (tree exp)
6912 tree aligned_size = TREE_OPERAND (exp, 3);
6913 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6914 location_t loc = EXPR_LOCATION (exp);
6916 /* If a size was specified in the ARRAY_REF, it's the size measured
6917 in alignment units of the element type. So multiply by that value. */
6918 if (aligned_size)
6920 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6921 sizetype from another type of the same width and signedness. */
6922 if (TREE_TYPE (aligned_size) != sizetype)
6923 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6924 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6925 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6928 /* Otherwise, take the size from that of the element type. Substitute
6929 any PLACEHOLDER_EXPR that we have. */
6930 else
6931 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6934 /* Return a tree representing the lower bound of the array mentioned in
6935 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6937 tree
6938 array_ref_low_bound (tree exp)
6940 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6942 /* If a lower bound is specified in EXP, use it. */
6943 if (TREE_OPERAND (exp, 2))
6944 return TREE_OPERAND (exp, 2);
6946 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6947 substituting for a PLACEHOLDER_EXPR as needed. */
6948 if (domain_type && TYPE_MIN_VALUE (domain_type))
6949 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6951 /* Otherwise, return a zero of the appropriate type. */
6952 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6955 /* Returns true if REF is an array reference to an array at the end of
6956 a structure. If this is the case, the array may be allocated larger
6957 than its upper bound implies. */
6959 bool
6960 array_at_struct_end_p (tree ref)
6962 if (TREE_CODE (ref) != ARRAY_REF
6963 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6964 return false;
6966 while (handled_component_p (ref))
6968 /* If the reference chain contains a component reference to a
6969 non-union type and there follows another field the reference
6970 is not at the end of a structure. */
6971 if (TREE_CODE (ref) == COMPONENT_REF
6972 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6974 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6975 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6976 nextf = DECL_CHAIN (nextf);
6977 if (nextf)
6978 return false;
6981 ref = TREE_OPERAND (ref, 0);
6984 /* If the reference is based on a declared entity, the size of the array
6985 is constrained by its given domain. */
6986 if (DECL_P (ref))
6987 return false;
6989 return true;
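/* Illustrative case (identifiers invented): with

       struct old_flex { int n; char tail[1]; };

   an access P->tail[i] through a pointer is an array reference at
   the end of the structure, so this returns true and callers avoid
   assuming TAIL really has just one element; for a declared object
   of that type the walk reaches the decl and the answer is false.  */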
6992 /* Return a tree representing the upper bound of the array mentioned in
6993 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6995 tree
6996 array_ref_up_bound (tree exp)
6998 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7000 /* If there is a domain type and it has an upper bound, use it, substituting
7001 for a PLACEHOLDER_EXPR as needed. */
7002 if (domain_type && TYPE_MAX_VALUE (domain_type))
7003 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7005 /* Otherwise fail. */
7006 return NULL_TREE;
7009 /* Return a tree representing the offset, in bytes, of the field referenced
7010 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7012 tree
7013 component_ref_field_offset (tree exp)
7015 tree aligned_offset = TREE_OPERAND (exp, 2);
7016 tree field = TREE_OPERAND (exp, 1);
7017 location_t loc = EXPR_LOCATION (exp);
7019 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7020 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7021 value. */
7022 if (aligned_offset)
7024 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7025 sizetype from another type of the same width and signedness. */
7026 if (TREE_TYPE (aligned_offset) != sizetype)
7027 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7028 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7029 size_int (DECL_OFFSET_ALIGN (field)
7030 / BITS_PER_UNIT));
7033 /* Otherwise, take the offset from that of the field. Substitute
7034 any PLACEHOLDER_EXPR that we have. */
7035 else
7036 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7039 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7041 static unsigned HOST_WIDE_INT
7042 target_align (const_tree target)
7044 /* We might have a chain of nested references with intermediate misaligning
7045 bitfield components, so we need to recurse to find out. */
7047 unsigned HOST_WIDE_INT this_align, outer_align;
7049 switch (TREE_CODE (target))
7051 case BIT_FIELD_REF:
7052 return 1;
7054 case COMPONENT_REF:
7055 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7056 outer_align = target_align (TREE_OPERAND (target, 0));
7057 return MIN (this_align, outer_align);
7059 case ARRAY_REF:
7060 case ARRAY_RANGE_REF:
7061 this_align = TYPE_ALIGN (TREE_TYPE (target));
7062 outer_align = target_align (TREE_OPERAND (target, 0));
7063 return MIN (this_align, outer_align);
7065 CASE_CONVERT:
7066 case NON_LVALUE_EXPR:
7067 case VIEW_CONVERT_EXPR:
7068 this_align = TYPE_ALIGN (TREE_TYPE (target));
7069 outer_align = target_align (TREE_OPERAND (target, 0));
7070 return MAX (this_align, outer_align);
7072 default:
7073 return TYPE_ALIGN (TREE_TYPE (target));
7078 /* Given an rtx VALUE that may contain additions and multiplications, return
7079 an equivalent value that just refers to a register, memory, or constant.
7080 This is done by generating instructions to perform the arithmetic and
7081 returning a pseudo-register containing the value.
7083 The returned value may be a REG, SUBREG, MEM or constant. */
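/* A concrete example of what force_operand is for (illustrative):
   if VALUE is (plus:SI (reg:SI 100) (const_int 4)), that PLUS is not
   itself a register, memory or constant, so the code below emits an
   add into a pseudo (or into TARGET when convenient) and returns
   that register for use wherever a plain operand is required.  */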
7085 rtx
7086 force_operand (rtx value, rtx target)
7088 rtx op1, op2;
7089 /* Use subtarget as the target for operand 0 of a binary operation. */
7090 rtx subtarget = get_subtarget (target);
7091 enum rtx_code code = GET_CODE (value);
7093 /* Check for subreg applied to an expression produced by loop optimizer. */
7094 if (code == SUBREG
7095 && !REG_P (SUBREG_REG (value))
7096 && !MEM_P (SUBREG_REG (value)))
7098 value
7099 = simplify_gen_subreg (GET_MODE (value),
7100 force_reg (GET_MODE (SUBREG_REG (value)),
7101 force_operand (SUBREG_REG (value),
7102 NULL_RTX)),
7103 GET_MODE (SUBREG_REG (value)),
7104 SUBREG_BYTE (value));
7105 code = GET_CODE (value);
7108 /* Check for a PIC address load. */
7109 if ((code == PLUS || code == MINUS)
7110 && XEXP (value, 0) == pic_offset_table_rtx
7111 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7112 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7113 || GET_CODE (XEXP (value, 1)) == CONST))
7115 if (!subtarget)
7116 subtarget = gen_reg_rtx (GET_MODE (value));
7117 emit_move_insn (subtarget, value);
7118 return subtarget;
7121 if (ARITHMETIC_P (value))
7123 op2 = XEXP (value, 1);
7124 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7125 subtarget = 0;
7126 if (code == MINUS && CONST_INT_P (op2))
7128 code = PLUS;
7129 op2 = negate_rtx (GET_MODE (value), op2);
7132 /* Check for an addition with OP2 a constant integer and our first
7133 operand a PLUS of a virtual register and something else. In that
7134 case, we want to emit the sum of the virtual register and the
7135 constant first and then add the other value. This allows virtual
7136 register instantiation to simply modify the constant rather than
7137 creating another one around this addition. */
7138 if (code == PLUS && CONST_INT_P (op2)
7139 && GET_CODE (XEXP (value, 0)) == PLUS
7140 && REG_P (XEXP (XEXP (value, 0), 0))
7141 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7142 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7144 rtx temp = expand_simple_binop (GET_MODE (value), code,
7145 XEXP (XEXP (value, 0), 0), op2,
7146 subtarget, 0, OPTAB_LIB_WIDEN);
7147 return expand_simple_binop (GET_MODE (value), code, temp,
7148 force_operand (XEXP (XEXP (value,
7149 0), 1), 0),
7150 target, 0, OPTAB_LIB_WIDEN);
7153 op1 = force_operand (XEXP (value, 0), subtarget);
7154 op2 = force_operand (op2, NULL_RTX);
7155 switch (code)
7157 case MULT:
7158 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7159 case DIV:
7160 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7161 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7162 target, 1, OPTAB_LIB_WIDEN);
7163 else
7164 return expand_divmod (0,
7165 FLOAT_MODE_P (GET_MODE (value))
7166 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7167 GET_MODE (value), op1, op2, target, 0);
7168 case MOD:
7169 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7170 target, 0);
7171 case UDIV:
7172 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7173 target, 1);
7174 case UMOD:
7175 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7176 target, 1);
7177 case ASHIFTRT:
7178 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7179 target, 0, OPTAB_LIB_WIDEN);
7180 default:
7181 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7182 target, 1, OPTAB_LIB_WIDEN);
7185 if (UNARY_P (value))
7187 if (!target)
7188 target = gen_reg_rtx (GET_MODE (value));
7189 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7190 switch (code)
7192 case ZERO_EXTEND:
7193 case SIGN_EXTEND:
7194 case TRUNCATE:
7195 case FLOAT_EXTEND:
7196 case FLOAT_TRUNCATE:
7197 convert_move (target, op1, code == ZERO_EXTEND);
7198 return target;
7200 case FIX:
7201 case UNSIGNED_FIX:
7202 expand_fix (target, op1, code == UNSIGNED_FIX);
7203 return target;
7205 case FLOAT:
7206 case UNSIGNED_FLOAT:
7207 expand_float (target, op1, code == UNSIGNED_FLOAT);
7208 return target;
7210 default:
7211 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7215 #ifdef INSN_SCHEDULING
7216 /* On machines that have insn scheduling, we want all memory references to be
7217 explicit, so we need to deal with such paradoxical SUBREGs. */
7218 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7219 value
7220 = simplify_gen_subreg (GET_MODE (value),
7221 force_reg (GET_MODE (SUBREG_REG (value)),
7222 force_operand (SUBREG_REG (value),
7223 NULL_RTX)),
7224 GET_MODE (SUBREG_REG (value)),
7225 SUBREG_BYTE (value));
7226 #endif
7228 return value;
7231 /* Subroutine of expand_expr: return nonzero iff there is no way that
7232 EXP can reference X, which is being modified. TOP_P is nonzero if this
7233 call is going to be used to determine whether we need a temporary
7234 for EXP, as opposed to a recursive call to this function.
7236 It is always safe for this routine to return zero since it merely
7237 searches for optimization opportunities. */
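/* Intuition, as a sketch: this asks whether X can safely be used as
   a destination while EXP still remains to be evaluated.  For
   example, a CALL_EXPR within EXP is assumed to clobber all hard
   registers and all of memory, so X survives that test only if it
   is a pseudo register and the call's operands are themselves
   safe.  */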
7239 static int
7240 safe_from_p (const_rtx x, tree exp, int top_p)
7242 rtx exp_rtl = 0;
7243 int i, nops;
7245 if (x == 0
7246 /* If EXP has varying size, we MUST use a target since we currently
7247 have no way of allocating temporaries of variable size
7248 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7249 So we assume here that something at a higher level has prevented a
7250 clash. This is somewhat bogus, but the best we can do. Only
7251 do this when X is BLKmode and when we are at the top level. */
7252 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7253 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7254 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7255 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7256 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7257 != INTEGER_CST)
7258 && GET_MODE (x) == BLKmode)
7259 /* If X is in the outgoing argument area, it is always safe. */
7260 || (MEM_P (x)
7261 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7262 || (GET_CODE (XEXP (x, 0)) == PLUS
7263 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7264 return 1;
7266 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7267 find the underlying pseudo. */
7268 if (GET_CODE (x) == SUBREG)
7270 x = SUBREG_REG (x);
7271 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7272 return 0;
7275 /* Now look at our tree code and possibly recurse. */
7276 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7278 case tcc_declaration:
7279 exp_rtl = DECL_RTL_IF_SET (exp);
7280 break;
7282 case tcc_constant:
7283 return 1;
7285 case tcc_exceptional:
7286 if (TREE_CODE (exp) == TREE_LIST)
7288 while (1)
7290 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7291 return 0;
7292 exp = TREE_CHAIN (exp);
7293 if (!exp)
7294 return 1;
7295 if (TREE_CODE (exp) != TREE_LIST)
7296 return safe_from_p (x, exp, 0);
7299 else if (TREE_CODE (exp) == CONSTRUCTOR)
7301 constructor_elt *ce;
7302 unsigned HOST_WIDE_INT idx;
7304 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7305 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7306 || !safe_from_p (x, ce->value, 0))
7307 return 0;
7308 return 1;
7310 else if (TREE_CODE (exp) == ERROR_MARK)
7311 return 1; /* An already-visited SAVE_EXPR? */
7312 else
7313 return 0;
7315 case tcc_statement:
7316 /* The only case we look at here is the DECL_INITIAL inside a
7317 DECL_EXPR. */
7318 return (TREE_CODE (exp) != DECL_EXPR
7319 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7320 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7321 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7323 case tcc_binary:
7324 case tcc_comparison:
7325 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7326 return 0;
7327 /* Fall through. */
7329 case tcc_unary:
7330 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7332 case tcc_expression:
7333 case tcc_reference:
7334 case tcc_vl_exp:
7335 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7336 the expression. If it is set, we conflict iff we are that rtx or
7337 both are in memory. Otherwise, we check all operands of the
7338 expression recursively. */
7340 switch (TREE_CODE (exp))
7342 case ADDR_EXPR:
7343 /* If the operand is static or we are static, we can't conflict.
7344 Likewise if we don't conflict with the operand at all. */
7345 if (staticp (TREE_OPERAND (exp, 0))
7346 || TREE_STATIC (exp)
7347 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7348 return 1;
7350 /* Otherwise, the only way this can conflict is if we are taking
7351 the address of a DECL and that address is part of X, which is
7352 very rare. */
7353 exp = TREE_OPERAND (exp, 0);
7354 if (DECL_P (exp))
7356 if (!DECL_RTL_SET_P (exp)
7357 || !MEM_P (DECL_RTL (exp)))
7358 return 0;
7359 else
7360 exp_rtl = XEXP (DECL_RTL (exp), 0);
7362 break;
7364 case MEM_REF:
7365 if (MEM_P (x)
7366 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7367 get_alias_set (exp)))
7368 return 0;
7369 break;
7371 case CALL_EXPR:
7372 /* Assume that the call will clobber all hard registers and
7373 all of memory. */
7374 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7375 || MEM_P (x))
7376 return 0;
7377 break;
7379 case WITH_CLEANUP_EXPR:
7380 case CLEANUP_POINT_EXPR:
7381 /* Lowered by gimplify.c. */
7382 gcc_unreachable ();
7384 case SAVE_EXPR:
7385 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7387 default:
7388 break;
7391 /* If we have an rtx, we do not need to scan our operands. */
7392 if (exp_rtl)
7393 break;
7395 nops = TREE_OPERAND_LENGTH (exp);
7396 for (i = 0; i < nops; i++)
7397 if (TREE_OPERAND (exp, i) != 0
7398 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7399 return 0;
7401 break;
7403 case tcc_type:
7404 /* Should never get a type here. */
7405 gcc_unreachable ();
7408 /* If we have an rtl, find any enclosed object. Then see if we conflict
7409 with it. */
7410 if (exp_rtl)
7412 if (GET_CODE (exp_rtl) == SUBREG)
7414 exp_rtl = SUBREG_REG (exp_rtl);
7415 if (REG_P (exp_rtl)
7416 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7417 return 0;
7420 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7421 are memory and they conflict. */
7422 return ! (rtx_equal_p (x, exp_rtl)
7423 || (MEM_P (x) && MEM_P (exp_rtl)
7424 && true_dependence (exp_rtl, VOIDmode, x)));
7427 /* If we reach here, it is safe. */
7428 return 1;
7432 /* Return the highest power of two that EXP is known to be a multiple of.
7433 This is used in updating alignment of MEMs in array references. */
7435 unsigned HOST_WIDE_INT
7436 highest_pow2_factor (const_tree exp)
7438 unsigned HOST_WIDE_INT ret;
7439 int trailing_zeros = tree_ctz (exp);
7440 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7441 return BIGGEST_ALIGNMENT;
7442 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7443 if (ret > BIGGEST_ALIGNMENT)
7444 return BIGGEST_ALIGNMENT;
7445 return ret;
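/* A hypothetical caller-side sketch (names are illustrative only): for an
   offset expression such as i * 8, tree_ctz reports at least 3 known
   trailing zero bits, so the call below returns 8 and the alignment of the
   addressed MEM can be raised to 64 bits:

     unsigned HOST_WIDE_INT factor = highest_pow2_factor (offset_expr);
     if (factor * BITS_PER_UNIT > MEM_ALIGN (mem))
       set_mem_align (mem, factor * BITS_PER_UNIT);
   */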
7448 /* Similar, except that the alignment requirements of TARGET are
7449 taken into account. Assume it is at least as aligned as its
7450 type, unless it is a COMPONENT_REF in which case the layout of
7451 the structure gives the alignment. */
7453 static unsigned HOST_WIDE_INT
7454 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7456 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7457 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7459 return MAX (factor, talign);
7462 #ifdef HAVE_conditional_move
7463 /* Convert the tree comparison code TCODE to the rtl one where the
7464 signedness is UNSIGNEDP. */
7466 static enum rtx_code
7467 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7469 enum rtx_code code;
7470 switch (tcode)
7472 case EQ_EXPR:
7473 code = EQ;
7474 break;
7475 case NE_EXPR:
7476 code = NE;
7477 break;
7478 case LT_EXPR:
7479 code = unsignedp ? LTU : LT;
7480 break;
7481 case LE_EXPR:
7482 code = unsignedp ? LEU : LE;
7483 break;
7484 case GT_EXPR:
7485 code = unsignedp ? GTU : GT;
7486 break;
7487 case GE_EXPR:
7488 code = unsignedp ? GEU : GE;
7489 break;
7490 case UNORDERED_EXPR:
7491 code = UNORDERED;
7492 break;
7493 case ORDERED_EXPR:
7494 code = ORDERED;
7495 break;
7496 case UNLT_EXPR:
7497 code = UNLT;
7498 break;
7499 case UNLE_EXPR:
7500 code = UNLE;
7501 break;
7502 case UNGT_EXPR:
7503 code = UNGT;
7504 break;
7505 case UNGE_EXPR:
7506 code = UNGE;
7507 break;
7508 case UNEQ_EXPR:
7509 code = UNEQ;
7510 break;
7511 case LTGT_EXPR:
7512 code = LTGT;
7513 break;
7515 default:
7516 gcc_unreachable ();
7518 return code;
7520 #endif
7522 /* Subroutine of expand_expr. Expand the two operands of a binary
7523 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7524 The value may be stored in TARGET if TARGET is nonzero. The
7525 MODIFIER argument is as documented by expand_expr. */
7527 static void
7528 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7529 enum expand_modifier modifier)
7531 if (! safe_from_p (target, exp1, 1))
7532 target = 0;
7533 if (operand_equal_p (exp0, exp1, 0))
7535 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7536 *op1 = copy_rtx (*op0);
7538 else
7540 /* If we need to preserve evaluation order, copy exp0 into its own
7541 temporary variable so that it can't be clobbered by exp1. */
7542 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7543 exp0 = save_expr (exp0);
7544 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7545 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
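/* Typical use, as in the binary-operator cases further below: expand both
   operands under one modifier and let OP0 prefer SUBTARGET, e.g.

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);

   TARGET is dropped above whenever it is not safe from EXP1, so expanding
   the second operand cannot clobber the first.  */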
7550 /* Return a MEM that contains constant EXP. DEFER is as for
7551 output_constant_def and MODIFIER is as for expand_expr. */
7553 static rtx
7554 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7556 rtx mem;
7558 mem = output_constant_def (exp, defer);
7559 if (modifier != EXPAND_INITIALIZER)
7560 mem = use_anchored_address (mem);
7561 return mem;
7564 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7565 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7567 static rtx
7568 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7569 enum expand_modifier modifier, addr_space_t as)
7571 rtx result, subtarget;
7572 tree inner, offset;
7573 HOST_WIDE_INT bitsize, bitpos;
7574 int volatilep, unsignedp;
7575 machine_mode mode1;
7577 /* If we are taking the address of a constant and are at the top level,
7578 we have to use output_constant_def since we can't call force_const_mem
7579 at top level. */
7580 /* ??? This should be considered a front-end bug. We should not be
7581 generating ADDR_EXPR of something that isn't an LVALUE. The only
7582 exception here is STRING_CST. */
7583 if (CONSTANT_CLASS_P (exp))
7585 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7586 if (modifier < EXPAND_SUM)
7587 result = force_operand (result, target);
7588 return result;
7591 /* Everything must be something allowed by is_gimple_addressable. */
7592 switch (TREE_CODE (exp))
7594 case INDIRECT_REF:
7595 /* This case will happen via recursion for &a->b. */
7596 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7598 case MEM_REF:
7600 tree tem = TREE_OPERAND (exp, 0);
7601 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7602 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7603 return expand_expr (tem, target, tmode, modifier);
7606 case CONST_DECL:
7607 /* Expand the initializer like constants above. */
7608 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7609 0, modifier), 0);
7610 if (modifier < EXPAND_SUM)
7611 result = force_operand (result, target);
7612 return result;
7614 case REALPART_EXPR:
7615 /* The real part of the complex number is always first, therefore
7616 the address is the same as the address of the parent object. */
7617 offset = 0;
7618 bitpos = 0;
7619 inner = TREE_OPERAND (exp, 0);
7620 break;
7622 case IMAGPART_EXPR:
7623 /* The imaginary part of the complex number is always second.
7624 The expression is therefore always offset by the size of the
7625 scalar type. */
7626 offset = 0;
7627 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7628 inner = TREE_OPERAND (exp, 0);
7629 break;
7631 case COMPOUND_LITERAL_EXPR:
7632 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7633 rtl_for_decl_init is called on DECL_INITIAL with
7634 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7635 if (modifier == EXPAND_INITIALIZER
7636 && COMPOUND_LITERAL_EXPR_DECL (exp))
7637 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7638 target, tmode, modifier, as);
7639 /* FALLTHRU */
7640 default:
7641 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7642 expand_expr, as that can have various side effects; LABEL_DECLs for
7643 example, may not have their DECL_RTL set yet. Expand the rtl of
7644 CONSTRUCTORs too, which should yield a memory reference for the
7645 constructor's contents. Assume language specific tree nodes can
7646 be expanded in some interesting way. */
7647 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7648 if (DECL_P (exp)
7649 || TREE_CODE (exp) == CONSTRUCTOR
7650 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7652 result = expand_expr (exp, target, tmode,
7653 modifier == EXPAND_INITIALIZER
7654 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7656 /* If the DECL isn't in memory, then the DECL wasn't properly
7657 marked TREE_ADDRESSABLE, which will be either a front-end
7658 or a tree optimizer bug. */
7660 if (TREE_ADDRESSABLE (exp)
7661 && ! MEM_P (result)
7662 && ! targetm.calls.allocate_stack_slots_for_args ())
7664 error ("local frame unavailable (naked function?)");
7665 return result;
7667 else
7668 gcc_assert (MEM_P (result));
7669 result = XEXP (result, 0);
7671 /* ??? Is this needed anymore? */
7672 if (DECL_P (exp))
7673 TREE_USED (exp) = 1;
7675 if (modifier != EXPAND_INITIALIZER
7676 && modifier != EXPAND_CONST_ADDRESS
7677 && modifier != EXPAND_SUM)
7678 result = force_operand (result, target);
7679 return result;
7682 /* Pass FALSE as the last argument to get_inner_reference although
7683 we are expanding to RTL. The rationale is that we know how to
7684 handle "aligning nodes" here: we can just bypass them because
7685 they won't change the final object whose address will be returned
7686 (they actually exist only for that purpose). */
7687 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7688 &mode1, &unsignedp, &volatilep, false);
7689 break;
7692 /* We must have made progress. */
7693 gcc_assert (inner != exp);
7695 subtarget = offset || bitpos ? NULL_RTX : target;
7696 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7697 inner alignment, force the inner to be sufficiently aligned. */
7698 if (CONSTANT_CLASS_P (inner)
7699 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7701 inner = copy_node (inner);
7702 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7703 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7704 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7706 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7708 if (offset)
7710 rtx tmp;
7712 if (modifier != EXPAND_NORMAL)
7713 result = force_operand (result, NULL);
7714 tmp = expand_expr (offset, NULL_RTX, tmode,
7715 modifier == EXPAND_INITIALIZER
7716 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7718 /* expand_expr is allowed to return an object in a mode other
7719 than TMODE. If it did, we need to convert. */
7720 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7721 tmp = convert_modes (tmode, GET_MODE (tmp),
7722 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7723 result = convert_memory_address_addr_space (tmode, result, as);
7724 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7726 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7727 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7728 else
7730 subtarget = bitpos ? NULL_RTX : target;
7731 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7732 1, OPTAB_LIB_WIDEN);
7736 if (bitpos)
7738 /* Someone beforehand should have rejected taking the address
7739 of such an object. */
7740 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7742 result = convert_memory_address_addr_space (tmode, result, as);
7743 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7744 if (modifier < EXPAND_SUM)
7745 result = force_operand (result, target);
7748 return result;
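/* Example of the bitpos handling above (illustrative): for &__imag__ c,
   with C a _Complex double on a target where double is DFmode, the
   IMAGPART_EXPR case sets bitpos to GET_MODE_BITSIZE (DFmode), and the
   final plus_constant call adds bitpos / BITS_PER_UNIT, i.e. 8 bytes,
   to the address of C.  */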
7751 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7752 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7754 static rtx
7755 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7756 enum expand_modifier modifier)
7758 addr_space_t as = ADDR_SPACE_GENERIC;
7759 machine_mode address_mode = Pmode;
7760 machine_mode pointer_mode = ptr_mode;
7761 machine_mode rmode;
7762 rtx result;
7764 /* Target mode of VOIDmode says "whatever's natural". */
7765 if (tmode == VOIDmode)
7766 tmode = TYPE_MODE (TREE_TYPE (exp));
7768 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7770 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7771 address_mode = targetm.addr_space.address_mode (as);
7772 pointer_mode = targetm.addr_space.pointer_mode (as);
7775 /* We can get called with some Weird Things if the user does silliness
7776 like "(short) &a". In that case, convert_memory_address won't do
7777 the right thing, so ignore the given target mode. */
7778 if (tmode != address_mode && tmode != pointer_mode)
7779 tmode = address_mode;
7781 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7782 tmode, modifier, as);
7784 /* Despite expand_expr's claims concerning ignoring TMODE when not
7785 strictly convenient, stuff breaks if we don't honor it. Note
7786 that combined with the above, we only do this for pointer modes. */
7787 rmode = GET_MODE (result);
7788 if (rmode == VOIDmode)
7789 rmode = tmode;
7790 if (rmode != tmode)
7791 result = convert_memory_address_addr_space (tmode, result, as);
7793 return result;
7796 /* Generate code for computing CONSTRUCTOR EXP.
7797 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7798 is TRUE, instead of creating a temporary variable in memory,
7799 NULL is returned and the caller needs to handle it differently. */
7801 static rtx
7802 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7803 bool avoid_temp_mem)
7805 tree type = TREE_TYPE (exp);
7806 machine_mode mode = TYPE_MODE (type);
7808 /* Try to avoid creating a temporary at all. This is possible
7809 if all of the initializer is zero.
7810 FIXME: try to handle all [0..255] initializers we can handle
7811 with memset. */
7812 if (TREE_STATIC (exp)
7813 && !TREE_ADDRESSABLE (exp)
7814 && target != 0 && mode == BLKmode
7815 && all_zeros_p (exp))
7817 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7818 return target;
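/* The path above turns, for example, an all-zero CONSTRUCTOR for a
   BLKmode aggregate with a usable TARGET into a single clear_storage
   call, avoiding both a constant-pool copy and element-wise stores.  */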
7821 /* All elts simple constants => refer to a constant in memory. But
7822 if this is a non-BLKmode mode, let it store a field at a time
7823 since that should make a CONST_INT, CONST_WIDE_INT or
7824 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7825 use, it is best to store directly into the target unless the type
7826 is large enough that memcpy will be used. If we are making an
7827 initializer and all operands are constant, put it in memory as
7828 well.
7830 FIXME: Avoid trying to fill vector constructors piece-meal.
7831 Output them with output_constant_def below unless we're sure
7832 they're zeros. This should go away when vector initializers
7833 are treated like VECTOR_CST instead of arrays. */
7834 if ((TREE_STATIC (exp)
7835 && ((mode == BLKmode
7836 && ! (target != 0 && safe_from_p (target, exp, 1)))
7837 || TREE_ADDRESSABLE (exp)
7838 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7839 && (! MOVE_BY_PIECES_P
7840 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7841 TYPE_ALIGN (type)))
7842 && ! mostly_zeros_p (exp))))
7843 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7844 && TREE_CONSTANT (exp)))
7846 rtx constructor;
7848 if (avoid_temp_mem)
7849 return NULL_RTX;
7851 constructor = expand_expr_constant (exp, 1, modifier);
7853 if (modifier != EXPAND_CONST_ADDRESS
7854 && modifier != EXPAND_INITIALIZER
7855 && modifier != EXPAND_SUM)
7856 constructor = validize_mem (constructor);
7858 return constructor;
7861 /* Handle calls that pass values in multiple non-contiguous
7862 locations. The Irix 6 ABI has examples of this. */
7863 if (target == 0 || ! safe_from_p (target, exp, 1)
7864 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7866 if (avoid_temp_mem)
7867 return NULL_RTX;
7869 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7872 store_constructor (exp, target, 0, int_expr_size (exp));
7873 return target;
7877 /* expand_expr: generate code for computing expression EXP.
7878 An rtx for the computed value is returned. The value is never null.
7879 In the case of a void EXP, const0_rtx is returned.
7881 The value may be stored in TARGET if TARGET is nonzero.
7882 TARGET is just a suggestion; callers must assume that
7883 the rtx returned may not be the same as TARGET.
7885 If TARGET is CONST0_RTX, it means that the value will be ignored.
7887 If TMODE is not VOIDmode, it suggests generating the
7888 result in mode TMODE. But this is done only when convenient.
7889 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7890 TMODE is just a suggestion; callers must assume that
7891 the rtx returned may not have mode TMODE.
7893 Note that TARGET may have neither TMODE nor MODE. In that case, it
7894 probably will not be used.
7896 If MODIFIER is EXPAND_SUM then when EXP is an addition
7897 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7898 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7899 products as above, or REG or MEM, or constant.
7900 Ordinarily in such cases we would output mul or add instructions
7901 and then return a pseudo reg containing the sum.
7903 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7904 it also marks a label as absolutely required (it can't be dead).
7905 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7906 This is used for outputting expressions used in initializers.
7908 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7909 with a constant address even if that address is not normally legitimate.
7910 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7912 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7913 a call parameter. Such targets require special care as we haven't yet
7914 marked TARGET so that it's safe from being trashed by libcalls. We
7915 don't want to use TARGET for anything but the final result;
7916 intermediate values must go elsewhere. Additionally, calls to
7917 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7919 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7920 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7921 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7922 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7923 recursively.
7925 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7926 In this case, we don't adjust a returned MEM rtx that wouldn't be
7927 sufficiently aligned for its mode; instead, it's up to the caller
7928 to deal with it afterwards. This is used to make sure that unaligned
7929 base objects for which out-of-bounds accesses are supported, for
7930 example record types with trailing arrays, aren't realigned behind
7931 the back of the caller.
7932 The normal operating mode is to pass FALSE for this parameter. */
7935 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7936 enum expand_modifier modifier, rtx *alt_rtl,
7937 bool inner_reference_p)
7939 rtx ret;
7941 /* Handle ERROR_MARK before anybody tries to access its type. */
7942 if (TREE_CODE (exp) == ERROR_MARK
7943 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7945 ret = CONST0_RTX (tmode);
7946 return ret ? ret : const0_rtx;
7949 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7950 inner_reference_p);
7951 return ret;
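/* A minimal caller-side sketch of the contract documented above (the
   names EXP, MODE and UNSIGNEDP stand for whatever the caller has):

     rtx op = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
     if (GET_MODE (op) != VOIDmode && GET_MODE (op) != mode)
       op = convert_to_mode (mode, op, unsignedp);

   Neither TARGET nor TMODE is guaranteed to be honored, so callers must
   cope with the result coming back elsewhere or in another mode.  */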
7954 /* Try to expand the conditional expression which is represented by
7955 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7956 return the rtl reg which represents the result. Otherwise return
7957 NULL_RTX. */
7959 static rtx
7960 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7961 tree treeop1 ATTRIBUTE_UNUSED,
7962 tree treeop2 ATTRIBUTE_UNUSED)
7964 #ifdef HAVE_conditional_move
7965 rtx insn;
7966 rtx op00, op01, op1, op2;
7967 enum rtx_code comparison_code;
7968 machine_mode comparison_mode;
7969 gimple srcstmt;
7970 rtx temp;
7971 tree type = TREE_TYPE (treeop1);
7972 int unsignedp = TYPE_UNSIGNED (type);
7973 machine_mode mode = TYPE_MODE (type);
7974 machine_mode orig_mode = mode;
7976 /* If we cannot do a conditional move on the mode, try doing it
7977 with the promoted mode. */
7978 if (!can_conditionally_move_p (mode))
7980 mode = promote_mode (type, mode, &unsignedp);
7981 if (!can_conditionally_move_p (mode))
7982 return NULL_RTX;
7983 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7985 else
7986 temp = assign_temp (type, 0, 1);
7988 start_sequence ();
7989 expand_operands (treeop1, treeop2,
7990 temp, &op1, &op2, EXPAND_NORMAL);
7992 if (TREE_CODE (treeop0) == SSA_NAME
7993 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7995 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7996 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7997 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7998 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7999 comparison_mode = TYPE_MODE (type);
8000 unsignedp = TYPE_UNSIGNED (type);
8001 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8003 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8005 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8006 enum tree_code cmpcode = TREE_CODE (treeop0);
8007 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8008 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8009 unsignedp = TYPE_UNSIGNED (type);
8010 comparison_mode = TYPE_MODE (type);
8011 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8013 else
8015 op00 = expand_normal (treeop0);
8016 op01 = const0_rtx;
8017 comparison_code = NE;
8018 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8021 if (GET_MODE (op1) != mode)
8022 op1 = gen_lowpart (mode, op1);
8024 if (GET_MODE (op2) != mode)
8025 op2 = gen_lowpart (mode, op2);
8027 /* Try to emit the conditional move. */
8028 insn = emit_conditional_move (temp, comparison_code,
8029 op00, op01, comparison_mode,
8030 op1, op2, mode,
8031 unsignedp);
8033 /* If we could do the conditional move, emit the sequence,
8034 and return. */
8035 if (insn)
8037 rtx_insn *seq = get_insns ();
8038 end_sequence ();
8039 emit_insn (seq);
8040 return convert_modes (orig_mode, mode, temp, 0);
8043 /* Otherwise discard the sequence and fall back to code with
8044 branches. */
8045 end_sequence ();
8046 #endif
8047 return NULL_RTX;
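/* In effect, for a source-level "x = a < b ? c : d" on a target with
   conditional moves, the sequence built above is one comparison of A and
   B followed by a cmove selecting C or D into TEMP; when
   emit_conditional_move fails, the COND_EXPR case below falls back to
   the jump-based expansion.  */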
8051 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8052 enum expand_modifier modifier)
8054 rtx op0, op1, op2, temp;
8055 tree type;
8056 int unsignedp;
8057 machine_mode mode;
8058 enum tree_code code = ops->code;
8059 optab this_optab;
8060 rtx subtarget, original_target;
8061 int ignore;
8062 bool reduce_bit_field;
8063 location_t loc = ops->location;
8064 tree treeop0, treeop1, treeop2;
8065 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8066 ? reduce_to_bit_field_precision ((expr), \
8067 target, \
8068 type) \
8069 : (expr))
8071 type = ops->type;
8072 mode = TYPE_MODE (type);
8073 unsignedp = TYPE_UNSIGNED (type);
8075 treeop0 = ops->op0;
8076 treeop1 = ops->op1;
8077 treeop2 = ops->op2;
8079 /* We should be called only on simple (binary or unary) expressions,
8080 exactly those that are valid in gimple expressions that aren't
8081 GIMPLE_SINGLE_RHS (or invalid). */
8082 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8083 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8084 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8086 ignore = (target == const0_rtx
8087 || ((CONVERT_EXPR_CODE_P (code)
8088 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8089 && TREE_CODE (type) == VOID_TYPE));
8091 /* We should be called only if we need the result. */
8092 gcc_assert (!ignore);
8094 /* An operation in what may be a bit-field type needs the
8095 result to be reduced to the precision of the bit-field type,
8096 which is narrower than that of the type's mode. */
8097 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8098 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
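  /* Example: a 3-bit unsigned bit-field type typically has QImode, so
     GET_MODE_PRECISION is 8 while TYPE_PRECISION is 3; the arithmetic is
     done in QImode and REDUCE_BIT_FIELD masks the result back down to
     3 bits (sign-extending instead for signed fields).  */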
8100 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8101 target = 0;
8103 /* Use subtarget as the target for operand 0 of a binary operation. */
8104 subtarget = get_subtarget (target);
8105 original_target = target;
8107 switch (code)
8109 case NON_LVALUE_EXPR:
8110 case PAREN_EXPR:
8111 CASE_CONVERT:
8112 if (treeop0 == error_mark_node)
8113 return const0_rtx;
8115 if (TREE_CODE (type) == UNION_TYPE)
8117 tree valtype = TREE_TYPE (treeop0);
8119 /* If both input and output are BLKmode, this conversion isn't doing
8120 anything except possibly changing memory attribute. */
8121 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8123 rtx result = expand_expr (treeop0, target, tmode,
8124 modifier);
8126 result = copy_rtx (result);
8127 set_mem_attributes (result, type, 0);
8128 return result;
8131 if (target == 0)
8133 if (TYPE_MODE (type) != BLKmode)
8134 target = gen_reg_rtx (TYPE_MODE (type));
8135 else
8136 target = assign_temp (type, 1, 1);
8139 if (MEM_P (target))
8140 /* Store data into beginning of memory target. */
8141 store_expr (treeop0,
8142 adjust_address (target, TYPE_MODE (valtype), 0),
8143 modifier == EXPAND_STACK_PARM,
8144 false);
8146 else
8148 gcc_assert (REG_P (target));
8150 /* Store this field into a union of the proper type. */
8151 store_field (target,
8152 MIN ((int_size_in_bytes (TREE_TYPE
8153 (treeop0))
8154 * BITS_PER_UNIT),
8155 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8156 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8159 /* Return the entire union. */
8160 return target;
8163 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8165 op0 = expand_expr (treeop0, target, VOIDmode,
8166 modifier);
8168 /* If the signedness of the conversion differs and OP0 is
8169 a promoted SUBREG, clear that indication since we now
8170 have to do the proper extension. */
8171 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8172 && GET_CODE (op0) == SUBREG)
8173 SUBREG_PROMOTED_VAR_P (op0) = 0;
8175 return REDUCE_BIT_FIELD (op0);
8178 op0 = expand_expr (treeop0, NULL_RTX, mode,
8179 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8180 if (GET_MODE (op0) == mode)
8183 /* If OP0 is a constant, just convert it into the proper mode. */
8184 else if (CONSTANT_P (op0))
8186 tree inner_type = TREE_TYPE (treeop0);
8187 machine_mode inner_mode = GET_MODE (op0);
8189 if (inner_mode == VOIDmode)
8190 inner_mode = TYPE_MODE (inner_type);
8192 if (modifier == EXPAND_INITIALIZER)
8193 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8194 subreg_lowpart_offset (mode,
8195 inner_mode));
8196 else
8197 op0 = convert_modes (mode, inner_mode, op0,
8198 TYPE_UNSIGNED (inner_type));
8201 else if (modifier == EXPAND_INITIALIZER)
8202 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8204 else if (target == 0)
8205 op0 = convert_to_mode (mode, op0,
8206 TYPE_UNSIGNED (TREE_TYPE
8207 (treeop0)));
8208 else
8210 convert_move (target, op0,
8211 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8212 op0 = target;
8215 return REDUCE_BIT_FIELD (op0);
8217 case ADDR_SPACE_CONVERT_EXPR:
8219 tree treeop0_type = TREE_TYPE (treeop0);
8220 addr_space_t as_to;
8221 addr_space_t as_from;
8223 gcc_assert (POINTER_TYPE_P (type));
8224 gcc_assert (POINTER_TYPE_P (treeop0_type));
8226 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8227 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8229 /* Conversions between pointers to the same address space should
8230 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8231 gcc_assert (as_to != as_from);
8233 /* Ask target code to handle conversion between pointers
8234 to overlapping address spaces. */
8235 if (targetm.addr_space.subset_p (as_to, as_from)
8236 || targetm.addr_space.subset_p (as_from, as_to))
8238 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8239 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8240 gcc_assert (op0);
8241 return op0;
8244 /* For disjoint address spaces, converting anything but
8245 a null pointer invokes undefined behaviour. We simply
8246 always return a null pointer here. */
8247 return CONST0_RTX (mode);
8250 case POINTER_PLUS_EXPR:
8251 /* Even though the sizetype mode and the pointer's mode can be different,
8252 expand is able to handle this correctly and get the correct result out
8253 of the PLUS_EXPR code. */
8254 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8255 if sizetype precision is smaller than pointer precision. */
8256 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8257 treeop1 = fold_convert_loc (loc, type,
8258 fold_convert_loc (loc, ssizetype,
8259 treeop1));
8260 /* If sizetype precision is larger than pointer precision, truncate the
8261 offset to have matching modes. */
8262 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8263 treeop1 = fold_convert_loc (loc, type, treeop1);
8265 case PLUS_EXPR:
8266 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8267 something else, make sure we add the register to the constant and
8268 then to the other thing. This case can occur during strength
8269 reduction and doing it this way will produce better code if the
8270 frame pointer or argument pointer is eliminated.
8272 fold-const.c will ensure that the constant is always in the inner
8273 PLUS_EXPR, so the only case we need to do anything about is if
8274 sp, ap, or fp is our second argument, in which case we must swap
8275 the innermost first argument and our second argument. */
8277 if (TREE_CODE (treeop0) == PLUS_EXPR
8278 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8279 && TREE_CODE (treeop1) == VAR_DECL
8280 && (DECL_RTL (treeop1) == frame_pointer_rtx
8281 || DECL_RTL (treeop1) == stack_pointer_rtx
8282 || DECL_RTL (treeop1) == arg_pointer_rtx))
8284 gcc_unreachable ();
8287 /* If the result is to be ptr_mode and we are adding an integer to
8288 something, we might be forming a constant. So try to use
8289 plus_constant. If it produces a sum and we can't accept it,
8290 use force_operand. This allows P = &ARR[const] to generate
8291 efficient code on machines where a SYMBOL_REF is not a valid
8292 address.
8294 If this is an EXPAND_SUM call, always return the sum. */
8295 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8296 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8298 if (modifier == EXPAND_STACK_PARM)
8299 target = 0;
8300 if (TREE_CODE (treeop0) == INTEGER_CST
8301 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8302 && TREE_CONSTANT (treeop1))
8304 rtx constant_part;
8305 HOST_WIDE_INT wc;
8306 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8308 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8309 EXPAND_SUM);
8310 /* Use wi::shwi to ensure that the constant is
8311 truncated according to the mode of OP1, then sign extended
8312 to a HOST_WIDE_INT. Using the constant directly can result
8313 in non-canonical RTL in a 64x32 cross compile. */
8314 wc = TREE_INT_CST_LOW (treeop0);
8315 constant_part =
8316 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8317 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8318 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8319 op1 = force_operand (op1, target);
8320 return REDUCE_BIT_FIELD (op1);
8323 else if (TREE_CODE (treeop1) == INTEGER_CST
8324 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8325 && TREE_CONSTANT (treeop0))
8327 rtx constant_part;
8328 HOST_WIDE_INT wc;
8329 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8331 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8332 (modifier == EXPAND_INITIALIZER
8333 ? EXPAND_INITIALIZER : EXPAND_SUM));
8334 if (! CONSTANT_P (op0))
8336 op1 = expand_expr (treeop1, NULL_RTX,
8337 VOIDmode, modifier);
8338 /* Return a PLUS if modifier says it's OK. */
8339 if (modifier == EXPAND_SUM
8340 || modifier == EXPAND_INITIALIZER)
8341 return simplify_gen_binary (PLUS, mode, op0, op1);
8342 goto binop2;
8344 /* Use wi::shwi to ensure that the constant is
8345 truncated according to the mode of OP1, then sign extended
8346 to a HOST_WIDE_INT. Using the constant directly can result
8347 in non-canonical RTL in a 64x32 cross compile. */
8348 wc = TREE_INT_CST_LOW (treeop1);
8349 constant_part
8350 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8351 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8352 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8353 op0 = force_operand (op0, target);
8354 return REDUCE_BIT_FIELD (op0);
8358 /* Use TER to expand pointer addition of a negated value
8359 as pointer subtraction. */
8360 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8361 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8362 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8363 && TREE_CODE (treeop1) == SSA_NAME
8364 && TYPE_MODE (TREE_TYPE (treeop0))
8365 == TYPE_MODE (TREE_TYPE (treeop1)))
8367 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8368 if (def)
8370 treeop1 = gimple_assign_rhs1 (def);
8371 code = MINUS_EXPR;
8372 goto do_minus;
8376 /* No sense saving up arithmetic to be done
8377 if it's all in the wrong mode to form part of an address.
8378 And force_operand won't know whether to sign-extend or
8379 zero-extend. */
8380 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8381 || mode != ptr_mode)
8383 expand_operands (treeop0, treeop1,
8384 subtarget, &op0, &op1, EXPAND_NORMAL);
8385 if (op0 == const0_rtx)
8386 return op1;
8387 if (op1 == const0_rtx)
8388 return op0;
8389 goto binop2;
8392 expand_operands (treeop0, treeop1,
8393 subtarget, &op0, &op1, modifier);
8394 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8396 case MINUS_EXPR:
8397 do_minus:
8398 /* For initializers, we are allowed to return a MINUS of two
8399 symbolic constants. Here we handle all cases when both operands
8400 are constant. */
8401 /* Handle difference of two symbolic constants,
8402 for the sake of an initializer. */
8403 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8404 && really_constant_p (treeop0)
8405 && really_constant_p (treeop1))
8407 expand_operands (treeop0, treeop1,
8408 NULL_RTX, &op0, &op1, modifier);
8410 /* If the last operand is a CONST_INT, use plus_constant of
8411 the negated constant. Else make the MINUS. */
8412 if (CONST_INT_P (op1))
8413 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8414 -INTVAL (op1)));
8415 else
8416 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8419 /* No sense saving up arithmetic to be done
8420 if it's all in the wrong mode to form part of an address.
8421 And force_operand won't know whether to sign-extend or
8422 zero-extend. */
8423 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8424 || mode != ptr_mode)
8425 goto binop;
8427 expand_operands (treeop0, treeop1,
8428 subtarget, &op0, &op1, modifier);
8430 /* Convert A - const to A + (-const). */
8431 if (CONST_INT_P (op1))
8433 op1 = negate_rtx (mode, op1);
8434 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8437 goto binop2;
8439 case WIDEN_MULT_PLUS_EXPR:
8440 case WIDEN_MULT_MINUS_EXPR:
8441 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8442 op2 = expand_normal (treeop2);
8443 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8444 target, unsignedp);
8445 return target;
8447 case WIDEN_MULT_EXPR:
8448 /* If first operand is constant, swap them.
8449 Thus the following special case checks need only
8450 check the second operand. */
8451 if (TREE_CODE (treeop0) == INTEGER_CST)
8453 tree t1 = treeop0;
8454 treeop0 = treeop1;
8455 treeop1 = t1;
8458 /* First, check if we have a multiplication of one signed and one
8459 unsigned operand. */
8460 if (TREE_CODE (treeop1) != INTEGER_CST
8461 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8462 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8464 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8465 this_optab = usmul_widen_optab;
8466 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8467 != CODE_FOR_nothing)
8469 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8470 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8471 EXPAND_NORMAL);
8472 else
8473 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8474 EXPAND_NORMAL);
8475 /* op0 and op1 might still be constant, despite the above
8476 != INTEGER_CST check. Handle it. */
8477 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8479 op0 = convert_modes (innermode, mode, op0, true);
8480 op1 = convert_modes (innermode, mode, op1, false);
8481 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8482 target, unsignedp));
8484 goto binop3;
8487 /* Check for a multiplication with matching signedness. */
8488 else if ((TREE_CODE (treeop1) == INTEGER_CST
8489 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8490 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8491 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8493 tree op0type = TREE_TYPE (treeop0);
8494 machine_mode innermode = TYPE_MODE (op0type);
8495 bool zextend_p = TYPE_UNSIGNED (op0type);
8496 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8497 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8499 if (TREE_CODE (treeop0) != INTEGER_CST)
8501 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8502 != CODE_FOR_nothing)
8504 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8505 EXPAND_NORMAL);
8506 /* op0 and op1 might still be constant, despite the above
8507 != INTEGER_CST check. Handle it. */
8508 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8510 widen_mult_const:
8511 op0 = convert_modes (innermode, mode, op0, zextend_p);
8513 op1 = convert_modes (innermode, mode, op1,
8514 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8515 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8516 target,
8517 unsignedp));
8519 temp = expand_widening_mult (mode, op0, op1, target,
8520 unsignedp, this_optab);
8521 return REDUCE_BIT_FIELD (temp);
8523 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8524 != CODE_FOR_nothing
8525 && innermode == word_mode)
8527 rtx htem, hipart;
8528 op0 = expand_normal (treeop0);
8529 if (TREE_CODE (treeop1) == INTEGER_CST)
8530 op1 = convert_modes (innermode, mode,
8531 expand_normal (treeop1),
8532 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8533 else
8534 op1 = expand_normal (treeop1);
8535 /* op0 and op1 might still be constant, despite the above
8536 != INTEGER_CST check. Handle it. */
8537 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8538 goto widen_mult_const;
8539 temp = expand_binop (mode, other_optab, op0, op1, target,
8540 unsignedp, OPTAB_LIB_WIDEN);
8541 hipart = gen_highpart (innermode, temp);
8542 htem = expand_mult_highpart_adjust (innermode, hipart,
8543 op0, op1, hipart,
8544 zextend_p);
8545 if (htem != hipart)
8546 emit_move_insn (hipart, htem);
8547 return REDUCE_BIT_FIELD (temp);
8551 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8552 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8553 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8554 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8556 case FMA_EXPR:
8558 optab opt = fma_optab;
8559 gimple def0, def2;
8561 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8562 call. */
8563 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8565 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8566 tree call_expr;
8568 gcc_assert (fn != NULL_TREE);
8569 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8570 return expand_builtin (call_expr, target, subtarget, mode, false);
8573 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8574 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8576 op0 = op2 = NULL;
8578 if (def0 && def2
8579 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8581 opt = fnms_optab;
8582 op0 = expand_normal (gimple_assign_rhs1 (def0));
8583 op2 = expand_normal (gimple_assign_rhs1 (def2));
8585 else if (def0
8586 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8588 opt = fnma_optab;
8589 op0 = expand_normal (gimple_assign_rhs1 (def0));
8591 else if (def2
8592 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8594 opt = fms_optab;
8595 op2 = expand_normal (gimple_assign_rhs1 (def2));
8598 if (op0 == NULL)
8599 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8600 if (op2 == NULL)
8601 op2 = expand_normal (treeop2);
8602 op1 = expand_normal (treeop1);
8604 return expand_ternary_op (TYPE_MODE (type), opt,
8605 op0, op1, op2, target, 0);
8608 case MULT_EXPR:
8609 /* If this is a fixed-point operation, then we cannot use the code
8610 below because "expand_mult" doesn't support sat/no-sat fixed-point
8611 multiplications. */
8612 if (ALL_FIXED_POINT_MODE_P (mode))
8613 goto binop;
8615 /* If first operand is constant, swap them.
8616 Thus the following special case checks need only
8617 check the second operand. */
8618 if (TREE_CODE (treeop0) == INTEGER_CST)
8620 tree t1 = treeop0;
8621 treeop0 = treeop1;
8622 treeop1 = t1;
8625 /* Attempt to return something suitable for generating an
8626 indexed address, for machines that support that. */
8628 if (modifier == EXPAND_SUM && mode == ptr_mode
8629 && tree_fits_shwi_p (treeop1))
8631 tree exp1 = treeop1;
8633 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8634 EXPAND_SUM);
8636 if (!REG_P (op0))
8637 op0 = force_operand (op0, NULL_RTX);
8638 if (!REG_P (op0))
8639 op0 = copy_to_mode_reg (mode, op0);
8641 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8642 gen_int_mode (tree_to_shwi (exp1),
8643 TYPE_MODE (TREE_TYPE (exp1)))));
8646 if (modifier == EXPAND_STACK_PARM)
8647 target = 0;
8649 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8650 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8652 case TRUNC_DIV_EXPR:
8653 case FLOOR_DIV_EXPR:
8654 case CEIL_DIV_EXPR:
8655 case ROUND_DIV_EXPR:
8656 case EXACT_DIV_EXPR:
8657 /* If this is a fixed-point operation, then we cannot use the code
8658 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8659 divisions. */
8660 if (ALL_FIXED_POINT_MODE_P (mode))
8661 goto binop;
8663 if (modifier == EXPAND_STACK_PARM)
8664 target = 0;
8665 /* Possible optimization: compute the dividend with EXPAND_SUM
8666 then, if the divisor is constant, optimize the case
8667 where some terms of the dividend have coeffs divisible by it. */
8668 expand_operands (treeop0, treeop1,
8669 subtarget, &op0, &op1, EXPAND_NORMAL);
8670 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8672 case RDIV_EXPR:
8673 goto binop;
8675 case MULT_HIGHPART_EXPR:
8676 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8677 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8678 gcc_assert (temp);
8679 return temp;
8681 case TRUNC_MOD_EXPR:
8682 case FLOOR_MOD_EXPR:
8683 case CEIL_MOD_EXPR:
8684 case ROUND_MOD_EXPR:
8685 if (modifier == EXPAND_STACK_PARM)
8686 target = 0;
8687 expand_operands (treeop0, treeop1,
8688 subtarget, &op0, &op1, EXPAND_NORMAL);
8689 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8691 case FIXED_CONVERT_EXPR:
8692 op0 = expand_normal (treeop0);
8693 if (target == 0 || modifier == EXPAND_STACK_PARM)
8694 target = gen_reg_rtx (mode);
8696 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8697 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8698 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8699 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8700 else
8701 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8702 return target;
8704 case FIX_TRUNC_EXPR:
8705 op0 = expand_normal (treeop0);
8706 if (target == 0 || modifier == EXPAND_STACK_PARM)
8707 target = gen_reg_rtx (mode);
8708 expand_fix (target, op0, unsignedp);
8709 return target;
8711 case FLOAT_EXPR:
8712 op0 = expand_normal (treeop0);
8713 if (target == 0 || modifier == EXPAND_STACK_PARM)
8714 target = gen_reg_rtx (mode);
8715 /* expand_float can't figure out what to do if FROM has VOIDmode.
8716 So give it the correct mode. With -O, cse will optimize this. */
8717 if (GET_MODE (op0) == VOIDmode)
8718 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8719 op0);
8720 expand_float (target, op0,
8721 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8722 return target;
8724 case NEGATE_EXPR:
8725 op0 = expand_expr (treeop0, subtarget,
8726 VOIDmode, EXPAND_NORMAL);
8727 if (modifier == EXPAND_STACK_PARM)
8728 target = 0;
8729 temp = expand_unop (mode,
8730 optab_for_tree_code (NEGATE_EXPR, type,
8731 optab_default),
8732 op0, target, 0);
8733 gcc_assert (temp);
8734 return REDUCE_BIT_FIELD (temp);
8736 case ABS_EXPR:
8737 op0 = expand_expr (treeop0, subtarget,
8738 VOIDmode, EXPAND_NORMAL);
8739 if (modifier == EXPAND_STACK_PARM)
8740 target = 0;
8742 /* ABS_EXPR is not valid for complex arguments. */
8743 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8744 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8746 /* Unsigned abs is simply the operand. Testing here means we don't
8747 risk generating incorrect code below. */
8748 if (TYPE_UNSIGNED (type))
8749 return op0;
8751 return expand_abs (mode, op0, target, unsignedp,
8752 safe_from_p (target, treeop0, 1));
8754 case MAX_EXPR:
8755 case MIN_EXPR:
8756 target = original_target;
8757 if (target == 0
8758 || modifier == EXPAND_STACK_PARM
8759 || (MEM_P (target) && MEM_VOLATILE_P (target))
8760 || GET_MODE (target) != mode
8761 || (REG_P (target)
8762 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8763 target = gen_reg_rtx (mode);
8764 expand_operands (treeop0, treeop1,
8765 target, &op0, &op1, EXPAND_NORMAL);
8767 /* First try to do it with a special MIN or MAX instruction.
8768 If that does not win, use a conditional jump to select the proper
8769 value. */
8770 this_optab = optab_for_tree_code (code, type, optab_default);
8771 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8772 OPTAB_WIDEN);
8773 if (temp != 0)
8774 return temp;
8776 /* At this point, a MEM target is no longer useful; we will get better
8777 code without it. */
8779 if (! REG_P (target))
8780 target = gen_reg_rtx (mode);
8782 /* If op1 was placed in target, swap op0 and op1. */
8783 if (target != op0 && target == op1)
8785 temp = op0;
8786 op0 = op1;
8787 op1 = temp;
8790 /* We generate better code and avoid problems with op1 mentioning
8791 target by forcing op1 into a pseudo if it isn't a constant. */
8792 if (! CONSTANT_P (op1))
8793 op1 = force_reg (mode, op1);
8796 enum rtx_code comparison_code;
8797 rtx cmpop1 = op1;
8799 if (code == MAX_EXPR)
8800 comparison_code = unsignedp ? GEU : GE;
8801 else
8802 comparison_code = unsignedp ? LEU : LE;
8804 /* Canonicalize to comparisons against 0. */
8805 if (op1 == const1_rtx)
8807 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8808 or (a != 0 ? a : 1) for unsigned.
8809 For MIN we are safe converting (a <= 1 ? a : 1)
8810 into (a <= 0 ? a : 1) */
8811 cmpop1 = const0_rtx;
8812 if (code == MAX_EXPR)
8813 comparison_code = unsignedp ? NE : GT;
8815 if (op1 == constm1_rtx && !unsignedp)
8817 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8818 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8819 cmpop1 = const0_rtx;
8820 if (code == MIN_EXPR)
8821 comparison_code = LT;
8823 #ifdef HAVE_conditional_move
8824 /* Use a conditional move if possible. */
8825 if (can_conditionally_move_p (mode))
8827 rtx insn;
8829 start_sequence ();
8831 /* Try to emit the conditional move. */
8832 insn = emit_conditional_move (target, comparison_code,
8833 op0, cmpop1, mode,
8834 op0, op1, mode,
8835 unsignedp);
8837 /* If we could do the conditional move, emit the sequence,
8838 and return. */
8839 if (insn)
8841 rtx_insn *seq = get_insns ();
8842 end_sequence ();
8843 emit_insn (seq);
8844 return target;
8847 /* Otherwise discard the sequence and fall back to code with
8848 branches. */
8849 end_sequence ();
8851 #endif
8852 if (target != op0)
8853 emit_move_insn (target, op0);
8855 temp = gen_label_rtx ();
8856 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8857 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8858 -1);
8860 emit_move_insn (target, op1);
8861 emit_label (temp);
8862 return target;
8864 case BIT_NOT_EXPR:
8865 op0 = expand_expr (treeop0, subtarget,
8866 VOIDmode, EXPAND_NORMAL);
8867 if (modifier == EXPAND_STACK_PARM)
8868 target = 0;
8869 /* In case we have to reduce the result to bitfield precision
8870 for an unsigned bitfield, expand this as XOR with a proper constant
8871 instead. */
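      /* E.g. at 3-bit precision the mask below is 7, and X ^ 7 inverts
         exactly the low three bits, which is ~X already reduced to the
         field's precision.  */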
8872 if (reduce_bit_field && TYPE_UNSIGNED (type))
8874 wide_int mask = wi::mask (TYPE_PRECISION (type),
8875 false, GET_MODE_PRECISION (mode));
8877 temp = expand_binop (mode, xor_optab, op0,
8878 immed_wide_int_const (mask, mode),
8879 target, 1, OPTAB_LIB_WIDEN);
8881 else
8882 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8883 gcc_assert (temp);
8884 return temp;
8886 /* ??? Can optimize bitwise operations with one arg constant.
8887 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8888 and (a bitwise1 b) bitwise2 b (etc)
8889 but that is probably not worth while. */
8891 case BIT_AND_EXPR:
8892 case BIT_IOR_EXPR:
8893 case BIT_XOR_EXPR:
8894 goto binop;
8896 case LROTATE_EXPR:
8897 case RROTATE_EXPR:
8898 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8899 || (GET_MODE_PRECISION (TYPE_MODE (type))
8900 == TYPE_PRECISION (type)));
8901 /* fall through */
8903 case LSHIFT_EXPR:
8904 case RSHIFT_EXPR:
8905 /* If this is a fixed-point operation, then we cannot use the code
8906 below because "expand_shift" doesn't support sat/no-sat fixed-point
8907 shifts. */
8908 if (ALL_FIXED_POINT_MODE_P (mode))
8909 goto binop;
8911 if (! safe_from_p (subtarget, treeop1, 1))
8912 subtarget = 0;
8913 if (modifier == EXPAND_STACK_PARM)
8914 target = 0;
8915 op0 = expand_expr (treeop0, subtarget,
8916 VOIDmode, EXPAND_NORMAL);
8917 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8918 unsignedp);
8919 if (code == LSHIFT_EXPR)
8920 temp = REDUCE_BIT_FIELD (temp);
8921 return temp;
8923 /* Could determine the answer when only additive constants differ. Also,
8924 the addition of one can be handled by changing the condition. */
8925 case LT_EXPR:
8926 case LE_EXPR:
8927 case GT_EXPR:
8928 case GE_EXPR:
8929 case EQ_EXPR:
8930 case NE_EXPR:
8931 case UNORDERED_EXPR:
8932 case ORDERED_EXPR:
8933 case UNLT_EXPR:
8934 case UNLE_EXPR:
8935 case UNGT_EXPR:
8936 case UNGE_EXPR:
8937 case UNEQ_EXPR:
8938 case LTGT_EXPR:
8939 temp = do_store_flag (ops,
8940 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8941 tmode != VOIDmode ? tmode : mode);
8942 if (temp)
8943 return temp;
8945 /* Use a compare and a jump for BLKmode comparisons, or for function
8946 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8948 if ((target == 0
8949 || modifier == EXPAND_STACK_PARM
8950 || ! safe_from_p (target, treeop0, 1)
8951 || ! safe_from_p (target, treeop1, 1)
8952 /* Make sure we don't have a hard reg (such as function's return
8953 value) live across basic blocks, if not optimizing. */
8954 || (!optimize && REG_P (target)
8955 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8956 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8958 emit_move_insn (target, const0_rtx);
8960 op1 = gen_label_rtx ();
8961 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8963 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8964 emit_move_insn (target, constm1_rtx);
8965 else
8966 emit_move_insn (target, const1_rtx);
8968 emit_label (op1);
8969 return target;
8971 case COMPLEX_EXPR:
8972 /* Get the rtx code of the operands. */
8973 op0 = expand_normal (treeop0);
8974 op1 = expand_normal (treeop1);
8976 if (!target)
8977 target = gen_reg_rtx (TYPE_MODE (type));
8978 else
8979 /* If target overlaps with op1, then either we need to force
8980 op1 into a pseudo (if target also overlaps with op0),
8981 or write the complex parts in reverse order. */
8982 switch (GET_CODE (target))
8984 case CONCAT:
8985 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8987 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8989 complex_expr_force_op1:
8990 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8991 emit_move_insn (temp, op1);
8992 op1 = temp;
8993 break;
8995 complex_expr_swap_order:
8996 /* Move the imaginary (op1) and real (op0) parts to their
8997 location. */
8998 write_complex_part (target, op1, true);
8999 write_complex_part (target, op0, false);
9001 return target;
9003 break;
9004 case MEM:
9005 temp = adjust_address_nv (target,
9006 GET_MODE_INNER (GET_MODE (target)), 0);
9007 if (reg_overlap_mentioned_p (temp, op1))
9009 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9010 temp = adjust_address_nv (target, imode,
9011 GET_MODE_SIZE (imode));
9012 if (reg_overlap_mentioned_p (temp, op0))
9013 goto complex_expr_force_op1;
9014 goto complex_expr_swap_order;
9016 break;
9017 default:
9018 if (reg_overlap_mentioned_p (target, op1))
9020 if (reg_overlap_mentioned_p (target, op0))
9021 goto complex_expr_force_op1;
9022 goto complex_expr_swap_order;
9024 break;
9027 /* Move the real (op0) and imaginary (op1) parts to their location. */
9028 write_complex_part (target, op0, false);
9029 write_complex_part (target, op1, true);
9031 return target;
9033 case WIDEN_SUM_EXPR:
9035 tree oprnd0 = treeop0;
9036 tree oprnd1 = treeop1;
9038 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9039 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9040 target, unsignedp);
9041 return target;
9044 case REDUC_MAX_EXPR:
9045 case REDUC_MIN_EXPR:
9046 case REDUC_PLUS_EXPR:
9048 op0 = expand_normal (treeop0);
9049 this_optab = optab_for_tree_code (code, type, optab_default);
9050 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9052 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9054 struct expand_operand ops[2];
9055 enum insn_code icode = optab_handler (this_optab, vec_mode);
9057 create_output_operand (&ops[0], target, mode);
9058 create_input_operand (&ops[1], op0, vec_mode);
9059 if (maybe_expand_insn (icode, 2, ops))
9061 target = ops[0].value;
9062 if (GET_MODE (target) != mode)
9063 return gen_lowpart (tmode, target);
9064 return target;
9067 /* Fall back to optab with vector result, and then extract scalar. */
9068 this_optab = scalar_reduc_to_vector (this_optab, type);
9069 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9070 gcc_assert (temp);
9071 /* The tree code produces a scalar result, but (somewhat by convention)
9072 the optab produces a vector with the result in element 0 if
9073 little-endian, or element N-1 if big-endian. So pull the scalar
9074 result out of that element. */
9075 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9076 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9077 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9078 target, mode, mode);
9079 gcc_assert (temp);
9080 return temp;
9083 case VEC_RSHIFT_EXPR:
9085 target = expand_vec_shift_expr (ops, target);
9086 return target;
9089 case VEC_UNPACK_HI_EXPR:
9090 case VEC_UNPACK_LO_EXPR:
9092 op0 = expand_normal (treeop0);
9093 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9094 target, unsignedp);
9095 gcc_assert (temp);
9096 return temp;
9099 case VEC_UNPACK_FLOAT_HI_EXPR:
9100 case VEC_UNPACK_FLOAT_LO_EXPR:
9102 op0 = expand_normal (treeop0);
9103 /* The signedness is determined from input operand. */
9104 temp = expand_widen_pattern_expr
9105 (ops, op0, NULL_RTX, NULL_RTX,
9106 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9108 gcc_assert (temp);
9109 return temp;
9112 case VEC_WIDEN_MULT_HI_EXPR:
9113 case VEC_WIDEN_MULT_LO_EXPR:
9114 case VEC_WIDEN_MULT_EVEN_EXPR:
9115 case VEC_WIDEN_MULT_ODD_EXPR:
9116 case VEC_WIDEN_LSHIFT_HI_EXPR:
9117 case VEC_WIDEN_LSHIFT_LO_EXPR:
9118 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9119 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9120 target, unsignedp);
9121 gcc_assert (target);
9122 return target;
9124 case VEC_PACK_TRUNC_EXPR:
9125 case VEC_PACK_SAT_EXPR:
9126 case VEC_PACK_FIX_TRUNC_EXPR:
9127 mode = TYPE_MODE (TREE_TYPE (treeop0));
9128 goto binop;
9130 case VEC_PERM_EXPR:
9131 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9132 op2 = expand_normal (treeop2);
9134 /* Careful here: if the target doesn't support integral vector modes,
9135 a constant selection vector could wind up smooshed into a normal
9136 integral constant. */
9137 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9139 tree sel_type = TREE_TYPE (treeop2);
9140 machine_mode vmode
9141 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9142 TYPE_VECTOR_SUBPARTS (sel_type));
9143 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9144 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9145 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9147 else
9148 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9150 temp = expand_vec_perm (mode, op0, op1, op2, target);
9151 gcc_assert (temp);
9152 return temp;
9154 case DOT_PROD_EXPR:
9156 tree oprnd0 = treeop0;
9157 tree oprnd1 = treeop1;
9158 tree oprnd2 = treeop2;
9159 rtx op2;
9161 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9162 op2 = expand_normal (oprnd2);
9163 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9164 target, unsignedp);
9165 return target;
9168 case SAD_EXPR:
9170 tree oprnd0 = treeop0;
9171 tree oprnd1 = treeop1;
9172 tree oprnd2 = treeop2;
9173 rtx op2;
9175 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9176 op2 = expand_normal (oprnd2);
9177 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9178 target, unsignedp);
9179 return target;
9182 case REALIGN_LOAD_EXPR:
9184 tree oprnd0 = treeop0;
9185 tree oprnd1 = treeop1;
9186 tree oprnd2 = treeop2;
9187 rtx op2;
9189 this_optab = optab_for_tree_code (code, type, optab_default);
9190 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9191 op2 = expand_normal (oprnd2);
9192 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9193 target, unsignedp);
9194 gcc_assert (temp);
9195 return temp;
9198 case COND_EXPR:
9199 /* A COND_EXPR with its type being VOID_TYPE represents a
9200 conditional jump and is handled in
9201 expand_gimple_cond_expr. */
9202 gcc_assert (!VOID_TYPE_P (type));
9204 /* Note that COND_EXPRs whose type is a structure or union
9205 are required to be constructed to contain assignments of
9206 a temporary variable, so that we can evaluate them here
9207 for side effect only. If type is void, we must do likewise. */
9209 gcc_assert (!TREE_ADDRESSABLE (type)
9210 && !ignore
9211 && TREE_TYPE (treeop1) != void_type_node
9212 && TREE_TYPE (treeop2) != void_type_node);
9214 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9215 if (temp)
9216 return temp;
9218 /* If we are not to produce a result, we have no target. Otherwise,
9219 if a target was specified use it; it will not be used as an
9220 intermediate target unless it is safe. If no target, use a
9221 temporary. */
9223 if (modifier != EXPAND_STACK_PARM
9224 && original_target
9225 && safe_from_p (original_target, treeop0, 1)
9226 && GET_MODE (original_target) == mode
9227 && !MEM_P (original_target))
9228 temp = original_target;
9229 else
9230 temp = assign_temp (type, 0, 1);
9232 do_pending_stack_adjust ();
9233 NO_DEFER_POP;
9234 op0 = gen_label_rtx ();
9235 op1 = gen_label_rtx ();
9236 jumpifnot (treeop0, op0, -1);
9237 store_expr (treeop1, temp,
9238 modifier == EXPAND_STACK_PARM,
9239 false);
9241 emit_jump_insn (gen_jump (op1));
9242 emit_barrier ();
9243 emit_label (op0);
9244 store_expr (treeop2, temp,
9245 modifier == EXPAND_STACK_PARM,
9246 false);
9248 emit_label (op1);
9249 OK_DEFER_POP;
9250 return temp;
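/* As a rough sketch (illustrative only), for "x = a ? b : c" the
   sequence emitted above has the shape

       if (!a) goto L_false;        <- jumpifnot (treeop0, op0, -1)
       temp = b;                    <- store_expr (treeop1, ...)
       goto L_done;
     L_false:
       temp = c;                    <- store_expr (treeop2, ...)
     L_done:

   where L_false and L_done stand for the labels op0 and op1, and TEMP
   is then returned as the value of the COND_EXPR.  */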
9252 case VEC_COND_EXPR:
9253 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9254 return target;
9256 default:
9257 gcc_unreachable ();
9260 /* Here to do an ordinary binary operator. */
9261 binop:
9262 expand_operands (treeop0, treeop1,
9263 subtarget, &op0, &op1, EXPAND_NORMAL);
9264 binop2:
9265 this_optab = optab_for_tree_code (code, type, optab_default);
9266 binop3:
9267 if (modifier == EXPAND_STACK_PARM)
9268 target = 0;
9269 temp = expand_binop (mode, this_optab, op0, op1, target,
9270 unsignedp, OPTAB_LIB_WIDEN);
9271 gcc_assert (temp);
9272 /* Bitwise operations do not need bitfield reduction, as we expect their
9273 operands to be properly truncated. */
9274 if (code == BIT_XOR_EXPR
9275 || code == BIT_AND_EXPR
9276 || code == BIT_IOR_EXPR)
9277 return temp;
9278 return REDUCE_BIT_FIELD (temp);
9280 #undef REDUCE_BIT_FIELD
9283 /* Return TRUE if the statement STMT is suitable for replacement.
9284 Never consider memory loads as replaceable, because those never lead
9285 to constant expressions. */
9287 static bool
9288 stmt_is_replaceable_p (gimple stmt)
9290 if (ssa_is_replaceable_p (stmt))
9292 /* Don't move around loads. */
9293 if (!gimple_assign_single_p (stmt)
9294 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9295 return true;
9297 return false;
9301 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9302 enum expand_modifier modifier, rtx *alt_rtl,
9303 bool inner_reference_p)
9305 rtx op0, op1, temp, decl_rtl;
9306 tree type;
9307 int unsignedp;
9308 machine_mode mode;
9309 enum tree_code code = TREE_CODE (exp);
9310 rtx subtarget, original_target;
9311 int ignore;
9312 tree context;
9313 bool reduce_bit_field;
9314 location_t loc = EXPR_LOCATION (exp);
9315 struct separate_ops ops;
9316 tree treeop0, treeop1, treeop2;
9317 tree ssa_name = NULL_TREE;
9318 gimple g;
9320 type = TREE_TYPE (exp);
9321 mode = TYPE_MODE (type);
9322 unsignedp = TYPE_UNSIGNED (type);
9324 treeop0 = treeop1 = treeop2 = NULL_TREE;
9325 if (!VL_EXP_CLASS_P (exp))
9326 switch (TREE_CODE_LENGTH (code))
9328 default:
9329 case 3: treeop2 = TREE_OPERAND (exp, 2);
9330 case 2: treeop1 = TREE_OPERAND (exp, 1);
9331 case 1: treeop0 = TREE_OPERAND (exp, 0);
9332 case 0: break;
9334 ops.code = code;
9335 ops.type = type;
9336 ops.op0 = treeop0;
9337 ops.op1 = treeop1;
9338 ops.op2 = treeop2;
9339 ops.location = loc;
9341 ignore = (target == const0_rtx
9342 || ((CONVERT_EXPR_CODE_P (code)
9343 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9344 && TREE_CODE (type) == VOID_TYPE));
9346 /* An operation in what may be a bit-field type needs the
9347 result to be reduced to the precision of the bit-field type,
9348 which is narrower than that of the type's mode. */
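/* For example (hypothetical), adding two values of a 3-bit unsigned
   bit-field type is carried out in the full mode (say SImode), so 6 + 5
   yields 11 there; REDUCE_BIT_FIELD then masks the result back down to
   3 bits, giving 3, the value the bit-field type requires.  */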
9349 reduce_bit_field = (!ignore
9350 && INTEGRAL_TYPE_P (type)
9351 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9353 /* If we are going to ignore this result, we need only do something
9354 if there is a side-effect somewhere in the expression. If there
9355 is, short-circuit the most common cases here. Note that we must
9356 not call expand_expr with anything but const0_rtx in case this
9357 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9359 if (ignore)
9361 if (! TREE_SIDE_EFFECTS (exp))
9362 return const0_rtx;
9364 /* Ensure we reference a volatile object even if the value is ignored, but
9365 don't do this if all we are doing is taking its address. */
9366 if (TREE_THIS_VOLATILE (exp)
9367 && TREE_CODE (exp) != FUNCTION_DECL
9368 && mode != VOIDmode && mode != BLKmode
9369 && modifier != EXPAND_CONST_ADDRESS)
9371 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9372 if (MEM_P (temp))
9373 copy_to_reg (temp);
9374 return const0_rtx;
9377 if (TREE_CODE_CLASS (code) == tcc_unary
9378 || code == BIT_FIELD_REF
9379 || code == COMPONENT_REF
9380 || code == INDIRECT_REF)
9381 return expand_expr (treeop0, const0_rtx, VOIDmode,
9382 modifier);
9384 else if (TREE_CODE_CLASS (code) == tcc_binary
9385 || TREE_CODE_CLASS (code) == tcc_comparison
9386 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9388 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9389 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9390 return const0_rtx;
9393 target = 0;
9396 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9397 target = 0;
9399 /* Use subtarget as the target for operand 0 of a binary operation. */
9400 subtarget = get_subtarget (target);
9401 original_target = target;
9403 switch (code)
9405 case LABEL_DECL:
9407 tree function = decl_function_context (exp);
9409 temp = label_rtx (exp);
9410 temp = gen_rtx_LABEL_REF (Pmode, temp);
9412 if (function != current_function_decl
9413 && function != 0)
9414 LABEL_REF_NONLOCAL_P (temp) = 1;
9416 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9417 return temp;
9420 case SSA_NAME:
9421 /* ??? ivopts calls the expander without any preparation from
9422 out-of-ssa, so fake instructions as if this were an access to the
9423 base variable. This unnecessarily allocates a pseudo; see whether we
9424 can reuse it if partition base vars have it set already. */
9425 if (!currently_expanding_to_rtl)
9427 tree var = SSA_NAME_VAR (exp);
9428 if (var && DECL_RTL_SET_P (var))
9429 return DECL_RTL (var);
9430 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9431 LAST_VIRTUAL_REGISTER + 1);
9434 g = get_gimple_for_ssa_name (exp);
9435 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9436 if (g == NULL
9437 && modifier == EXPAND_INITIALIZER
9438 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9439 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9440 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9441 g = SSA_NAME_DEF_STMT (exp);
9442 if (g)
9444 rtx r;
9445 ops.code = gimple_assign_rhs_code (g);
9446 switch (get_gimple_rhs_class (ops.code))
9448 case GIMPLE_TERNARY_RHS:
9449 ops.op2 = gimple_assign_rhs3 (g);
9450 /* Fallthru */
9451 case GIMPLE_BINARY_RHS:
9452 ops.op1 = gimple_assign_rhs2 (g);
9453 /* Fallthru */
9454 case GIMPLE_UNARY_RHS:
9455 ops.op0 = gimple_assign_rhs1 (g);
9456 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9457 ops.location = gimple_location (g);
9458 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9459 break;
9460 case GIMPLE_SINGLE_RHS:
9462 location_t saved_loc = curr_insn_location ();
9463 set_curr_insn_location (gimple_location (g));
9464 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9465 tmode, modifier, NULL, inner_reference_p);
9466 set_curr_insn_location (saved_loc);
9467 break;
9469 default:
9470 gcc_unreachable ();
9472 if (REG_P (r) && !REG_EXPR (r))
9473 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9474 return r;
9477 ssa_name = exp;
9478 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9479 exp = SSA_NAME_VAR (ssa_name);
9480 goto expand_decl_rtl;
9482 case PARM_DECL:
9483 case VAR_DECL:
9484 /* If a static var's type was incomplete when the decl was written,
9485 but the type is complete now, lay out the decl now. */
9486 if (DECL_SIZE (exp) == 0
9487 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9488 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9489 layout_decl (exp, 0);
9491 /* ... fall through ... */
9493 case FUNCTION_DECL:
9494 case RESULT_DECL:
9495 decl_rtl = DECL_RTL (exp);
9496 expand_decl_rtl:
9497 gcc_assert (decl_rtl);
9498 decl_rtl = copy_rtx (decl_rtl);
9499 /* Record writes to register variables. */
9500 if (modifier == EXPAND_WRITE
9501 && REG_P (decl_rtl)
9502 && HARD_REGISTER_P (decl_rtl))
9503 add_to_hard_reg_set (&crtl->asm_clobbers,
9504 GET_MODE (decl_rtl), REGNO (decl_rtl));
9506 /* Ensure the variable is marked as used even if it doesn't go through
9507 a parser. If it hasn't been used yet, write out an external
9508 definition. */
9509 TREE_USED (exp) = 1;
9511 /* Show we haven't gotten RTL for this yet. */
9512 temp = 0;
9514 /* Variables inherited from containing functions should have
9515 been lowered by this point. */
9516 context = decl_function_context (exp);
9517 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9518 || context == current_function_decl
9519 || TREE_STATIC (exp)
9520 || DECL_EXTERNAL (exp)
9521 /* ??? C++ creates functions that are not TREE_STATIC. */
9522 || TREE_CODE (exp) == FUNCTION_DECL);
9524 /* This is the case of an array whose size is to be determined
9525 from its initializer, while the initializer is still being parsed.
9526 ??? We aren't parsing while expanding anymore. */
9528 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9529 temp = validize_mem (decl_rtl);
9531 /* If DECL_RTL is memory, we are in the normal case and the
9532 address is not valid, get the address into a register. */
9534 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9536 if (alt_rtl)
9537 *alt_rtl = decl_rtl;
9538 decl_rtl = use_anchored_address (decl_rtl);
9539 if (modifier != EXPAND_CONST_ADDRESS
9540 && modifier != EXPAND_SUM
9541 && !memory_address_addr_space_p (DECL_MODE (exp),
9542 XEXP (decl_rtl, 0),
9543 MEM_ADDR_SPACE (decl_rtl)))
9544 temp = replace_equiv_address (decl_rtl,
9545 copy_rtx (XEXP (decl_rtl, 0)));
9548 /* If we got something, return it. But first, set the alignment
9549 if the address is a register. */
9550 if (temp != 0)
9552 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9553 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9555 return temp;
9558 /* If the mode of DECL_RTL does not match that of the decl,
9559 there are two cases: we are dealing with a BLKmode value
9560 that is returned in a register, or we are dealing with
9561 a promoted value. In the latter case, return a SUBREG
9562 of the wanted mode, but mark it so that we know that it
9563 was already extended. */
9564 if (REG_P (decl_rtl)
9565 && DECL_MODE (exp) != BLKmode
9566 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9568 machine_mode pmode;
9570 /* Get the signedness to be used for this variable. Ensure we get
9571 the same mode we got when the variable was declared. */
9572 if (code == SSA_NAME
9573 && (g = SSA_NAME_DEF_STMT (ssa_name))
9574 && gimple_code (g) == GIMPLE_CALL
9575 && !gimple_call_internal_p (g))
9576 pmode = promote_function_mode (type, mode, &unsignedp,
9577 gimple_call_fntype (g),
9579 else
9580 pmode = promote_decl_mode (exp, &unsignedp);
9581 gcc_assert (GET_MODE (decl_rtl) == pmode);
9583 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9584 SUBREG_PROMOTED_VAR_P (temp) = 1;
9585 SUBREG_PROMOTED_SET (temp, unsignedp);
9586 return temp;
9589 return decl_rtl;
9591 case INTEGER_CST:
9592 /* Given that TYPE_PRECISION (type) is not always equal to
9593 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9594 the former to the latter according to the signedness of the
9595 type. */
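/* E.g. (hypothetically) the value -1 of a 3-bit signed type carried in
   QImode must be materialized as the QImode constant -1 (all eight bits
   set) rather than as 0x07; the sign-aware extension below does exactly
   that.  */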
9596 temp = immed_wide_int_const (wide_int::from
9597 (exp,
9598 GET_MODE_PRECISION (TYPE_MODE (type)),
9599 TYPE_SIGN (type)),
9600 TYPE_MODE (type));
9601 return temp;
9603 case VECTOR_CST:
9605 tree tmp = NULL_TREE;
9606 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9607 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9608 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9609 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9610 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9611 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9612 return const_vector_from_tree (exp);
9613 if (GET_MODE_CLASS (mode) == MODE_INT)
9615 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9616 if (type_for_mode)
9617 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9619 if (!tmp)
9621 vec<constructor_elt, va_gc> *v;
9622 unsigned i;
9623 vec_alloc (v, VECTOR_CST_NELTS (exp));
9624 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9625 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9626 tmp = build_constructor (type, v);
9628 return expand_expr (tmp, ignore ? const0_rtx : target,
9629 tmode, modifier);
9632 case CONST_DECL:
9633 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9635 case REAL_CST:
9636 /* If optimized, generate immediate CONST_DOUBLE
9637 which will be turned into memory by reload if necessary.
9639 We used to force a register so that loop.c could see it. But
9640 this does not allow gen_* patterns to perform optimizations with
9641 the constants. It also produces two insns in cases like "x = 1.0;".
9642 On most machines, floating-point constants are not permitted in
9643 many insns, so we'd end up copying it to a register in any case.
9645 Now, we do the copying in expand_binop, if appropriate. */
9646 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9647 TYPE_MODE (TREE_TYPE (exp)));
9649 case FIXED_CST:
9650 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9651 TYPE_MODE (TREE_TYPE (exp)));
9653 case COMPLEX_CST:
9654 /* Handle evaluating a complex constant in a CONCAT target. */
9655 if (original_target && GET_CODE (original_target) == CONCAT)
9657 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9658 rtx rtarg, itarg;
9660 rtarg = XEXP (original_target, 0);
9661 itarg = XEXP (original_target, 1);
9663 /* Move the real and imaginary parts separately. */
9664 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9665 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9667 if (op0 != rtarg)
9668 emit_move_insn (rtarg, op0);
9669 if (op1 != itarg)
9670 emit_move_insn (itarg, op1);
9672 return original_target;
9675 /* ... fall through ... */
9677 case STRING_CST:
9678 temp = expand_expr_constant (exp, 1, modifier);
9680 /* temp contains a constant address.
9681 On RISC machines where a constant address isn't valid,
9682 make some insns to get that address into a register. */
9683 if (modifier != EXPAND_CONST_ADDRESS
9684 && modifier != EXPAND_INITIALIZER
9685 && modifier != EXPAND_SUM
9686 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9687 MEM_ADDR_SPACE (temp)))
9688 return replace_equiv_address (temp,
9689 copy_rtx (XEXP (temp, 0)));
9690 return temp;
9692 case SAVE_EXPR:
9694 tree val = treeop0;
9695 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9696 inner_reference_p);
9698 if (!SAVE_EXPR_RESOLVED_P (exp))
9700 /* We can indeed still hit this case, typically via builtin
9701 expanders calling save_expr immediately before expanding
9702 something. Assume this means that we only have to deal
9703 with non-BLKmode values. */
9704 gcc_assert (GET_MODE (ret) != BLKmode);
9706 val = build_decl (curr_insn_location (),
9707 VAR_DECL, NULL, TREE_TYPE (exp));
9708 DECL_ARTIFICIAL (val) = 1;
9709 DECL_IGNORED_P (val) = 1;
9710 treeop0 = val;
9711 TREE_OPERAND (exp, 0) = treeop0;
9712 SAVE_EXPR_RESOLVED_P (exp) = 1;
9714 if (!CONSTANT_P (ret))
9715 ret = copy_to_reg (ret);
9716 SET_DECL_RTL (val, ret);
9719 return ret;
9723 case CONSTRUCTOR:
9724 /* If we don't need the result, just ensure we evaluate any
9725 subexpressions. */
9726 if (ignore)
9728 unsigned HOST_WIDE_INT idx;
9729 tree value;
9731 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9732 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9734 return const0_rtx;
9737 return expand_constructor (exp, target, modifier, false);
9739 case TARGET_MEM_REF:
9741 addr_space_t as
9742 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9743 enum insn_code icode;
9744 unsigned int align;
9746 op0 = addr_for_mem_ref (exp, as, true);
9747 op0 = memory_address_addr_space (mode, op0, as);
9748 temp = gen_rtx_MEM (mode, op0);
9749 set_mem_attributes (temp, exp, 0);
9750 set_mem_addr_space (temp, as);
9751 align = get_object_alignment (exp);
9752 if (modifier != EXPAND_WRITE
9753 && modifier != EXPAND_MEMORY
9754 && mode != BLKmode
9755 && align < GET_MODE_ALIGNMENT (mode)
9756 /* If the target does not have special handling for unaligned
9757 loads of this mode, then it can use regular moves for them. */
9758 && ((icode = optab_handler (movmisalign_optab, mode))
9759 != CODE_FOR_nothing))
9761 struct expand_operand ops[2];
9763 /* We've already validated the memory, and we're creating a
9764 new pseudo destination. The predicates really can't fail,
9765 nor can the generator. */
9766 create_output_operand (&ops[0], NULL_RTX, mode);
9767 create_fixed_operand (&ops[1], temp);
9768 expand_insn (icode, 2, ops);
9769 temp = ops[0].value;
9771 return temp;
9774 case MEM_REF:
9776 addr_space_t as
9777 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9778 machine_mode address_mode;
9779 tree base = TREE_OPERAND (exp, 0);
9780 gimple def_stmt;
9781 enum insn_code icode;
9782 unsigned align;
9783 /* Handle expansion of non-aliased memory with non-BLKmode. That
9784 might end up in a register. */
9785 if (mem_ref_refers_to_non_mem_p (exp))
9787 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9788 base = TREE_OPERAND (base, 0);
9789 if (offset == 0
9790 && tree_fits_uhwi_p (TYPE_SIZE (type))
9791 && (GET_MODE_BITSIZE (DECL_MODE (base))
9792 == tree_to_uhwi (TYPE_SIZE (type))))
9793 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9794 target, tmode, modifier);
9795 if (TYPE_MODE (type) == BLKmode)
9797 temp = assign_stack_temp (DECL_MODE (base),
9798 GET_MODE_SIZE (DECL_MODE (base)));
9799 store_expr (base, temp, 0, false);
9800 temp = adjust_address (temp, BLKmode, offset);
9801 set_mem_size (temp, int_size_in_bytes (type));
9802 return temp;
9804 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9805 bitsize_int (offset * BITS_PER_UNIT));
9806 return expand_expr (exp, target, tmode, modifier);
9808 address_mode = targetm.addr_space.address_mode (as);
9809 base = TREE_OPERAND (exp, 0);
9810 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9812 tree mask = gimple_assign_rhs2 (def_stmt);
9813 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9814 gimple_assign_rhs1 (def_stmt), mask);
9815 TREE_OPERAND (exp, 0) = base;
9817 align = get_object_alignment (exp);
9818 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9819 op0 = memory_address_addr_space (mode, op0, as);
9820 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9822 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9823 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9824 op0 = memory_address_addr_space (mode, op0, as);
9826 temp = gen_rtx_MEM (mode, op0);
9827 set_mem_attributes (temp, exp, 0);
9828 set_mem_addr_space (temp, as);
9829 if (TREE_THIS_VOLATILE (exp))
9830 MEM_VOLATILE_P (temp) = 1;
9831 if (modifier != EXPAND_WRITE
9832 && modifier != EXPAND_MEMORY
9833 && !inner_reference_p
9834 && mode != BLKmode
9835 && align < GET_MODE_ALIGNMENT (mode))
9837 if ((icode = optab_handler (movmisalign_optab, mode))
9838 != CODE_FOR_nothing)
9840 struct expand_operand ops[2];
9842 /* We've already validated the memory, and we're creating a
9843 new pseudo destination. The predicates really can't fail,
9844 nor can the generator. */
9845 create_output_operand (&ops[0], NULL_RTX, mode);
9846 create_fixed_operand (&ops[1], temp);
9847 expand_insn (icode, 2, ops);
9848 temp = ops[0].value;
9850 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9851 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9852 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9853 (modifier == EXPAND_STACK_PARM
9854 ? NULL_RTX : target),
9855 mode, mode);
9857 return temp;
9860 case ARRAY_REF:
9863 tree array = treeop0;
9864 tree index = treeop1;
9865 tree init;
9867 /* Fold an expression like: "foo"[2].
9868 This is not done in fold so it won't happen inside &.
9869 Don't fold if this is for wide characters since it's too
9870 difficult to do correctly and this is a very rare case. */
9872 if (modifier != EXPAND_CONST_ADDRESS
9873 && modifier != EXPAND_INITIALIZER
9874 && modifier != EXPAND_MEMORY)
9876 tree t = fold_read_from_constant_string (exp);
9878 if (t)
9879 return expand_expr (t, target, tmode, modifier);
9882 /* If this is a constant index into a constant array,
9883 just get the value from the array. Handle both the case where
9884 we have an explicit constructor and the case where our operand is a
9885 variable that was declared const. */
9887 if (modifier != EXPAND_CONST_ADDRESS
9888 && modifier != EXPAND_INITIALIZER
9889 && modifier != EXPAND_MEMORY
9890 && TREE_CODE (array) == CONSTRUCTOR
9891 && ! TREE_SIDE_EFFECTS (array)
9892 && TREE_CODE (index) == INTEGER_CST)
9894 unsigned HOST_WIDE_INT ix;
9895 tree field, value;
9897 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9898 field, value)
9899 if (tree_int_cst_equal (field, index))
9901 if (!TREE_SIDE_EFFECTS (value))
9902 return expand_expr (fold (value), target, tmode, modifier);
9903 break;
9907 else if (optimize >= 1
9908 && modifier != EXPAND_CONST_ADDRESS
9909 && modifier != EXPAND_INITIALIZER
9910 && modifier != EXPAND_MEMORY
9911 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9912 && TREE_CODE (index) == INTEGER_CST
9913 && (TREE_CODE (array) == VAR_DECL
9914 || TREE_CODE (array) == CONST_DECL)
9915 && (init = ctor_for_folding (array)) != error_mark_node)
9917 if (init == NULL_TREE)
9919 tree value = build_zero_cst (type);
9920 if (TREE_CODE (value) == CONSTRUCTOR)
9922 /* If VALUE is a CONSTRUCTOR, this optimization is only
9923 useful if this doesn't store the CONSTRUCTOR into
9924 memory. If it does, it is more efficient to just
9925 load the data from the array directly. */
9926 rtx ret = expand_constructor (value, target,
9927 modifier, true);
9928 if (ret == NULL_RTX)
9929 value = NULL_TREE;
9932 if (value)
9933 return expand_expr (value, target, tmode, modifier);
9935 else if (TREE_CODE (init) == CONSTRUCTOR)
9937 unsigned HOST_WIDE_INT ix;
9938 tree field, value;
9940 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9941 field, value)
9942 if (tree_int_cst_equal (field, index))
9944 if (TREE_SIDE_EFFECTS (value))
9945 break;
9947 if (TREE_CODE (value) == CONSTRUCTOR)
9949 /* If VALUE is a CONSTRUCTOR, this
9950 optimization is only useful if
9951 this doesn't store the CONSTRUCTOR
9952 into memory. If it does, it is more
9953 efficient to just load the data from
9954 the array directly. */
9955 rtx ret = expand_constructor (value, target,
9956 modifier, true);
9957 if (ret == NULL_RTX)
9958 break;
9961 return
9962 expand_expr (fold (value), target, tmode, modifier);
9965 else if (TREE_CODE (init) == STRING_CST)
9967 tree low_bound = array_ref_low_bound (exp);
9968 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9970 /* Optimize the special case of a zero lower bound.
9972 We convert the lower bound to sizetype to avoid problems
9973 with constant folding. E.g. suppose the lower bound is
9974 1 and its mode is QI. Without the conversion
9975 (ARRAY + (INDEX - (unsigned char)1))
9976 becomes
9977 (ARRAY + (-(unsigned char)1) + INDEX)
9978 which becomes
9979 (ARRAY + 255 + INDEX). Oops! */
9980 if (!integer_zerop (low_bound))
9981 index1 = size_diffop_loc (loc, index1,
9982 fold_convert_loc (loc, sizetype,
9983 low_bound));
9985 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9987 tree type = TREE_TYPE (TREE_TYPE (init));
9988 machine_mode mode = TYPE_MODE (type);
9990 if (GET_MODE_CLASS (mode) == MODE_INT
9991 && GET_MODE_SIZE (mode) == 1)
9992 return gen_int_mode (TREE_STRING_POINTER (init)
9993 [TREE_INT_CST_LOW (index1)],
9994 mode);
9999 goto normal_inner_ref;
10001 case COMPONENT_REF:
10002 /* If the operand is a CONSTRUCTOR, we can just extract the
10003 appropriate field if it is present. */
10004 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10006 unsigned HOST_WIDE_INT idx;
10007 tree field, value;
10009 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10010 idx, field, value)
10011 if (field == treeop1
10012 /* We can normally use the value of the field in the
10013 CONSTRUCTOR. However, if this is a bitfield in
10014 an integral mode that we can fit in a HOST_WIDE_INT,
10015 we must mask only the number of bits in the bitfield,
10016 since this is done implicitly by the constructor. If
10017 the bitfield does not meet either of those conditions,
10018 we can't do this optimization. */
10019 && (! DECL_BIT_FIELD (field)
10020 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10021 && (GET_MODE_PRECISION (DECL_MODE (field))
10022 <= HOST_BITS_PER_WIDE_INT))))
10024 if (DECL_BIT_FIELD (field)
10025 && modifier == EXPAND_STACK_PARM)
10026 target = 0;
10027 op0 = expand_expr (value, target, tmode, modifier);
10028 if (DECL_BIT_FIELD (field))
10030 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10031 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10033 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10035 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10036 imode);
10037 op0 = expand_and (imode, op0, op1, target);
10039 else
10041 int count = GET_MODE_PRECISION (imode) - bitsize;
10043 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10044 target, 0);
10045 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10046 target, 0);
10050 return op0;
10053 goto normal_inner_ref;
10055 case BIT_FIELD_REF:
10056 case ARRAY_RANGE_REF:
10057 normal_inner_ref:
10059 machine_mode mode1, mode2;
10060 HOST_WIDE_INT bitsize, bitpos;
10061 tree offset;
10062 int volatilep = 0, must_force_mem;
10063 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10064 &mode1, &unsignedp, &volatilep, true);
10065 rtx orig_op0, memloc;
10066 bool mem_attrs_from_type = false;
10068 /* If we got back the original object, something is wrong. Perhaps
10069 we are evaluating an expression too early. In any event, don't
10070 infinitely recurse. */
10071 gcc_assert (tem != exp);
10073 /* If TEM's type is a union of variable size, pass TARGET to the inner
10074 computation, since it will need a temporary and TARGET is known
10075 to be suitable. This occurs in unchecked conversion in Ada. */
10076 orig_op0 = op0
10077 = expand_expr_real (tem,
10078 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10079 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10080 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10081 != INTEGER_CST)
10082 && modifier != EXPAND_STACK_PARM
10083 ? target : NULL_RTX),
10084 VOIDmode,
10085 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10086 NULL, true);
10088 /* If the field has a mode, we want to access it in the
10089 field's mode, not the computed mode.
10090 If a MEM has VOIDmode (external with incomplete type),
10091 use BLKmode for it instead. */
10092 if (MEM_P (op0))
10094 if (mode1 != VOIDmode)
10095 op0 = adjust_address (op0, mode1, 0);
10096 else if (GET_MODE (op0) == VOIDmode)
10097 op0 = adjust_address (op0, BLKmode, 0);
10100 mode2
10101 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10103 /* If we have either an offset, a BLKmode result, or a reference
10104 outside the underlying object, we must force it to memory.
10105 Such a case can occur in Ada if we have unchecked conversion
10106 of an expression from a scalar type to an aggregate type or
10107 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10108 passed a partially uninitialized object or a view-conversion
10109 to a larger size. */
10110 must_force_mem = (offset
10111 || mode1 == BLKmode
10112 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10114 /* Handle CONCAT first. */
10115 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10117 if (bitpos == 0
10118 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10119 return op0;
10120 if (bitpos == 0
10121 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10122 && bitsize)
10124 op0 = XEXP (op0, 0);
10125 mode2 = GET_MODE (op0);
10127 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10128 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10129 && bitpos
10130 && bitsize)
10132 op0 = XEXP (op0, 1);
10133 bitpos = 0;
10134 mode2 = GET_MODE (op0);
10136 else
10137 /* Otherwise force into memory. */
10138 must_force_mem = 1;
10141 /* If this is a constant, put it in a register if it is a legitimate
10142 constant and we don't need a memory reference. */
10143 if (CONSTANT_P (op0)
10144 && mode2 != BLKmode
10145 && targetm.legitimate_constant_p (mode2, op0)
10146 && !must_force_mem)
10147 op0 = force_reg (mode2, op0);
10149 /* Otherwise, if this is a constant, try to force it to the constant
10150 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10151 is a legitimate constant. */
10152 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10153 op0 = validize_mem (memloc);
10155 /* Otherwise, if this is a constant or the object is not in memory
10156 and need be, put it there. */
10157 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10159 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10160 emit_move_insn (memloc, op0);
10161 op0 = memloc;
10162 mem_attrs_from_type = true;
10165 if (offset)
10167 machine_mode address_mode;
10168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10169 EXPAND_SUM);
10171 gcc_assert (MEM_P (op0));
10173 address_mode = get_address_mode (op0);
10174 if (GET_MODE (offset_rtx) != address_mode)
10175 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10177 /* See the comment in expand_assignment for the rationale. */
10178 if (mode1 != VOIDmode
10179 && bitpos != 0
10180 && bitsize > 0
10181 && (bitpos % bitsize) == 0
10182 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10183 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10185 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10186 bitpos = 0;
10189 op0 = offset_address (op0, offset_rtx,
10190 highest_pow2_factor (offset));
10193 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10194 record its alignment as BIGGEST_ALIGNMENT. */
10195 if (MEM_P (op0) && bitpos == 0 && offset != 0
10196 && is_aligning_offset (offset, tem))
10197 set_mem_align (op0, BIGGEST_ALIGNMENT);
10199 /* Don't forget about volatility even if this is a bitfield. */
10200 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10202 if (op0 == orig_op0)
10203 op0 = copy_rtx (op0);
10205 MEM_VOLATILE_P (op0) = 1;
10208 /* In cases where an aligned union has an unaligned object
10209 as a field, we might be extracting a BLKmode value from
10210 an integer-mode (e.g., SImode) object. Handle this case
10211 by doing the extract into an object as wide as the field
10212 (which we know to be the width of a basic mode), then
10213 storing into memory, and changing the mode to BLKmode. */
10214 if (mode1 == VOIDmode
10215 || REG_P (op0) || GET_CODE (op0) == SUBREG
10216 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10217 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10218 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10219 && modifier != EXPAND_CONST_ADDRESS
10220 && modifier != EXPAND_INITIALIZER
10221 && modifier != EXPAND_MEMORY)
10222 /* If the bitfield is volatile and the bitsize
10223 is narrower than the access size of the bitfield,
10224 we need to extract bitfields from the access. */
10225 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10226 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10227 && mode1 != BLKmode
10228 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10229 /* If the field isn't aligned enough to fetch as a memref,
10230 fetch it as a bit field. */
10231 || (mode1 != BLKmode
10232 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10233 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10234 || (MEM_P (op0)
10235 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10236 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10237 && modifier != EXPAND_MEMORY
10238 && ((modifier == EXPAND_CONST_ADDRESS
10239 || modifier == EXPAND_INITIALIZER)
10240 ? STRICT_ALIGNMENT
10241 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10242 || (bitpos % BITS_PER_UNIT != 0)))
10243 /* If the type and the field are a constant size and the
10244 size of the type isn't the same size as the bitfield,
10245 we must use bitfield operations. */
10246 || (bitsize >= 0
10247 && TYPE_SIZE (TREE_TYPE (exp))
10248 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10249 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10250 bitsize)))
10252 machine_mode ext_mode = mode;
10254 if (ext_mode == BLKmode
10255 && ! (target != 0 && MEM_P (op0)
10256 && MEM_P (target)
10257 && bitpos % BITS_PER_UNIT == 0))
10258 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10260 if (ext_mode == BLKmode)
10262 if (target == 0)
10263 target = assign_temp (type, 1, 1);
10265 /* ??? Unlike the similar test a few lines below, this one is
10266 very likely obsolete. */
10267 if (bitsize == 0)
10268 return target;
10270 /* In this case, BITPOS must start at a byte boundary and
10271 TARGET, if specified, must be a MEM. */
10272 gcc_assert (MEM_P (op0)
10273 && (!target || MEM_P (target))
10274 && !(bitpos % BITS_PER_UNIT));
10276 emit_block_move (target,
10277 adjust_address (op0, VOIDmode,
10278 bitpos / BITS_PER_UNIT),
10279 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10280 / BITS_PER_UNIT),
10281 (modifier == EXPAND_STACK_PARM
10282 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10284 return target;
10287 /* If we have nothing to extract, the result will be 0 for targets
10288 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10289 return 0 for the sake of consistency, as reading a zero-sized
10290 bitfield is valid in Ada and the value is fully specified. */
10291 if (bitsize == 0)
10292 return const0_rtx;
10294 op0 = validize_mem (op0);
10296 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10297 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10299 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10300 (modifier == EXPAND_STACK_PARM
10301 ? NULL_RTX : target),
10302 ext_mode, ext_mode);
10304 /* If the result is a record type and BITSIZE is narrower than
10305 the mode of OP0, an integral mode, and this is a big endian
10306 machine, we must put the field into the high-order bits. */
10307 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10308 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10309 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10310 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10311 GET_MODE_BITSIZE (GET_MODE (op0))
10312 - bitsize, op0, 1);
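/* E.g. (hypothetically) a 24-bit record extracted into a 32-bit
   integral OP0 on a big-endian target is shifted left by 8 bits by the
   expand_shift call above, so its bytes end up in the high-order part
   of the register, where the record layout expects them.  */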
10314 /* If the result type is BLKmode, store the data into a temporary
10315 of the appropriate type, but with the mode corresponding to the
10316 mode for the data we have (op0's mode). */
10317 if (mode == BLKmode)
10319 rtx new_rtx
10320 = assign_stack_temp_for_type (ext_mode,
10321 GET_MODE_BITSIZE (ext_mode),
10322 type);
10323 emit_move_insn (new_rtx, op0);
10324 op0 = copy_rtx (new_rtx);
10325 PUT_MODE (op0, BLKmode);
10328 return op0;
10331 /* If the result is BLKmode, use that to access the object
10332 now as well. */
10333 if (mode == BLKmode)
10334 mode1 = BLKmode;
10336 /* Get a reference to just this component. */
10337 if (modifier == EXPAND_CONST_ADDRESS
10338 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10339 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10340 else
10341 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10343 if (op0 == orig_op0)
10344 op0 = copy_rtx (op0);
10346 /* If op0 is a temporary because of forcing to memory, pass only the
10347 type to set_mem_attributes so that the original expression is never
10348 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10349 if (mem_attrs_from_type)
10350 set_mem_attributes (op0, type, 0);
10351 else
10352 set_mem_attributes (op0, exp, 0);
10354 if (REG_P (XEXP (op0, 0)))
10355 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10357 MEM_VOLATILE_P (op0) |= volatilep;
10358 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10359 || modifier == EXPAND_CONST_ADDRESS
10360 || modifier == EXPAND_INITIALIZER)
10361 return op0;
10363 if (target == 0)
10364 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10366 convert_move (target, op0, unsignedp);
10367 return target;
10370 case OBJ_TYPE_REF:
10371 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10373 case CALL_EXPR:
10374 /* All valid uses of __builtin_va_arg_pack () are removed during
10375 inlining. */
10376 if (CALL_EXPR_VA_ARG_PACK (exp))
10377 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10379 tree fndecl = get_callee_fndecl (exp), attr;
10381 if (fndecl
10382 && (attr = lookup_attribute ("error",
10383 DECL_ATTRIBUTES (fndecl))) != NULL)
10384 error ("%Kcall to %qs declared with attribute error: %s",
10385 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10386 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10387 if (fndecl
10388 && (attr = lookup_attribute ("warning",
10389 DECL_ATTRIBUTES (fndecl))) != NULL)
10390 warning_at (tree_nonartificial_location (exp),
10391 0, "%Kcall to %qs declared with attribute warning: %s",
10392 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10393 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10395 /* Check for a built-in function. */
10396 if (fndecl && DECL_BUILT_IN (fndecl))
10398 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10399 return expand_builtin (exp, target, subtarget, tmode, ignore);
10402 return expand_call (exp, target, ignore);
10404 case VIEW_CONVERT_EXPR:
10405 op0 = NULL_RTX;
10407 /* If we are converting to BLKmode, try to avoid an intermediate
10408 temporary by fetching an inner memory reference. */
10409 if (mode == BLKmode
10410 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10411 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10412 && handled_component_p (treeop0))
10414 machine_mode mode1;
10415 HOST_WIDE_INT bitsize, bitpos;
10416 tree offset;
10417 int unsignedp;
10418 int volatilep = 0;
10419 tree tem
10420 = get_inner_reference (treeop0, &bitsize, &bitpos,
10421 &offset, &mode1, &unsignedp, &volatilep,
10422 true);
10423 rtx orig_op0;
10425 /* ??? We should work harder and deal with non-zero offsets. */
10426 if (!offset
10427 && (bitpos % BITS_PER_UNIT) == 0
10428 && bitsize >= 0
10429 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10431 /* See the normal_inner_ref case for the rationale. */
10432 orig_op0
10433 = expand_expr_real (tem,
10434 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10435 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10436 != INTEGER_CST)
10437 && modifier != EXPAND_STACK_PARM
10438 ? target : NULL_RTX),
10439 VOIDmode,
10440 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10441 NULL, true);
10443 if (MEM_P (orig_op0))
10445 op0 = orig_op0;
10447 /* Get a reference to just this component. */
10448 if (modifier == EXPAND_CONST_ADDRESS
10449 || modifier == EXPAND_SUM
10450 || modifier == EXPAND_INITIALIZER)
10451 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10452 else
10453 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10455 if (op0 == orig_op0)
10456 op0 = copy_rtx (op0);
10458 set_mem_attributes (op0, treeop0, 0);
10459 if (REG_P (XEXP (op0, 0)))
10460 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10462 MEM_VOLATILE_P (op0) |= volatilep;
10467 if (!op0)
10468 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10469 NULL, inner_reference_p);
10471 /* If the input and output modes are both the same, we are done. */
10472 if (mode == GET_MODE (op0))
10474 /* If neither mode is BLKmode, and both modes are the same size
10475 then we can use gen_lowpart. */
10476 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10477 && (GET_MODE_PRECISION (mode)
10478 == GET_MODE_PRECISION (GET_MODE (op0)))
10479 && !COMPLEX_MODE_P (GET_MODE (op0)))
10481 if (GET_CODE (op0) == SUBREG)
10482 op0 = force_reg (GET_MODE (op0), op0);
10483 temp = gen_lowpart_common (mode, op0);
10484 if (temp)
10485 op0 = temp;
10486 else
10488 if (!REG_P (op0) && !MEM_P (op0))
10489 op0 = force_reg (GET_MODE (op0), op0);
10490 op0 = gen_lowpart (mode, op0);
10493 /* If both types are integral, convert from one mode to the other. */
10494 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10495 op0 = convert_modes (mode, GET_MODE (op0), op0,
10496 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10497 /* If the output type is a bit-field type, do an extraction. */
10498 else if (reduce_bit_field)
10499 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10500 TYPE_UNSIGNED (type), NULL_RTX,
10501 mode, mode);
10502 /* As a last resort, spill op0 to memory, and reload it in a
10503 different mode. */
10504 else if (!MEM_P (op0))
10506 /* If the operand is not a MEM, force it into memory. Since we
10507 are going to be changing the mode of the MEM, don't call
10508 force_const_mem for constants because we don't allow pool
10509 constants to change mode. */
10510 tree inner_type = TREE_TYPE (treeop0);
10512 gcc_assert (!TREE_ADDRESSABLE (exp));
10514 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10515 target
10516 = assign_stack_temp_for_type
10517 (TYPE_MODE (inner_type),
10518 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10520 emit_move_insn (target, op0);
10521 op0 = target;
10524 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10525 output type is such that the operand is known to be aligned, indicate
10526 that it is. Otherwise, we need only be concerned about alignment for
10527 non-BLKmode results. */
10528 if (MEM_P (op0))
10530 enum insn_code icode;
10532 if (TYPE_ALIGN_OK (type))
10534 /* ??? Copying the MEM without substantially changing it might
10535 run afoul of the code handling volatile memory references in
10536 store_expr, which assumes that TARGET is returned unmodified
10537 if it has been used. */
10538 op0 = copy_rtx (op0);
10539 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10541 else if (modifier != EXPAND_WRITE
10542 && modifier != EXPAND_MEMORY
10543 && !inner_reference_p
10544 && mode != BLKmode
10545 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10547 /* If the target does have special handling for unaligned
10548 loads of this mode, then use them. */
10549 if ((icode = optab_handler (movmisalign_optab, mode))
10550 != CODE_FOR_nothing)
10552 rtx reg, insn;
10554 op0 = adjust_address (op0, mode, 0);
10555 /* We've already validated the memory, and we're creating a
10556 new pseudo destination. The predicates really can't
10557 fail. */
10558 reg = gen_reg_rtx (mode);
10560 /* Nor can the insn generator. */
10561 insn = GEN_FCN (icode) (reg, op0);
10562 emit_insn (insn);
10563 return reg;
10565 else if (STRICT_ALIGNMENT)
10567 tree inner_type = TREE_TYPE (treeop0);
10568 HOST_WIDE_INT temp_size
10569 = MAX (int_size_in_bytes (inner_type),
10570 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10571 rtx new_rtx
10572 = assign_stack_temp_for_type (mode, temp_size, type);
10573 rtx new_with_op0_mode
10574 = adjust_address (new_rtx, GET_MODE (op0), 0);
10576 gcc_assert (!TREE_ADDRESSABLE (exp));
10578 if (GET_MODE (op0) == BLKmode)
10579 emit_block_move (new_with_op0_mode, op0,
10580 GEN_INT (GET_MODE_SIZE (mode)),
10581 (modifier == EXPAND_STACK_PARM
10582 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10583 else
10584 emit_move_insn (new_with_op0_mode, op0);
10586 op0 = new_rtx;
10590 op0 = adjust_address (op0, mode, 0);
10593 return op0;
10595 case MODIFY_EXPR:
10597 tree lhs = treeop0;
10598 tree rhs = treeop1;
10599 gcc_assert (ignore);
10601 /* Check for |= or &= of a bitfield of size one into another bitfield
10602 of size 1. In this case, (unless we need the result of the
10603 assignment) we can do this more efficiently with a
10604 test followed by an assignment, if necessary.
10606 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10607 things change so we do, this code should be enhanced to
10608 support it. */
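/* A hypothetical illustration of the transformation guarded below: for
   two one-bit fields, "s.a |= s.b;" is expanded roughly as

       if (s.b) s.a = 1;

   and "s.a &= s.b;" roughly as

       if (!s.b) s.a = 0;

   i.e. a test of the source bit followed by a conditional store of a
   constant, instead of a full read-modify-write of the destination.  */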
10609 if (TREE_CODE (lhs) == COMPONENT_REF
10610 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10611 || TREE_CODE (rhs) == BIT_AND_EXPR)
10612 && TREE_OPERAND (rhs, 0) == lhs
10613 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10614 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10615 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10617 rtx_code_label *label = gen_label_rtx ();
10618 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10619 do_jump (TREE_OPERAND (rhs, 1),
10620 value ? label : 0,
10621 value ? 0 : label, -1);
10622 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10623 false);
10624 do_pending_stack_adjust ();
10625 emit_label (label);
10626 return const0_rtx;
10629 expand_assignment (lhs, rhs, false);
10630 return const0_rtx;
10633 case ADDR_EXPR:
10634 return expand_expr_addr_expr (exp, target, tmode, modifier);
10636 case REALPART_EXPR:
10637 op0 = expand_normal (treeop0);
10638 return read_complex_part (op0, false);
10640 case IMAGPART_EXPR:
10641 op0 = expand_normal (treeop0);
10642 return read_complex_part (op0, true);
10644 case RETURN_EXPR:
10645 case LABEL_EXPR:
10646 case GOTO_EXPR:
10647 case SWITCH_EXPR:
10648 case ASM_EXPR:
10649 /* Expanded in cfgexpand.c. */
10650 gcc_unreachable ();
10652 case TRY_CATCH_EXPR:
10653 case CATCH_EXPR:
10654 case EH_FILTER_EXPR:
10655 case TRY_FINALLY_EXPR:
10656 /* Lowered by tree-eh.c. */
10657 gcc_unreachable ();
10659 case WITH_CLEANUP_EXPR:
10660 case CLEANUP_POINT_EXPR:
10661 case TARGET_EXPR:
10662 case CASE_LABEL_EXPR:
10663 case VA_ARG_EXPR:
10664 case BIND_EXPR:
10665 case INIT_EXPR:
10666 case CONJ_EXPR:
10667 case COMPOUND_EXPR:
10668 case PREINCREMENT_EXPR:
10669 case PREDECREMENT_EXPR:
10670 case POSTINCREMENT_EXPR:
10671 case POSTDECREMENT_EXPR:
10672 case LOOP_EXPR:
10673 case EXIT_EXPR:
10674 case COMPOUND_LITERAL_EXPR:
10675 /* Lowered by gimplify.c. */
10676 gcc_unreachable ();
10678 case FDESC_EXPR:
10679 /* Function descriptors are not valid except as
10680 initialization constants, and should not be expanded. */
10681 gcc_unreachable ();
10683 case WITH_SIZE_EXPR:
10684 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10685 have pulled out the size to use in whatever context it needed. */
10686 return expand_expr_real (treeop0, original_target, tmode,
10687 modifier, alt_rtl, inner_reference_p);
10689 default:
10690 return expand_expr_real_2 (&ops, target, tmode, modifier);
10694 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10695 signedness of TYPE), possibly returning the result in TARGET. */
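/* A worked example (hypothetical, assuming a 32-bit SImode): reducing an
   SImode value to a 5-bit type.  For an unsigned type the value is
   simply masked with (1 << 5) - 1 = 0x1f.  For a signed type it is
   shifted left by 32 - 5 = 27 bits and then arithmetically shifted
   right by 27 bits, sign-extending bit 4; e.g. 0x1d becomes -3.  */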
10696 static rtx
10697 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10699 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10700 if (target && GET_MODE (target) != GET_MODE (exp))
10701 target = 0;
10702 /* For constant values, reduce using build_int_cst_type. */
10703 if (CONST_INT_P (exp))
10705 HOST_WIDE_INT value = INTVAL (exp);
10706 tree t = build_int_cst_type (type, value);
10707 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10709 else if (TYPE_UNSIGNED (type))
10711 machine_mode mode = GET_MODE (exp);
10712 rtx mask = immed_wide_int_const
10713 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10714 return expand_and (mode, exp, mask, target);
10716 else
10718 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10719 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10720 exp, count, target, 0);
10721 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10722 exp, count, target, 0);
10726 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10727 when applied to the address of EXP produces an address known to be
10728 aligned more than BIGGEST_ALIGNMENT. */
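/* Informally, the shape recognized below is

       OFFSET = (- (sizetype) &EXP) & (ALIGN - 1)

   possibly wrapped in conversions, where ALIGN is a power of two whose
   byte value exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an
   OFFSET to &EXP rounds the address up to an ALIGN-byte boundary.  */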
10730 static int
10731 is_aligning_offset (const_tree offset, const_tree exp)
10733 /* Strip off any conversions. */
10734 while (CONVERT_EXPR_P (offset))
10735 offset = TREE_OPERAND (offset, 0);
10737 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10738 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10739 if (TREE_CODE (offset) != BIT_AND_EXPR
10740 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10741 || compare_tree_int (TREE_OPERAND (offset, 1),
10742 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10743 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10744 return 0;
10746 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10747 It must be NEGATE_EXPR. Then strip any more conversions. */
10748 offset = TREE_OPERAND (offset, 0);
10749 while (CONVERT_EXPR_P (offset))
10750 offset = TREE_OPERAND (offset, 0);
10752 if (TREE_CODE (offset) != NEGATE_EXPR)
10753 return 0;
10755 offset = TREE_OPERAND (offset, 0);
10756 while (CONVERT_EXPR_P (offset))
10757 offset = TREE_OPERAND (offset, 0);
10759 /* This must now be the address of EXP. */
10760 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10763 /* Return the tree node if an ARG corresponds to a string constant or zero
10764 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10765 in bytes within the string that ARG is accessing. The type of the
10766 offset will be `sizetype'. */
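/* For example (hypothetical), given ARG = &"hello"[2], or equivalently
   "hello" + 2, this returns the STRING_CST "hello" and sets *PTR_OFFSET
   to (sizetype) 2; for a const char array initialized from a string
   literal, the literal is returned in the same way.  */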
10768 tree
10769 string_constant (tree arg, tree *ptr_offset)
10771 tree array, offset, lower_bound;
10772 STRIP_NOPS (arg);
10774 if (TREE_CODE (arg) == ADDR_EXPR)
10776 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10778 *ptr_offset = size_zero_node;
10779 return TREE_OPERAND (arg, 0);
10781 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10783 array = TREE_OPERAND (arg, 0);
10784 offset = size_zero_node;
10786 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10788 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10789 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10790 if (TREE_CODE (array) != STRING_CST
10791 && TREE_CODE (array) != VAR_DECL)
10792 return 0;
10794 /* Check if the array has a nonzero lower bound. */
10795 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10796 if (!integer_zerop (lower_bound))
10798 /* If the offset and base aren't both constants, return 0. */
10799 if (TREE_CODE (lower_bound) != INTEGER_CST)
10800 return 0;
10801 if (TREE_CODE (offset) != INTEGER_CST)
10802 return 0;
10803 /* Adjust offset by the lower bound. */
10804 offset = size_diffop (fold_convert (sizetype, offset),
10805 fold_convert (sizetype, lower_bound));
10808 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10810 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10811 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10812 if (TREE_CODE (array) != ADDR_EXPR)
10813 return 0;
10814 array = TREE_OPERAND (array, 0);
10815 if (TREE_CODE (array) != STRING_CST
10816 && TREE_CODE (array) != VAR_DECL)
10817 return 0;
10819 else
10820 return 0;
10822 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10824 tree arg0 = TREE_OPERAND (arg, 0);
10825 tree arg1 = TREE_OPERAND (arg, 1);
10827 STRIP_NOPS (arg0);
10828 STRIP_NOPS (arg1);
10830 if (TREE_CODE (arg0) == ADDR_EXPR
10831 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10832 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10834 array = TREE_OPERAND (arg0, 0);
10835 offset = arg1;
10837 else if (TREE_CODE (arg1) == ADDR_EXPR
10838 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10839 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10841 array = TREE_OPERAND (arg1, 0);
10842 offset = arg0;
10844 else
10845 return 0;
10847 else
10848 return 0;
10850 if (TREE_CODE (array) == STRING_CST)
10852 *ptr_offset = fold_convert (sizetype, offset);
10853 return array;
10855 else if (TREE_CODE (array) == VAR_DECL
10856 || TREE_CODE (array) == CONST_DECL)
10858 int length;
10859 tree init = ctor_for_folding (array);
10861 /* Variables initialized to string literals can be handled too. */
10862 if (init == error_mark_node
10863 || !init
10864 || TREE_CODE (init) != STRING_CST)
10865 return 0;
10867 /* Avoid const char foo[4] = "abcde"; */
10868 if (DECL_SIZE_UNIT (array) == NULL_TREE
10869 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10870 || (length = TREE_STRING_LENGTH (init)) <= 0
10871 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10872 return 0;
10874 /* If the variable is bigger than the string literal, OFFSET must be
10875 constant and within the bounds of the string literal. */
10876 offset = fold_convert (sizetype, offset);
10877 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10878 && (! tree_fits_uhwi_p (offset)
10879 || compare_tree_int (offset, length) >= 0))
10880 return 0;
10882 *ptr_offset = offset;
10883 return init;
10886 return 0;
10889 /* Generate code to calculate OPS, an exploded expression,
10890 using a store-flag instruction, and return an rtx for the result.
10891 OPS reflects a comparison.
10893 If TARGET is nonzero, store the result there if convenient.
10895 Return zero if there is no suitable set-flag instruction
10896 available on this machine.
10898 Once expand_expr has been called on the arguments of the comparison,
10899 we are committed to doing the store flag, since it is not safe to
10900 re-evaluate the expression. We emit the store-flag insn by calling
10901 emit_store_flag, but only expand the arguments if we have a reason
10902 to believe that emit_store_flag will be successful. If we think that
10903 it will, but it isn't, we have to simulate the store-flag with a
10904 set/jump/set sequence. */
10906 static rtx
10907 do_store_flag (sepops ops, rtx target, machine_mode mode)
10909 enum rtx_code code;
10910 tree arg0, arg1, type;
10911 tree tem;
10912 machine_mode operand_mode;
10913 int unsignedp;
10914 rtx op0, op1;
10915 rtx subtarget = target;
10916 location_t loc = ops->location;
10918 arg0 = ops->op0;
10919 arg1 = ops->op1;
10921 /* Don't crash if the comparison was erroneous. */
10922 if (arg0 == error_mark_node || arg1 == error_mark_node)
10923 return const0_rtx;
10925 type = TREE_TYPE (arg0);
10926 operand_mode = TYPE_MODE (type);
10927 unsignedp = TYPE_UNSIGNED (type);
10929 /* We won't bother with BLKmode store-flag operations because it would mean
10930 passing a lot of information to emit_store_flag. */
10931 if (operand_mode == BLKmode)
10932 return 0;
10934 /* We won't bother with store-flag operations involving function pointers
10935 when function pointers must be canonicalized before comparisons. */
10936 #ifdef HAVE_canonicalize_funcptr_for_compare
10937 if (HAVE_canonicalize_funcptr_for_compare
10938 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10939 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10940 == FUNCTION_TYPE))
10941 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10942 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10943 == FUNCTION_TYPE))))
10944 return 0;
10945 #endif
10947 STRIP_NOPS (arg0);
10948 STRIP_NOPS (arg1);
10950 /* For vector typed comparisons emit code to generate the desired
10951 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10952 expander for this. */
10953 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10955 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10956 tree if_true = constant_boolean_node (true, ops->type);
10957 tree if_false = constant_boolean_node (false, ops->type);
10958 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10961 /* Get the rtx comparison code to use. We know that EXP is a comparison
10962 operation of some type. Some comparisons against 1 and -1 can be
10963 converted to comparisons with zero. Do so here so that the tests
10964 below will be aware that we have a comparison with zero. These
10965 tests will not catch constants in the first operand, but constants
10966 are rarely passed as the first operand. */
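/* For instance, a signed "x < 1" is handled below as "x <= 0" and a
   signed "x > -1" as "x >= 0", so the zero-comparison forms of the
   store-flag patterns can be used.  */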
10968 switch (ops->code)
10970 case EQ_EXPR:
10971 code = EQ;
10972 break;
10973 case NE_EXPR:
10974 code = NE;
10975 break;
10976 case LT_EXPR:
10977 if (integer_onep (arg1))
10978 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10979 else
10980 code = unsignedp ? LTU : LT;
10981 break;
10982 case LE_EXPR:
10983 if (! unsignedp && integer_all_onesp (arg1))
10984 arg1 = integer_zero_node, code = LT;
10985 else
10986 code = unsignedp ? LEU : LE;
10987 break;
10988 case GT_EXPR:
10989 if (! unsignedp && integer_all_onesp (arg1))
10990 arg1 = integer_zero_node, code = GE;
10991 else
10992 code = unsignedp ? GTU : GT;
10993 break;
10994 case GE_EXPR:
10995 if (integer_onep (arg1))
10996 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10997 else
10998 code = unsignedp ? GEU : GE;
10999 break;
11001 case UNORDERED_EXPR:
11002 code = UNORDERED;
11003 break;
11004 case ORDERED_EXPR:
11005 code = ORDERED;
11006 break;
11007 case UNLT_EXPR:
11008 code = UNLT;
11009 break;
11010 case UNLE_EXPR:
11011 code = UNLE;
11012 break;
11013 case UNGT_EXPR:
11014 code = UNGT;
11015 break;
11016 case UNGE_EXPR:
11017 code = UNGE;
11018 break;
11019 case UNEQ_EXPR:
11020 code = UNEQ;
11021 break;
11022 case LTGT_EXPR:
11023 code = LTGT;
11024 break;
11026 default:
11027 gcc_unreachable ();
11030 /* Put a constant second. */
11031 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11032 || TREE_CODE (arg0) == FIXED_CST)
11034 tem = arg0; arg0 = arg1; arg1 = tem;
11035 code = swap_condition (code);
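/* E.g. 1 < X is rewritten here as X > 1.  */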
11038 /* If this is an equality or inequality test of a single bit, we can
11039 do this by shifting the bit being tested to the low-order bit and
11040 masking the result with the constant 1. If the condition was EQ,
11041 we xor it with 1. This does not require an scc insn and is faster
11042 than an scc insn even if we have it.
11044 The code to make this transformation was moved into fold_single_bit_test,
11045 so we just call into the folder and expand its result. */
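/* For example, (X & 8) != 0 is expanded as (X >> 3) & 1, and the EQ
   form as the same value XORed with 1.  */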
11047 if ((code == NE || code == EQ)
11048 && integer_zerop (arg1)
11049 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11051 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11052 if (srcstmt
11053 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11055 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11056 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11057 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11058 gimple_assign_rhs1 (srcstmt),
11059 gimple_assign_rhs2 (srcstmt));
11060 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11061 if (temp)
11062 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11066 if (! get_subtarget (target)
11067 || GET_MODE (subtarget) != operand_mode)
11068 subtarget = 0;
11070 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11072 if (target == 0)
11073 target = gen_reg_rtx (mode);
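/* The last argument below selects the value stored for "true": a signed
   1-bit result type can only represent nonzero as -1, so use -1 there
   and 1 everywhere else.  */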
11075 /* Try a cstore if possible. */
11076 return emit_store_flag_force (target, code, op0, op1,
11077 operand_mode, unsignedp,
11078 (TYPE_PRECISION (ops->type) == 1
11079 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11083 /* Stubs in case we haven't got a casesi insn. */
11084 #ifndef HAVE_casesi
11085 # define HAVE_casesi 0
11086 # define gen_casesi(a, b, c, d, e) (0)
11087 # define CODE_FOR_casesi CODE_FOR_nothing
11088 #endif
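/* With these stubs, targets that lack a casesi pattern make try_casesi
   return 0 immediately, and the caller must fall back on some other
   dispatch strategy (e.g. a tablejump or a compare/branch sequence).  */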
11090 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11091 0 otherwise (i.e. if there is no casesi instruction).
11093 DEFAULT_PROBABILITY is the probability of jumping to the default
11094 label. */
11095 int
11096 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11097 rtx table_label, rtx default_label, rtx fallback_label,
11098 int default_probability)
11100 struct expand_operand ops[5];
11101 machine_mode index_mode = SImode;
11102 rtx op1, op2, index;
11104 if (! HAVE_casesi)
11105 return 0;
11107 /* Convert the index to SImode. */
11108 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11110 machine_mode omode = TYPE_MODE (index_type);
11111 rtx rangertx = expand_normal (range);
11113 /* We must handle the endpoints in the original mode. */
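/* For instance, when switching on a DImode index, the subtraction of
   MINVAL and the out-of-range check are done in DImode first, and only
   the known-in-range result is truncated to SImode.  */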
11114 index_expr = build2 (MINUS_EXPR, index_type,
11115 index_expr, minval);
11116 minval = integer_zero_node;
11117 index = expand_normal (index_expr);
11118 if (default_label)
11119 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11120 omode, 1, default_label,
11121 default_probability);
11122 /* Now we can safely truncate. */
11123 index = convert_to_mode (index_mode, index, 0);
11125 else
11127 if (TYPE_MODE (index_type) != index_mode)
11129 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11130 index_expr = fold_convert (index_type, index_expr);
11133 index = expand_normal (index_expr);
11136 do_pending_stack_adjust ();
11138 op1 = expand_normal (minval);
11139 op2 = expand_normal (range);
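/* The casesi pattern takes five operands: the (SImode) index, the
   minimum case value, the range (maximum minus minimum), the jump
   table label, and the label to use when the index is out of range.  */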
11141 create_input_operand (&ops[0], index, index_mode);
11142 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11143 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11144 create_fixed_operand (&ops[3], table_label);
11145 create_fixed_operand (&ops[4], (default_label
11146 ? default_label
11147 : fallback_label));
11148 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11149 return 1;
11152 /* Attempt to generate a tablejump instruction; same concept. */
11153 #ifndef HAVE_tablejump
11154 #define HAVE_tablejump 0
11155 #define gen_tablejump(x, y) (0)
11156 #endif
11158 /* Subroutine of the next function.
11160 INDEX is the value being switched on, with the lowest value
11161 in the table already subtracted.
11162 MODE is its expected mode (needed if INDEX is constant).
11163 RANGE is the length of the jump table.
11164 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11166 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11167 index value is out of range.
11168 DEFAULT_PROBABILITY is the probability of jumping to
11169 the default label. */
11171 static void
11172 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11173 rtx default_label, int default_probability)
11175 rtx temp, vector;
11177 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11178 cfun->cfg->max_jumptable_ents = INTVAL (range);
11180 /* Do an unsigned comparison (in the proper mode) between the index
11181 expression and the value which represents the length of the range.
11182 Since we just finished subtracting the lower bound of the range
11183 from the index expression, this comparison allows us to simultaneously
11184 check that the original index expression value is both greater than
11185 or equal to the minimum value of the range and less than or equal to
11186 the maximum value of the range. */
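/* For example, with case values 5 ... 12, the lower bound 5 has already
   been subtracted and RANGE is 7; the single unsigned test INDEX > 7
   then also rejects original values below 5, which wrap around to
   large unsigned numbers.  */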
11188 if (default_label)
11189 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11190 default_label, default_probability);
11193 /* If index is in range, it must fit in Pmode.
11194 Convert to Pmode so we can index with it. */
11195 if (mode != Pmode)
11196 index = convert_to_mode (Pmode, index, 1);
11198 /* Don't let a MEM slip through, because then INDEX that comes
11199 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11200 and break_out_memory_refs will go to work on it and mess it up. */
11201 #ifdef PIC_CASE_VECTOR_ADDRESS
11202 if (flag_pic && !REG_P (index))
11203 index = copy_to_mode_reg (Pmode, index);
11204 #endif
11206 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11207 GET_MODE_SIZE, because this indicates how large insns are. The other
11208 uses should all be Pmode, because they are addresses. This code
11209 could fail if addresses and insns are not the same size. */
11210 index = simplify_gen_binary (MULT, Pmode, index,
11211 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11212 Pmode));
11213 index = simplify_gen_binary (PLUS, Pmode, index,
11214 gen_rtx_LABEL_REF (Pmode, table_label));
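/* INDEX now holds TABLE_LABEL plus index times the entry size, i.e. the
   address of the selected dispatch-table entry (e.g. 4 bytes per entry
   for an SImode case vector), before any PIC adjustment below.  */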
11216 #ifdef PIC_CASE_VECTOR_ADDRESS
11217 if (flag_pic)
11218 index = PIC_CASE_VECTOR_ADDRESS (index);
11219 else
11220 #endif
11221 index = memory_address (CASE_VECTOR_MODE, index);
11222 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11223 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11224 convert_move (temp, vector, 0);
11226 emit_jump_insn (gen_tablejump (temp, table_label));
11228 /* If we are generating PIC code or if the table is PC-relative, the
11229 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11230 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11231 emit_barrier ();
11234 int
11235 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11236 rtx table_label, rtx default_label, int default_probability)
11238 rtx index;
11240 if (! HAVE_tablejump)
11241 return 0;
11243 index_expr = fold_build2 (MINUS_EXPR, index_type,
11244 fold_convert (index_type, index_expr),
11245 fold_convert (index_type, minval));
11246 index = expand_normal (index_expr);
11247 do_pending_stack_adjust ();
11249 do_tablejump (index, TYPE_MODE (index_type),
11250 convert_modes (TYPE_MODE (index_type),
11251 TYPE_MODE (TREE_TYPE (range)),
11252 expand_normal (range),
11253 TYPE_UNSIGNED (TREE_TYPE (range))),
11254 table_label, default_label, default_probability);
11255 return 1;
11258 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
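/* For example, a V4SI constant { 1, 2, 3, 4 } yields roughly
   (const_vector:V4SI [ 1 2 3 4 ]), while an all-zero constant is simply
   CONST0_RTX for the vector mode.  */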
11259 static rtx
11260 const_vector_from_tree (tree exp)
11262 rtvec v;
11263 unsigned i;
11264 int units;
11265 tree elt;
11266 machine_mode inner, mode;
11268 mode = TYPE_MODE (TREE_TYPE (exp));
11270 if (initializer_zerop (exp))
11271 return CONST0_RTX (mode);
11273 units = GET_MODE_NUNITS (mode);
11274 inner = GET_MODE_INNER (mode);
11276 v = rtvec_alloc (units);
11278 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11280 elt = VECTOR_CST_ELT (exp, i);
11282 if (TREE_CODE (elt) == REAL_CST)
11283 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11284 inner);
11285 else if (TREE_CODE (elt) == FIXED_CST)
11286 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11287 inner);
11288 else
11289 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11292 return gen_rtx_CONST_VECTOR (mode, v);
11295 /* Build a decl for a personality function given a language prefix. */
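/* For example, the prefix "gxx" (used for the C++ runtime) produces
   __gxx_personality_v0 with DWARF or target unwinding and
   __gxx_personality_sj0 with setjmp/longjmp unwinding.  */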
11297 tree
11298 build_personality_function (const char *lang)
11300 const char *unwind_and_version;
11301 tree decl, type;
11302 char *name;
11304 switch (targetm_common.except_unwind_info (&global_options))
11306 case UI_NONE:
11307 return NULL;
11308 case UI_SJLJ:
11309 unwind_and_version = "_sj0";
11310 break;
11311 case UI_DWARF2:
11312 case UI_TARGET:
11313 unwind_and_version = "_v0";
11314 break;
11315 case UI_SEH:
11316 unwind_and_version = "_seh0";
11317 break;
11318 default:
11319 gcc_unreachable ();
11322 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11324 type = build_function_type_list (integer_type_node, integer_type_node,
11325 long_long_unsigned_type_node,
11326 ptr_type_node, ptr_type_node, NULL_TREE);
11327 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11328 get_identifier (name), type);
11329 DECL_ARTIFICIAL (decl) = 1;
11330 DECL_EXTERNAL (decl) = 1;
11331 TREE_PUBLIC (decl) = 1;
11333 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11334 are the flags assigned by targetm.encode_section_info. */
11335 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11337 return decl;
11340 /* Extracts the personality function of DECL and returns the corresponding
11341 libfunc. */
11343 rtx
11344 get_personality_function (tree decl)
11346 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11347 enum eh_personality_kind pk;
11349 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11350 if (pk == eh_personality_none)
11351 return NULL;
11353 if (!personality
11354 && pk == eh_personality_any)
11355 personality = lang_hooks.eh_personality ();
11357 if (pk == eh_personality_lang)
11358 gcc_assert (personality != NULL_TREE);
11360 return XEXP (DECL_RTL (personality), 0);
11363 #include "gt-expr.h"