gcc/expr.c (official-gcc.git; merge from trunk @217148; blob 7079aa60b8dfd3d90f385056063f0b5ad0439eac)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "stringpool.h"
28 #include "stor-layout.h"
29 #include "attribs.h"
30 #include "varasm.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "except.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "input.h"
39 #include "function.h"
40 #include "insn-config.h"
41 #include "insn-attr.h"
42 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "expr.h"
44 #include "insn-codes.h"
45 #include "optabs.h"
46 #include "libfuncs.h"
47 #include "recog.h"
48 #include "reload.h"
49 #include "typeclass.h"
50 #include "toplev.h"
51 #include "langhooks.h"
52 #include "intl.h"
53 #include "tm_p.h"
54 #include "tree-iterator.h"
55 #include "predict.h"
56 #include "dominance.h"
57 #include "cfg.h"
58 #include "basic-block.h"
59 #include "tree-ssa-alias.h"
60 #include "internal-fn.h"
61 #include "gimple-expr.h"
62 #include "is-a.h"
63 #include "gimple.h"
64 #include "gimple-ssa.h"
65 #include "hash-map.h"
66 #include "plugin-api.h"
67 #include "ipa-ref.h"
68 #include "cgraph.h"
69 #include "tree-ssanames.h"
70 #include "target.h"
71 #include "common/common-target.h"
72 #include "timevar.h"
73 #include "df.h"
74 #include "diagnostic.h"
75 #include "tree-ssa-live.h"
76 #include "tree-outof-ssa.h"
77 #include "target-globals.h"
78 #include "params.h"
79 #include "tree-ssa-address.h"
80 #include "cfgexpand.h"
81 #include "builtins.h"
82 #include "tree-chkp.h"
83 #include "rtl-chkp.h"
85 #ifndef STACK_PUSH_CODE
86 #ifdef STACK_GROWS_DOWNWARD
87 #define STACK_PUSH_CODE PRE_DEC
88 #else
89 #define STACK_PUSH_CODE PRE_INC
90 #endif
91 #endif
94 /* If this is nonzero, we do not bother generating VOLATILE
95 around volatile memory references, and we are willing to
96 output indirect addresses. If cse is to follow, we reject
97 indirect addresses so a useful potential cse is generated;
98 if it is used only once, instruction combination will produce
99 the same indirect address eventually. */
100 int cse_not_expected;
102 /* This structure is used by move_by_pieces to describe the move to
103 be performed. */
104 struct move_by_pieces_d
106 rtx to;
107 rtx to_addr;
108 int autinc_to;
109 int explicit_inc_to;
110 rtx from;
111 rtx from_addr;
112 int autinc_from;
113 int explicit_inc_from;
114 unsigned HOST_WIDE_INT len;
115 HOST_WIDE_INT offset;
116 int reverse;
119 /* This structure is used by store_by_pieces to describe the clear to
120 be performed. */
122 struct store_by_pieces_d
124 rtx to;
125 rtx to_addr;
126 int autinc_to;
127 int explicit_inc_to;
128 unsigned HOST_WIDE_INT len;
129 HOST_WIDE_INT offset;
130 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
131 void *constfundata;
132 int reverse;
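/* Note: as used by store_by_pieces later in this file, CONSTFUN is called
   roughly as
       cst = constfun (constfundata, offset, mode);
   and is expected to return an rtx for the piece to store at OFFSET in
   mode MODE; CONSTFUNDATA is passed through untouched (clear_by_pieces_1,
   declared below, simply returns const0_rtx for every piece).  */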
135 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
136 struct move_by_pieces_d *);
137 static bool block_move_libcall_safe_for_call_parm (void);
138 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
139 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
140 unsigned HOST_WIDE_INT);
141 static tree emit_block_move_libcall_fn (int);
142 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
143 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
144 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
145 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
146 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
147 struct store_by_pieces_d *);
148 static tree clear_storage_libcall_fn (int);
149 static rtx_insn *compress_float_constant (rtx, rtx);
150 static rtx get_subtarget (rtx);
151 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
152 HOST_WIDE_INT, machine_mode,
153 tree, int, alias_set_type, bool);
154 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
155 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
157 machine_mode, tree, alias_set_type, bool, bool);
159 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
161 static int is_aligning_offset (const_tree, const_tree);
162 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
163 enum expand_modifier);
164 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
165 static rtx do_store_flag (sepops, rtx, machine_mode);
166 #ifdef PUSH_ROUNDING
167 static void emit_single_push_insn (machine_mode, rtx, tree);
168 #endif
169 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
170 static rtx const_vector_from_tree (tree);
171 static void write_complex_part (rtx, rtx, bool);
174 /* This is run to set up which modes can be used
175 directly in memory and to initialize the block move optab. It is run
176 at the beginning of compilation and when the target is reinitialized. */
178 void
179 init_expr_target (void)
181 rtx insn, pat;
182 machine_mode mode;
183 int num_clobbers;
184 rtx mem, mem1;
185 rtx reg;
187 /* Try indexing by frame ptr and try by stack ptr.
188 It is known that on the Convex the stack ptr isn't a valid index.
189 With luck, one or the other is valid on any machine. */
190 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
191 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
193 /* A scratch register we can modify in-place below to avoid
194 useless RTL allocations. */
195 reg = gen_rtx_REG (VOIDmode, -1);
197 insn = rtx_alloc (INSN);
198 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
199 PATTERN (insn) = pat;
201 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
202 mode = (machine_mode) ((int) mode + 1))
204 int regno;
206 direct_load[(int) mode] = direct_store[(int) mode] = 0;
207 PUT_MODE (mem, mode);
208 PUT_MODE (mem1, mode);
209 PUT_MODE (reg, mode);
211 /* See if there is some register that can be used in this mode and
212 directly loaded or stored from memory. */
214 if (mode != VOIDmode && mode != BLKmode)
215 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
216 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
217 regno++)
219 if (! HARD_REGNO_MODE_OK (regno, mode))
220 continue;
222 SET_REGNO (reg, regno);
224 SET_SRC (pat) = mem;
225 SET_DEST (pat) = reg;
226 if (recog (pat, insn, &num_clobbers) >= 0)
227 direct_load[(int) mode] = 1;
229 SET_SRC (pat) = mem1;
230 SET_DEST (pat) = reg;
231 if (recog (pat, insn, &num_clobbers) >= 0)
232 direct_load[(int) mode] = 1;
234 SET_SRC (pat) = reg;
235 SET_DEST (pat) = mem;
236 if (recog (pat, insn, &num_clobbers) >= 0)
237 direct_store[(int) mode] = 1;
239 SET_SRC (pat) = reg;
240 SET_DEST (pat) = mem1;
241 if (recog (pat, insn, &num_clobbers) >= 0)
242 direct_store[(int) mode] = 1;
246 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
249 mode = GET_MODE_WIDER_MODE (mode))
251 machine_mode srcmode;
252 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
253 srcmode = GET_MODE_WIDER_MODE (srcmode))
255 enum insn_code ic;
257 ic = can_extend_p (mode, srcmode, 0);
258 if (ic == CODE_FOR_nothing)
259 continue;
261 PUT_MODE (mem, srcmode);
263 if (insn_operand_matches (ic, 1, mem))
264 float_extend_from_mem[mode][srcmode] = true;
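/* In short: the loops above record, for every machine mode, whether some
   hard register can be loaded from or stored to memory directly in that
   mode (direct_load[] / direct_store[]), and whether a float-extension
   insn can take its narrower input straight from memory
   (float_extend_from_mem[][]).  convert_move and convert_modes below
   consult direct_load[] to decide when a MEM may be used in place.  */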
269 /* This is run at the start of compiling a function. */
271 void
272 init_expr (void)
274 memset (&crtl->expr, 0, sizeof (crtl->expr));
277 /* Copy data from FROM to TO, where the machine modes are not the same.
278 Both modes may be integer, or both may be floating, or both may be
279 fixed-point.
280 UNSIGNEDP should be nonzero if FROM is an unsigned type.
281 This causes zero-extension instead of sign-extension. */
283 void
284 convert_move (rtx to, rtx from, int unsignedp)
286 machine_mode to_mode = GET_MODE (to);
287 machine_mode from_mode = GET_MODE (from);
288 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
289 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
290 enum insn_code code;
291 rtx libcall;
293 /* rtx code for making an equivalent value. */
294 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
295 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
298 gcc_assert (to_real == from_real);
299 gcc_assert (to_mode != BLKmode);
300 gcc_assert (from_mode != BLKmode);
302 /* If the source and destination are already the same, then there's
303 nothing to do. */
304 if (to == from)
305 return;
307 /* If FROM is a SUBREG that indicates that we have already done at least
308 the required extension, strip it. We don't handle such SUBREGs as
309 TO here. */
311 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
312 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
313 >= GET_MODE_PRECISION (to_mode))
314 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
315 from = gen_lowpart (to_mode, from), from_mode = to_mode;
317 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
319 if (to_mode == from_mode
320 || (from_mode == VOIDmode && CONSTANT_P (from)))
322 emit_move_insn (to, from);
323 return;
326 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
328 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
330 if (VECTOR_MODE_P (to_mode))
331 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
332 else
333 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
335 emit_move_insn (to, from);
336 return;
339 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
341 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
342 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
343 return;
346 if (to_real)
348 rtx value;
349 rtx_insn *insns;
350 convert_optab tab;
352 gcc_assert ((GET_MODE_PRECISION (from_mode)
353 != GET_MODE_PRECISION (to_mode))
354 || (DECIMAL_FLOAT_MODE_P (from_mode)
355 != DECIMAL_FLOAT_MODE_P (to_mode)));
357 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
358 /* Conversion between decimal float and binary float, same size. */
359 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
360 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
361 tab = sext_optab;
362 else
363 tab = trunc_optab;
365 /* Try converting directly if the insn is supported. */
367 code = convert_optab_handler (tab, to_mode, from_mode);
368 if (code != CODE_FOR_nothing)
370 emit_unop_insn (code, to, from,
371 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
372 return;
375 /* Otherwise use a libcall. */
376 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
378 /* Is this conversion implemented yet? */
379 gcc_assert (libcall);
381 start_sequence ();
382 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
383 1, from, from_mode);
384 insns = get_insns ();
385 end_sequence ();
386 emit_libcall_block (insns, to, value,
387 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
388 from)
389 : gen_rtx_FLOAT_EXTEND (to_mode, from));
390 return;
393 /* Handle pointer conversion. */ /* SPEE 900220. */
394 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
396 convert_optab ctab;
398 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
399 ctab = trunc_optab;
400 else if (unsignedp)
401 ctab = zext_optab;
402 else
403 ctab = sext_optab;
405 if (convert_optab_handler (ctab, to_mode, from_mode)
406 != CODE_FOR_nothing)
408 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
409 to, from, UNKNOWN);
410 return;
414 /* Targets are expected to provide conversion insns between PxImode and
415 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
416 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
418 machine_mode full_mode
419 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
421 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
422 != CODE_FOR_nothing);
424 if (full_mode != from_mode)
425 from = convert_to_mode (full_mode, from, unsignedp);
426 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
427 to, from, UNKNOWN);
428 return;
430 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
432 rtx new_from;
433 machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
435 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
436 enum insn_code icode;
438 icode = convert_optab_handler (ctab, full_mode, from_mode);
439 gcc_assert (icode != CODE_FOR_nothing);
441 if (to_mode == full_mode)
443 emit_unop_insn (icode, to, from, UNKNOWN);
444 return;
447 new_from = gen_reg_rtx (full_mode);
448 emit_unop_insn (icode, new_from, from, UNKNOWN);
450 /* else proceed to integer conversions below. */
451 from_mode = full_mode;
452 from = new_from;
455 /* Make sure both are fixed-point modes or both are not. */
456 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
457 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
458 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
460 /* If we widen from_mode to to_mode and they are in the same class,
461 we won't saturate the result.
462 Otherwise, always saturate the result to play safe. */
463 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
464 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
465 expand_fixed_convert (to, from, 0, 0);
466 else
467 expand_fixed_convert (to, from, 0, 1);
468 return;
471 /* Now both modes are integers. */
473 /* Handle expanding beyond a word. */
474 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
475 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
477 rtx_insn *insns;
478 rtx lowpart;
479 rtx fill_value;
480 rtx lowfrom;
481 int i;
482 machine_mode lowpart_mode;
483 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
485 /* Try converting directly if the insn is supported. */
486 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
487 != CODE_FOR_nothing)
489 /* If FROM is a SUBREG, put it into a register. Do this
490 so that we always generate the same set of insns for
491 better cse'ing; if an intermediate assignment occurred,
492 we won't be doing the operation directly on the SUBREG. */
493 if (optimize > 0 && GET_CODE (from) == SUBREG)
494 from = force_reg (from_mode, from);
495 emit_unop_insn (code, to, from, equiv_code);
496 return;
498 /* Next, try converting via full word. */
499 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
500 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
501 != CODE_FOR_nothing))
503 rtx word_to = gen_reg_rtx (word_mode);
504 if (REG_P (to))
506 if (reg_overlap_mentioned_p (to, from))
507 from = force_reg (from_mode, from);
508 emit_clobber (to);
510 convert_move (word_to, from, unsignedp);
511 emit_unop_insn (code, to, word_to, equiv_code);
512 return;
515 /* No special multiword conversion insn; do it by hand. */
516 start_sequence ();
518 /* Since we will turn this into a no conflict block, we must ensure that
519 the source does not overlap the target, so force it into an isolated
520 register if it might. Likewise for any MEM input, since the
521 conversion sequence might require several references to it and we
522 must ensure we're getting the same value every time. */
524 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
525 from = force_reg (from_mode, from);
527 /* Get a copy of FROM widened to a word, if necessary. */
528 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
529 lowpart_mode = word_mode;
530 else
531 lowpart_mode = from_mode;
533 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
535 lowpart = gen_lowpart (lowpart_mode, to);
536 emit_move_insn (lowpart, lowfrom);
538 /* Compute the value to put in each remaining word. */
539 if (unsignedp)
540 fill_value = const0_rtx;
541 else
542 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
543 LT, lowfrom, const0_rtx,
544 lowpart_mode, 0, -1);
546 /* Fill the remaining words. */
547 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
549 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
550 rtx subword = operand_subword (to, index, 1, to_mode);
552 gcc_assert (subword);
554 if (fill_value != subword)
555 emit_move_insn (subword, fill_value);
558 insns = get_insns ();
559 end_sequence ();
561 emit_insn (insns);
562 return;
565 /* Truncating multi-word to a word or less. */
566 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
567 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
569 if (!((MEM_P (from)
570 && ! MEM_VOLATILE_P (from)
571 && direct_load[(int) to_mode]
572 && ! mode_dependent_address_p (XEXP (from, 0),
573 MEM_ADDR_SPACE (from)))
574 || REG_P (from)
575 || GET_CODE (from) == SUBREG))
576 from = force_reg (from_mode, from);
577 convert_move (to, gen_lowpart (word_mode, from), 0);
578 return;
581 /* Now follow all the conversions between integers
582 no more than a word long. */
584 /* For truncation, usually we can just refer to FROM in a narrower mode. */
585 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
586 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
588 if (!((MEM_P (from)
589 && ! MEM_VOLATILE_P (from)
590 && direct_load[(int) to_mode]
591 && ! mode_dependent_address_p (XEXP (from, 0),
592 MEM_ADDR_SPACE (from)))
593 || REG_P (from)
594 || GET_CODE (from) == SUBREG))
595 from = force_reg (from_mode, from);
596 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
597 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
598 from = copy_to_reg (from);
599 emit_move_insn (to, gen_lowpart (to_mode, from));
600 return;
603 /* Handle extension. */
604 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
606 /* Convert directly if that works. */
607 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
608 != CODE_FOR_nothing)
610 emit_unop_insn (code, to, from, equiv_code);
611 return;
613 else
615 machine_mode intermediate;
616 rtx tmp;
617 int shift_amount;
619 /* Search for a mode to convert via. */
620 for (intermediate = from_mode; intermediate != VOIDmode;
621 intermediate = GET_MODE_WIDER_MODE (intermediate))
622 if (((can_extend_p (to_mode, intermediate, unsignedp)
623 != CODE_FOR_nothing)
624 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
625 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
626 && (can_extend_p (intermediate, from_mode, unsignedp)
627 != CODE_FOR_nothing))
629 convert_move (to, convert_to_mode (intermediate, from,
630 unsignedp), unsignedp);
631 return;
634 /* No suitable intermediate mode.
635 Generate what we need with shifts. */
636 shift_amount = (GET_MODE_PRECISION (to_mode)
637 - GET_MODE_PRECISION (from_mode));
638 from = gen_lowpart (to_mode, force_reg (from_mode, from));
639 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
640 to, unsignedp);
641 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
642 to, unsignedp);
643 if (tmp != to)
644 emit_move_insn (to, tmp);
645 return;
649 /* Support special truncate insns for certain modes. */
650 if (convert_optab_handler (trunc_optab, to_mode,
651 from_mode) != CODE_FOR_nothing)
653 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
654 to, from, UNKNOWN);
655 return;
658 /* Handle truncation of volatile memrefs, and so on;
659 the things that couldn't be truncated directly,
660 and for which there was no special instruction.
662 ??? Code above formerly short-circuited this, for most integer
663 mode pairs, with a force_reg in from_mode followed by a recursive
664 call to this routine. Appears always to have been wrong. */
665 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
667 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
668 emit_move_insn (to, temp);
669 return;
672 /* Mode combination is not recognized. */
673 gcc_unreachable ();
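/* A rough usage sketch (EXP here stands for a hypothetical tree whose
   value has already been expanded into FROM):

       convert_move (to, from, TYPE_UNSIGNED (TREE_TYPE (exp)));

   The cascade above then picks a direct extension/truncation insn, a
   libcall, or a multi-word expansion, depending on the modes involved.  */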
676 /* Return an rtx for a value that would result
677 from converting X to mode MODE.
678 Both X and MODE may be floating, or both integer.
679 UNSIGNEDP is nonzero if X is an unsigned value.
680 This can be done by referring to a part of X in place
681 or by copying to a new temporary with conversion. */
684 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
686 return convert_modes (mode, VOIDmode, x, unsignedp);
689 /* Return an rtx for a value that would result
690 from converting X from mode OLDMODE to mode MODE.
691 Both modes may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion.
697 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
700 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
702 rtx temp;
704 /* If FROM is a SUBREG that indicates that we have already done at least
705 the required extension, strip it. */
707 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
708 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
709 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
710 x = gen_lowpart (mode, SUBREG_REG (x));
712 if (GET_MODE (x) != VOIDmode)
713 oldmode = GET_MODE (x);
715 if (mode == oldmode)
716 return x;
718 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
720 /* If the caller did not tell us the old mode, then there is not
721 much to do with respect to canonicalization. We have to
722 assume that all the bits are significant. */
723 if (GET_MODE_CLASS (oldmode) != MODE_INT)
724 oldmode = MAX_MODE_INT;
725 wide_int w = wide_int::from (std::make_pair (x, oldmode),
726 GET_MODE_PRECISION (mode),
727 unsignedp ? UNSIGNED : SIGNED);
728 return immed_wide_int_const (w, mode);
731 /* We can do this with a gen_lowpart if both desired and current modes
732 are integer, and this is either a constant integer, a register, or a
733 non-volatile MEM. */
734 if (GET_MODE_CLASS (mode) == MODE_INT
735 && GET_MODE_CLASS (oldmode) == MODE_INT
736 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
737 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
738 || (REG_P (x)
739 && (!HARD_REGISTER_P (x)
740 || HARD_REGNO_MODE_OK (REGNO (x), mode))
741 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
743 return gen_lowpart (mode, x);
745 /* Converting an integer constant into a vector mode is always equivalent
746 to a subreg operation. */
747 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
749 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
750 return simplify_gen_subreg (mode, x, oldmode, 0);
753 temp = gen_reg_rtx (mode);
754 convert_move (temp, x, unsignedp);
755 return temp;
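/* In short: convert_modes takes one of three routes -- CONST_SCALAR_INT_P
   inputs headed for an integer mode are folded at compile time through
   wide_int, integer narrowing that gen_lowpart can express directly is
   done without emitting any insns, and everything else goes through a
   fresh pseudo and convert_move.  */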
758 /* Return the largest alignment we can use for doing a move (or store)
759 of MAX_PIECES. ALIGN is the largest alignment we could use. */
761 static unsigned int
762 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
764 machine_mode tmode;
766 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
767 if (align >= GET_MODE_ALIGNMENT (tmode))
768 align = GET_MODE_ALIGNMENT (tmode);
769 else
771 machine_mode tmode, xmode;
773 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
774 tmode != VOIDmode;
775 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
776 if (GET_MODE_SIZE (tmode) > max_pieces
777 || SLOW_UNALIGNED_ACCESS (tmode, align))
778 break;
780 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
783 return align;
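/* In the fallback branch above XMODE trails TMODE by one step, so when the
   loop stops at the first integer mode that is wider than MAX_PIECES or
   slow to access at this alignment, XMODE is the widest mode that is
   still cheap, and its alignment becomes the value returned (the MAX can
   only raise ALIGN, never lower it).  */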
786 /* Return the widest integer mode no wider than SIZE. If no such mode
787 can be found, return VOIDmode. */
789 static machine_mode
790 widest_int_mode_for_size (unsigned int size)
792 machine_mode tmode, mode = VOIDmode;
794 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
795 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
796 if (GET_MODE_SIZE (tmode) < size)
797 mode = tmode;
799 return mode;
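/* Note that the size comparison above is strict, so a caller that wants
   the widest integer mode of at most N bytes passes N + 1; that is why
   move_by_pieces below initializes max_size to MOVE_MAX_PIECES + 1.  */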
802 /* Determine whether the LEN bytes can be moved by using several move
803 instructions. Return nonzero if a call to move_by_pieces should
804 succeed. */
807 can_move_by_pieces (unsigned HOST_WIDE_INT len,
808 unsigned int align)
810 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
811 optimize_insn_for_speed_p ());
814 /* Generate several move instructions to copy LEN bytes from block FROM to
815 block TO. (These are MEM rtx's with BLKmode).
817 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
818 used to push FROM to the stack.
820 ALIGN is maximum stack alignment we can assume.
822 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
823 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
824 stpcpy. */
827 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
828 unsigned int align, int endp)
830 struct move_by_pieces_d data;
831 machine_mode to_addr_mode;
832 machine_mode from_addr_mode = get_address_mode (from);
833 rtx to_addr, from_addr = XEXP (from, 0);
834 unsigned int max_size = MOVE_MAX_PIECES + 1;
835 enum insn_code icode;
837 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
839 data.offset = 0;
840 data.from_addr = from_addr;
841 if (to)
843 to_addr_mode = get_address_mode (to);
844 to_addr = XEXP (to, 0);
845 data.to = to;
846 data.autinc_to
847 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
848 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
849 data.reverse
850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
852 else
854 to_addr_mode = VOIDmode;
855 to_addr = NULL_RTX;
856 data.to = NULL_RTX;
857 data.autinc_to = 1;
858 #ifdef STACK_GROWS_DOWNWARD
859 data.reverse = 1;
860 #else
861 data.reverse = 0;
862 #endif
864 data.to_addr = to_addr;
865 data.from = from;
866 data.autinc_from
867 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
868 || GET_CODE (from_addr) == POST_INC
869 || GET_CODE (from_addr) == POST_DEC);
871 data.explicit_inc_from = 0;
872 data.explicit_inc_to = 0;
873 if (data.reverse) data.offset = len;
874 data.len = len;
876 /* If copying requires more than two move insns,
877 copy addresses to registers (to make displacements shorter)
878 and use post-increment if available. */
879 if (!(data.autinc_from && data.autinc_to)
880 && move_by_pieces_ninsns (len, align, max_size) > 2)
882 /* Find the mode of the largest move...
883 MODE might not be used depending on the definitions of the
884 USE_* macros below. */
885 machine_mode mode ATTRIBUTE_UNUSED
886 = widest_int_mode_for_size (max_size);
888 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
890 data.from_addr = copy_to_mode_reg (from_addr_mode,
891 plus_constant (from_addr_mode,
892 from_addr, len));
893 data.autinc_from = 1;
894 data.explicit_inc_from = -1;
896 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
898 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
899 data.autinc_from = 1;
900 data.explicit_inc_from = 1;
902 if (!data.autinc_from && CONSTANT_P (from_addr))
903 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
904 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
906 data.to_addr = copy_to_mode_reg (to_addr_mode,
907 plus_constant (to_addr_mode,
908 to_addr, len));
909 data.autinc_to = 1;
910 data.explicit_inc_to = -1;
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
914 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
915 data.autinc_to = 1;
916 data.explicit_inc_to = 1;
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
922 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
924 /* First move what we can in the largest integer mode, then go to
925 successively smaller modes. */
927 while (max_size > 1 && data.len > 0)
929 machine_mode mode = widest_int_mode_for_size (max_size);
931 if (mode == VOIDmode)
932 break;
934 icode = optab_handler (mov_optab, mode);
935 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
936 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
938 max_size = GET_MODE_SIZE (mode);
941 /* The code above should have handled everything. */
942 gcc_assert (!data.len);
944 if (endp)
946 rtx to1;
948 gcc_assert (!data.reverse);
949 if (data.autinc_to)
951 if (endp == 2)
953 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
954 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
955 else
956 data.to_addr = copy_to_mode_reg (to_addr_mode,
957 plus_constant (to_addr_mode,
958 data.to_addr,
959 -1));
961 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
962 data.offset);
964 else
966 if (endp == 2)
967 --data.offset;
968 to1 = adjust_address (data.to, QImode, data.offset);
970 return to1;
972 else
973 return data.to;
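/* A rough usage sketch (DST_MEM, SRC_MEM and LEN are hypothetical
   operands a builtin expander might have in hand):

       rtx end = move_by_pieces (dst_mem, src_mem, INTVAL (len), align, 1);

   With ENDP == 1 the returned MEM is adjusted to just past the last byte
   written, mempcpy-style; plain memcpy-style callers pass ENDP == 0 and
   simply get the TO operand back.  */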
976 /* Return number of insns required to move L bytes by pieces.
977 ALIGN (in bits) is maximum alignment we can assume. */
979 unsigned HOST_WIDE_INT
980 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
981 unsigned int max_size)
983 unsigned HOST_WIDE_INT n_insns = 0;
985 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
987 while (max_size > 1 && l > 0)
989 machine_mode mode;
990 enum insn_code icode;
992 mode = widest_int_mode_for_size (max_size);
994 if (mode == VOIDmode)
995 break;
997 icode = optab_handler (mov_optab, mode);
998 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
999 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1001 max_size = GET_MODE_SIZE (mode);
1004 gcc_assert (!l);
1005 return n_insns;
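/* Worked example (assuming a 32-bit target where SImode, HImode and QImode
   moves all exist and the block is word-aligned): for L == 7 the loop
   above counts one SImode move, one HImode move and one QImode move, so
   the function returns 3.  */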
1008 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1009 with move instructions for mode MODE. GENFUN is the gen_... function
1010 to make a move insn for that mode. DATA has all the other info. */
1012 static void
1013 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1014 struct move_by_pieces_d *data)
1016 unsigned int size = GET_MODE_SIZE (mode);
1017 rtx to1 = NULL_RTX, from1;
1019 while (data->len >= size)
1021 if (data->reverse)
1022 data->offset -= size;
1024 if (data->to)
1026 if (data->autinc_to)
1027 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1028 data->offset);
1029 else
1030 to1 = adjust_address (data->to, mode, data->offset);
1033 if (data->autinc_from)
1034 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1035 data->offset);
1036 else
1037 from1 = adjust_address (data->from, mode, data->offset);
1039 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1040 emit_insn (gen_add2_insn (data->to_addr,
1041 gen_int_mode (-(HOST_WIDE_INT) size,
1042 GET_MODE (data->to_addr))));
1043 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1044 emit_insn (gen_add2_insn (data->from_addr,
1045 gen_int_mode (-(HOST_WIDE_INT) size,
1046 GET_MODE (data->from_addr))));
1048 if (data->to)
1049 emit_insn ((*genfun) (to1, from1));
1050 else
1052 #ifdef PUSH_ROUNDING
1053 emit_single_push_insn (mode, from1, NULL);
1054 #else
1055 gcc_unreachable ();
1056 #endif
1059 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1060 emit_insn (gen_add2_insn (data->to_addr,
1061 gen_int_mode (size,
1062 GET_MODE (data->to_addr))));
1063 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1064 emit_insn (gen_add2_insn (data->from_addr,
1065 gen_int_mode (size,
1066 GET_MODE (data->from_addr))));
1068 if (! data->reverse)
1069 data->offset += size;
1071 data->len -= size;
1075 /* Emit code to move a block Y to a block X. This may be done with
1076 string-move instructions, with multiple scalar move instructions,
1077 or with a library call.
1079 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1080 SIZE is an rtx that says how long they are.
1081 ALIGN is the maximum alignment we can assume they have.
1082 METHOD describes what kind of copy this is, and what mechanisms may be used.
1083 MIN_SIZE is the minimal size of block to move
1084 MAX_SIZE is the maximal size of block to move; if it cannot be represented
1085 in unsigned HOST_WIDE_INT, it is the mask of all ones.
1087 Return the address of the new block, if memcpy is called and returns it,
1088 0 otherwise. */
1091 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1092 unsigned int expected_align, HOST_WIDE_INT expected_size,
1093 unsigned HOST_WIDE_INT min_size,
1094 unsigned HOST_WIDE_INT max_size,
1095 unsigned HOST_WIDE_INT probable_max_size)
1097 bool may_use_call;
1098 rtx retval = 0;
1099 unsigned int align;
1101 gcc_assert (size);
1102 if (CONST_INT_P (size)
1103 && INTVAL (size) == 0)
1104 return 0;
1106 switch (method)
1108 case BLOCK_OP_NORMAL:
1109 case BLOCK_OP_TAILCALL:
1110 may_use_call = true;
1111 break;
1113 case BLOCK_OP_CALL_PARM:
1114 may_use_call = block_move_libcall_safe_for_call_parm ();
1116 /* Make inhibit_defer_pop nonzero around the library call
1117 to force it to pop the arguments right away. */
1118 NO_DEFER_POP;
1119 break;
1121 case BLOCK_OP_NO_LIBCALL:
1122 may_use_call = false;
1123 break;
1125 default:
1126 gcc_unreachable ();
1129 gcc_assert (MEM_P (x) && MEM_P (y));
1130 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1131 gcc_assert (align >= BITS_PER_UNIT);
1133 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1134 block copy is more efficient for other large modes, e.g. DCmode. */
1135 x = adjust_address (x, BLKmode, 0);
1136 y = adjust_address (y, BLKmode, 0);
1138 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1139 can be incorrect is coming from __builtin_memcpy. */
1140 if (CONST_INT_P (size))
1142 x = shallow_copy_rtx (x);
1143 y = shallow_copy_rtx (y);
1144 set_mem_size (x, INTVAL (size));
1145 set_mem_size (y, INTVAL (size));
1148 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1149 move_by_pieces (x, y, INTVAL (size), align, 0);
1150 else if (emit_block_move_via_movmem (x, y, size, align,
1151 expected_align, expected_size,
1152 min_size, max_size, probable_max_size))
1154 else if (may_use_call
1155 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1156 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1158 /* Since x and y are passed to a libcall, mark the corresponding
1159 tree EXPR as addressable. */
1160 tree y_expr = MEM_EXPR (y);
1161 tree x_expr = MEM_EXPR (x);
1162 if (y_expr)
1163 mark_addressable (y_expr);
1164 if (x_expr)
1165 mark_addressable (x_expr);
1166 retval = emit_block_move_via_libcall (x, y, size,
1167 method == BLOCK_OP_TAILCALL);
1170 else
1171 emit_block_move_via_loop (x, y, size, align);
1173 if (method == BLOCK_OP_CALL_PARM)
1174 OK_DEFER_POP;
1176 return retval;
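/* In short: the dispatch above tries the cheapest strategy first -- a
   constant-size copy accepted by can_move_by_pieces is expanded inline,
   then a target movmem pattern is attempted, then a memcpy libcall (when
   METHOD allows it and both operands are in the generic address space),
   and only as a last resort an explicit byte-copy loop.  */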
1180 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1182 unsigned HOST_WIDE_INT max, min = 0;
1183 if (GET_CODE (size) == CONST_INT)
1184 min = max = UINTVAL (size);
1185 else
1186 max = GET_MODE_MASK (GET_MODE (size));
1187 return emit_block_move_hints (x, y, size, method, 0, -1,
1188 min, max, max);
1191 /* A subroutine of emit_block_move. Returns true if calling the
1192 block move libcall will not clobber any parameters which may have
1193 already been placed on the stack. */
1195 static bool
1196 block_move_libcall_safe_for_call_parm (void)
1198 #if defined (REG_PARM_STACK_SPACE)
1199 tree fn;
1200 #endif
1202 /* If arguments are pushed on the stack, then they're safe. */
1203 if (PUSH_ARGS)
1204 return true;
1206 /* If registers go on the stack anyway, any argument is sure to clobber
1207 an outgoing argument. */
1208 #if defined (REG_PARM_STACK_SPACE)
1209 fn = emit_block_move_libcall_fn (false);
1210 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1211 depend on its argument. */
1212 (void) fn;
1213 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1214 && REG_PARM_STACK_SPACE (fn) != 0)
1215 return false;
1216 #endif
1218 /* If any argument goes in memory, then it might clobber an outgoing
1219 argument. */
1221 CUMULATIVE_ARGS args_so_far_v;
1222 cumulative_args_t args_so_far;
1223 tree fn, arg;
1225 fn = emit_block_move_libcall_fn (false);
1226 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1227 args_so_far = pack_cumulative_args (&args_so_far_v);
1229 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1230 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1232 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1233 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1234 NULL_TREE, true);
1235 if (!tmp || !REG_P (tmp))
1236 return false;
1237 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1238 return false;
1239 targetm.calls.function_arg_advance (args_so_far, mode,
1240 NULL_TREE, true);
1243 return true;
1246 /* A subroutine of emit_block_move. Expand a movmem pattern;
1247 return true if successful. */
1249 static bool
1250 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1251 unsigned int expected_align, HOST_WIDE_INT expected_size,
1252 unsigned HOST_WIDE_INT min_size,
1253 unsigned HOST_WIDE_INT max_size,
1254 unsigned HOST_WIDE_INT probable_max_size)
1256 int save_volatile_ok = volatile_ok;
1257 machine_mode mode;
1259 if (expected_align < align)
1260 expected_align = align;
1261 if (expected_size != -1)
1263 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1264 expected_size = probable_max_size;
1265 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1266 expected_size = min_size;
1269 /* Since this is a move insn, we don't care about volatility. */
1270 volatile_ok = 1;
1272 /* Try the most limited insn first, because there's no point
1273 including more than one in the machine description unless
1274 the more limited one has some advantage. */
1276 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1277 mode = GET_MODE_WIDER_MODE (mode))
1279 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1281 if (code != CODE_FOR_nothing
1282 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1283 here because if SIZE is less than the mode mask, as it is
1284 returned by the macro, it will definitely be less than the
1285 actual mode mask. Since SIZE is within the Pmode address
1286 space, we limit MODE to Pmode. */
1287 && ((CONST_INT_P (size)
1288 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1289 <= (GET_MODE_MASK (mode) >> 1)))
1290 || max_size <= (GET_MODE_MASK (mode) >> 1)
1291 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1293 struct expand_operand ops[9];
1294 unsigned int nops;
1296 /* ??? When called via emit_block_move_for_call, it'd be
1297 nice if there were some way to inform the backend, so
1298 that it doesn't fail the expansion because it thinks
1299 emitting the libcall would be more efficient. */
1300 nops = insn_data[(int) code].n_generator_args;
1301 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1303 create_fixed_operand (&ops[0], x);
1304 create_fixed_operand (&ops[1], y);
1305 /* The check above guarantees that this size conversion is valid. */
1306 create_convert_operand_to (&ops[2], size, mode, true);
1307 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1308 if (nops >= 6)
1310 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1311 create_integer_operand (&ops[5], expected_size);
1313 if (nops >= 8)
1315 create_integer_operand (&ops[6], min_size);
1316 /* If we cannot represent the maximal size,
1317 pass NULL for that operand. */
1318 if ((HOST_WIDE_INT) max_size != -1)
1319 create_integer_operand (&ops[7], max_size);
1320 else
1321 create_fixed_operand (&ops[7], NULL);
1323 if (nops == 9)
1325 /* If we cannot represent the maximal size,
1326 pass NULL for that operand. */
1327 if ((HOST_WIDE_INT) probable_max_size != -1)
1328 create_integer_operand (&ops[8], probable_max_size);
1329 else
1330 create_fixed_operand (&ops[8], NULL);
1332 if (maybe_expand_insn (code, nops, ops))
1334 volatile_ok = save_volatile_ok;
1335 return true;
1340 volatile_ok = save_volatile_ok;
1341 return false;
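/* The nops == 4 / 6 / 8 / 9 cases above correspond to movmem patterns
   that take, respectively, just (dst, src, size, align); additionally the
   expected-alignment and expected-size hints; additionally the min/max
   size bounds; and additionally the probable maximal size.  */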
1344 /* A subroutine of emit_block_move. Expand a call to memcpy.
1345 Return the return value from memcpy, 0 otherwise. */
1348 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1350 rtx dst_addr, src_addr;
1351 tree call_expr, fn, src_tree, dst_tree, size_tree;
1352 machine_mode size_mode;
1353 rtx retval;
1355 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1356 pseudos. We can then place those new pseudos into a VAR_DECL and
1357 use them later. */
1359 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1360 src_addr = copy_addr_to_reg (XEXP (src, 0));
1362 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1363 src_addr = convert_memory_address (ptr_mode, src_addr);
1365 dst_tree = make_tree (ptr_type_node, dst_addr);
1366 src_tree = make_tree (ptr_type_node, src_addr);
1368 size_mode = TYPE_MODE (sizetype);
1370 size = convert_to_mode (size_mode, size, 1);
1371 size = copy_to_mode_reg (size_mode, size);
1373 /* It is incorrect to use the libcall calling conventions to call
1374 memcpy in this context. This could be a user call to memcpy and
1375 the user may wish to examine the return value from memcpy. For
1376 targets where libcalls and normal calls have different conventions
1377 for returning pointers, we could end up generating incorrect code. */
1379 size_tree = make_tree (sizetype, size);
1381 fn = emit_block_move_libcall_fn (true);
1382 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1383 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1385 retval = expand_normal (call_expr);
1387 return retval;
1390 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1391 for the function we use for block copies. */
1393 static GTY(()) tree block_move_fn;
1395 void
1396 init_block_move_fn (const char *asmspec)
1398 if (!block_move_fn)
1400 tree args, fn, attrs, attr_args;
1402 fn = get_identifier ("memcpy");
1403 args = build_function_type_list (ptr_type_node, ptr_type_node,
1404 const_ptr_type_node, sizetype,
1405 NULL_TREE);
1407 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1408 DECL_EXTERNAL (fn) = 1;
1409 TREE_PUBLIC (fn) = 1;
1410 DECL_ARTIFICIAL (fn) = 1;
1411 TREE_NOTHROW (fn) = 1;
1412 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1413 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1415 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1416 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1418 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1420 block_move_fn = fn;
1423 if (asmspec)
1424 set_user_assembler_name (block_move_fn, asmspec);
1427 static tree
1428 emit_block_move_libcall_fn (int for_call)
1430 static bool emitted_extern;
1432 if (!block_move_fn)
1433 init_block_move_fn (NULL);
1435 if (for_call && !emitted_extern)
1437 emitted_extern = true;
1438 make_decl_rtl (block_move_fn);
1441 return block_move_fn;
1444 /* A subroutine of emit_block_move. Copy the data via an explicit
1445 loop. This is used only when libcalls are forbidden. */
1446 /* ??? It'd be nice to copy in hunks larger than QImode. */
1448 static void
1449 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1450 unsigned int align ATTRIBUTE_UNUSED)
1452 rtx_code_label *cmp_label, *top_label;
1453 rtx iter, x_addr, y_addr, tmp;
1454 machine_mode x_addr_mode = get_address_mode (x);
1455 machine_mode y_addr_mode = get_address_mode (y);
1456 machine_mode iter_mode;
1458 iter_mode = GET_MODE (size);
1459 if (iter_mode == VOIDmode)
1460 iter_mode = word_mode;
1462 top_label = gen_label_rtx ();
1463 cmp_label = gen_label_rtx ();
1464 iter = gen_reg_rtx (iter_mode);
1466 emit_move_insn (iter, const0_rtx);
1468 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1469 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1470 do_pending_stack_adjust ();
1472 emit_jump (cmp_label);
1473 emit_label (top_label);
1475 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1476 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1478 if (x_addr_mode != y_addr_mode)
1479 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1480 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1482 x = change_address (x, QImode, x_addr);
1483 y = change_address (y, QImode, y_addr);
1485 emit_move_insn (x, y);
1487 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1488 true, OPTAB_LIB_WIDEN);
1489 if (tmp != iter)
1490 emit_move_insn (iter, tmp);
1492 emit_label (cmp_label);
1494 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1495 true, top_label, REG_BR_PROB_BASE * 90 / 100);
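/* The RTL emitted above amounts, roughly, to the following byte-copy
   loop (a sketch only; X, Y and SIZE stand for the expanded operands):

       for (iter = 0; iter < size; iter++)
         ((char *) x)[iter] = ((char *) y)[iter];

   with the backward branch predicted taken about 90% of the time.  */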
1498 /* Copy all or part of a value X into registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1501 void
1502 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1504 int i;
1505 #ifdef HAVE_load_multiple
1506 rtx pat;
1507 rtx_insn *last;
1508 #endif
1510 if (nregs == 0)
1511 return;
1513 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1514 x = validize_mem (force_const_mem (mode, x));
1516 /* See if the machine can do this with a load multiple insn. */
1517 #ifdef HAVE_load_multiple
1518 if (HAVE_load_multiple)
1520 last = get_last_insn ();
1521 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1522 GEN_INT (nregs));
1523 if (pat)
1525 emit_insn (pat);
1526 return;
1528 else
1529 delete_insns_since (last);
1531 #endif
1533 for (i = 0; i < nregs; i++)
1534 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1535 operand_subword_force (x, i, mode));
1538 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1539 The number of registers to be filled is NREGS. */
1541 void
1542 move_block_from_reg (int regno, rtx x, int nregs)
1544 int i;
1546 if (nregs == 0)
1547 return;
1549 /* See if the machine can do this with a store multiple insn. */
1550 #ifdef HAVE_store_multiple
1551 if (HAVE_store_multiple)
1553 rtx_insn *last = get_last_insn ();
1554 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1555 GEN_INT (nregs));
1556 if (pat)
1558 emit_insn (pat);
1559 return;
1561 else
1562 delete_insns_since (last);
1564 #endif
1566 for (i = 0; i < nregs; i++)
1568 rtx tem = operand_subword (x, i, 1, BLKmode);
1570 gcc_assert (tem);
1572 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1576 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1577 ORIG, where ORIG is a non-consecutive group of registers represented by
1578 a PARALLEL. The clone is identical to the original except in that the
1579 original set of registers is replaced by a new set of pseudo registers.
1580 The new set has the same modes as the original set. */
1583 gen_group_rtx (rtx orig)
1585 int i, length;
1586 rtx *tmps;
1588 gcc_assert (GET_CODE (orig) == PARALLEL);
1590 length = XVECLEN (orig, 0);
1591 tmps = XALLOCAVEC (rtx, length);
1593 /* Skip a NULL entry in first slot. */
1594 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1596 if (i)
1597 tmps[0] = 0;
1599 for (; i < length; i++)
1601 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1602 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1604 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1607 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
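/* The register groups handled here and below are PARALLELs whose elements
   are (expr_list (reg:MODE r) (const_int byte-offset)) pairs; a NULL
   register in the first element means part of the value also lives on
   the stack, which is why several of the loops below start at index 1 in
   that case.  */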
1610 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1611 except that values are placed in TMPS[i], and must later be moved
1612 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1614 static void
1615 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1617 rtx src;
1618 int start, i;
1619 machine_mode m = GET_MODE (orig_src);
1621 gcc_assert (GET_CODE (dst) == PARALLEL);
1623 if (m != VOIDmode
1624 && !SCALAR_INT_MODE_P (m)
1625 && !MEM_P (orig_src)
1626 && GET_CODE (orig_src) != CONCAT)
1628 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1629 if (imode == BLKmode)
1630 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1631 else
1632 src = gen_reg_rtx (imode);
1633 if (imode != BLKmode)
1634 src = gen_lowpart (GET_MODE (orig_src), src);
1635 emit_move_insn (src, orig_src);
1636 /* ...and back again. */
1637 if (imode != BLKmode)
1638 src = gen_lowpart (imode, src);
1639 emit_group_load_1 (tmps, dst, src, type, ssize);
1640 return;
1643 /* Check for a NULL entry, used to indicate that the parameter goes
1644 both on the stack and in registers. */
1645 if (XEXP (XVECEXP (dst, 0, 0), 0))
1646 start = 0;
1647 else
1648 start = 1;
1650 /* Process the pieces. */
1651 for (i = start; i < XVECLEN (dst, 0); i++)
1653 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1654 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1655 unsigned int bytelen = GET_MODE_SIZE (mode);
1656 int shift = 0;
1658 /* Handle trailing fragments that run over the size of the struct. */
1659 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1661 /* Arrange to shift the fragment to where it belongs.
1662 extract_bit_field loads to the lsb of the reg. */
1663 if (
1664 #ifdef BLOCK_REG_PADDING
1665 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1666 == (BYTES_BIG_ENDIAN ? upward : downward)
1667 #else
1668 BYTES_BIG_ENDIAN
1669 #endif
1671 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1672 bytelen = ssize - bytepos;
1673 gcc_assert (bytelen > 0);
1676 /* If we won't be loading directly from memory, protect the real source
1677 from strange tricks we might play; but make sure that the source can
1678 be loaded directly into the destination. */
1679 src = orig_src;
1680 if (!MEM_P (orig_src)
1681 && (!CONSTANT_P (orig_src)
1682 || (GET_MODE (orig_src) != mode
1683 && GET_MODE (orig_src) != VOIDmode)))
1685 if (GET_MODE (orig_src) == VOIDmode)
1686 src = gen_reg_rtx (mode);
1687 else
1688 src = gen_reg_rtx (GET_MODE (orig_src));
1690 emit_move_insn (src, orig_src);
1693 /* Optimize the access just a bit. */
1694 if (MEM_P (src)
1695 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1696 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1697 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1698 && bytelen == GET_MODE_SIZE (mode))
1700 tmps[i] = gen_reg_rtx (mode);
1701 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1703 else if (COMPLEX_MODE_P (mode)
1704 && GET_MODE (src) == mode
1705 && bytelen == GET_MODE_SIZE (mode))
1706 /* Let emit_move_complex do the bulk of the work. */
1707 tmps[i] = src;
1708 else if (GET_CODE (src) == CONCAT)
1710 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1711 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1713 if ((bytepos == 0 && bytelen == slen0)
1714 || (bytepos != 0 && bytepos + bytelen <= slen))
1716 /* The following assumes that the concatenated objects all
1717 have the same size. In this case, a simple calculation
1718 can be used to determine the object and the bit field
1719 to be extracted. */
1720 tmps[i] = XEXP (src, bytepos / slen0);
1721 if (! CONSTANT_P (tmps[i])
1722 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1723 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1724 (bytepos % slen0) * BITS_PER_UNIT,
1725 1, NULL_RTX, mode, mode, false);
1727 else
1729 rtx mem;
1731 gcc_assert (!bytepos);
1732 mem = assign_stack_temp (GET_MODE (src), slen);
1733 emit_move_insn (mem, src);
1734 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1735 0, 1, NULL_RTX, mode, mode, false);
1738 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1739 SIMD register, which is currently broken. Until we get GCC
1740 to emit proper RTL for these cases, let's dump to memory. */
1741 else if (VECTOR_MODE_P (GET_MODE (dst))
1742 && REG_P (src))
1744 int slen = GET_MODE_SIZE (GET_MODE (src));
1745 rtx mem;
1747 mem = assign_stack_temp (GET_MODE (src), slen);
1748 emit_move_insn (mem, src);
1749 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1751 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1752 && XVECLEN (dst, 0) > 1)
1753 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1754 else if (CONSTANT_P (src))
1756 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1758 if (len == ssize)
1759 tmps[i] = src;
1760 else
1762 rtx first, second;
1764 /* TODO: const_wide_int can have sizes other than this... */
1765 gcc_assert (2 * len == ssize);
1766 split_double (src, &first, &second);
1767 if (i)
1768 tmps[i] = second;
1769 else
1770 tmps[i] = first;
1773 else if (REG_P (src) && GET_MODE (src) == mode)
1774 tmps[i] = src;
1775 else
1776 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1777 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1778 mode, mode, false);
1780 if (shift)
1781 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1782 shift, tmps[i], 0);
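/* For a trailing fragment that runs past SSIZE, the code above narrows
   BYTELEN to the bytes that really belong to the value and, when the
   target's padding rules call for it, records a left shift that is
   applied at the end so the fragment is justified the way a full
   register's worth of data would be.  */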
1786 /* Emit code to move a block SRC of type TYPE to a block DST,
1787 where DST is non-consecutive registers represented by a PARALLEL.
1788 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1789 if not known. */
1791 void
1792 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1794 rtx *tmps;
1795 int i;
1797 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1798 emit_group_load_1 (tmps, dst, src, type, ssize);
1800 /* Copy the extracted pieces into the proper (probable) hard regs. */
1801 for (i = 0; i < XVECLEN (dst, 0); i++)
1803 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1804 if (d == NULL)
1805 continue;
1806 emit_move_insn (d, tmps[i]);
1810 /* Similar, but load SRC into new pseudos in a format that looks like
1811 PARALLEL. This can later be fed to emit_group_move to get things
1812 in the right place. */
1815 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1817 rtvec vec;
1818 int i;
1820 vec = rtvec_alloc (XVECLEN (parallel, 0));
1821 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1823 /* Convert the vector to look just like the original PARALLEL, except
1824 with the computed values. */
1825 for (i = 0; i < XVECLEN (parallel, 0); i++)
1827 rtx e = XVECEXP (parallel, 0, i);
1828 rtx d = XEXP (e, 0);
1830 if (d)
1832 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1833 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1835 RTVEC_ELT (vec, i) = e;
1838 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1841 /* Emit code to move a block SRC to block DST, where SRC and DST are
1842 non-consecutive groups of registers, each represented by a PARALLEL. */
1844 void
1845 emit_group_move (rtx dst, rtx src)
1847 int i;
1849 gcc_assert (GET_CODE (src) == PARALLEL
1850 && GET_CODE (dst) == PARALLEL
1851 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1853 /* Skip first entry if NULL. */
1854 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1855 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1856 XEXP (XVECEXP (src, 0, i), 0));
1859 /* Move a group of registers represented by a PARALLEL into pseudos. */
1862 emit_group_move_into_temps (rtx src)
1864 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1865 int i;
1867 for (i = 0; i < XVECLEN (src, 0); i++)
1869 rtx e = XVECEXP (src, 0, i);
1870 rtx d = XEXP (e, 0);
1872 if (d)
1873 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1874 RTVEC_ELT (vec, i) = e;
1877 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1880 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1881 where SRC is non-consecutive registers represented by a PARALLEL.
1882 SSIZE represents the total size of block ORIG_DST, or -1 if not
1883 known. */
1885 void
1886 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1888 rtx *tmps, dst;
1889 int start, finish, i;
1890 machine_mode m = GET_MODE (orig_dst);
1892 gcc_assert (GET_CODE (src) == PARALLEL);
1894 if (!SCALAR_INT_MODE_P (m)
1895 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1897 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1898 if (imode == BLKmode)
1899 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1900 else
1901 dst = gen_reg_rtx (imode);
1902 emit_group_store (dst, src, type, ssize);
1903 if (imode != BLKmode)
1904 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1905 emit_move_insn (orig_dst, dst);
1906 return;
1909 /* Check for a NULL entry, used to indicate that the parameter goes
1910 both on the stack and in registers. */
1911 if (XEXP (XVECEXP (src, 0, 0), 0))
1912 start = 0;
1913 else
1914 start = 1;
1915 finish = XVECLEN (src, 0);
1917 tmps = XALLOCAVEC (rtx, finish);
1919 /* Copy the (probable) hard regs into pseudos. */
1920 for (i = start; i < finish; i++)
1922 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1923 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1925 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1926 emit_move_insn (tmps[i], reg);
1928 else
1929 tmps[i] = reg;
1932 /* If we won't be storing directly into memory, protect the real destination
1933 from strange tricks we might play. */
1934 dst = orig_dst;
1935 if (GET_CODE (dst) == PARALLEL)
1937 rtx temp;
1939 /* We can get a PARALLEL dst if there is a conditional expression in
1940 a return statement. In that case, the dst and src are the same,
1941 so no action is necessary. */
1942 if (rtx_equal_p (dst, src))
1943 return;
1945 /* It is unclear if we can ever reach here, but we may as well handle
1946 it. Allocate a temporary, and split this into a store/load to/from
1947 the temporary. */
1948 temp = assign_stack_temp (GET_MODE (dst), ssize);
1949 emit_group_store (temp, src, type, ssize);
1950 emit_group_load (dst, temp, type, ssize);
1951 return;
1953 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1955 machine_mode outer = GET_MODE (dst);
1956 machine_mode inner;
1957 HOST_WIDE_INT bytepos;
1958 bool done = false;
1959 rtx temp;
1961 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1962 dst = gen_reg_rtx (outer);
1964 /* Make life a bit easier for combine. */
1965 /* If the first element of the vector is the low part
1966 of the destination mode, use a paradoxical subreg to
1967 initialize the destination. */
1968 if (start < finish)
1970 inner = GET_MODE (tmps[start]);
1971 bytepos = subreg_lowpart_offset (inner, outer);
1972 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1974 temp = simplify_gen_subreg (outer, tmps[start],
1975 inner, 0);
1976 if (temp)
1978 emit_move_insn (dst, temp);
1979 done = true;
1980 start++;
1985 /* If the first element wasn't the low part, try the last. */
1986 if (!done
1987 && start < finish - 1)
1989 inner = GET_MODE (tmps[finish - 1]);
1990 bytepos = subreg_lowpart_offset (inner, outer);
1991 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1993 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1994 inner, 0);
1995 if (temp)
1997 emit_move_insn (dst, temp);
1998 done = true;
1999 finish--;
2004 /* Otherwise, simply initialize the result to zero. */
2005 if (!done)
2006 emit_move_insn (dst, CONST0_RTX (outer));
2009 /* Process the pieces. */
2010 for (i = start; i < finish; i++)
2012 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2013 machine_mode mode = GET_MODE (tmps[i]);
2014 unsigned int bytelen = GET_MODE_SIZE (mode);
2015 unsigned int adj_bytelen;
2016 rtx dest = dst;
2018 /* Handle trailing fragments that run over the size of the struct. */
2019 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2020 adj_bytelen = ssize - bytepos;
2021 else
2022 adj_bytelen = bytelen;
2024 if (GET_CODE (dst) == CONCAT)
2026 if (bytepos + adj_bytelen
2027 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2028 dest = XEXP (dst, 0);
2029 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2031 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2032 dest = XEXP (dst, 1);
2034 else
2036 machine_mode dest_mode = GET_MODE (dest);
2037 machine_mode tmp_mode = GET_MODE (tmps[i]);
2039 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2041 if (GET_MODE_ALIGNMENT (dest_mode)
2042 >= GET_MODE_ALIGNMENT (tmp_mode))
2044 dest = assign_stack_temp (dest_mode,
2045 GET_MODE_SIZE (dest_mode));
2046 emit_move_insn (adjust_address (dest,
2047 tmp_mode,
2048 bytepos),
2049 tmps[i]);
2050 dst = dest;
2052 else
2054 dest = assign_stack_temp (tmp_mode,
2055 GET_MODE_SIZE (tmp_mode));
2056 emit_move_insn (dest, tmps[i]);
2057 dst = adjust_address (dest, dest_mode, bytepos);
2059 break;
2063 /* Handle trailing fragments that run over the size of the struct. */
2064 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2066 /* store_bit_field always takes its value from the lsb.
2067 Move the fragment to the lsb if it's not already there. */
2068 if (
2069 #ifdef BLOCK_REG_PADDING
2070 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2071 == (BYTES_BIG_ENDIAN ? upward : downward)
2072 #else
2073 BYTES_BIG_ENDIAN
2074 #endif
2077 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2078 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2079 shift, tmps[i], 0);
2082 /* Make sure not to write past the end of the struct. */
2083 store_bit_field (dest,
2084 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2085 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2086 VOIDmode, tmps[i], false);
2089 /* Optimize the access just a bit. */
2090 else if (MEM_P (dest)
2091 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2092 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2093 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2094 && bytelen == GET_MODE_SIZE (mode))
2095 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2097 else
2098 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2099 0, 0, mode, tmps[i], false);
2102 /* Copy from the pseudo into the (probable) hard reg. */
2103 if (orig_dst != dst)
2104 emit_move_insn (orig_dst, dst);
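/* Illustrative sketch (not part of the real code): spilling a value that
   lives in a register group, e.g. a multi-register return value, into a
   stack slot.  SRC_PAR is assumed to be a PARALLEL as produced by the
   target's function-value hook; TYPE is the aggregate's type.  */

static rtx
example_spill_group_to_stack (rtx src_par, tree type)
{
  HOST_WIDE_INT size = int_size_in_bytes (type);
  rtx slot = assign_stack_temp (BLKmode, size);

  /* Copy each piece of SRC_PAR to its byte offset within SLOT.  */
  emit_group_store (slot, src_par, type, size);
  return slot;
}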
2107 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2108 of the value stored in X. */
2111 maybe_emit_group_store (rtx x, tree type)
2113 machine_mode mode = TYPE_MODE (type);
2114 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2115 if (GET_CODE (x) == PARALLEL)
2117 rtx result = gen_reg_rtx (mode);
2118 emit_group_store (result, x, type, int_size_in_bytes (type));
2119 return result;
2121 return x;
2124 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2126 This is used on targets that return BLKmode values in registers. */
2128 void
2129 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2131 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2132 rtx src = NULL, dst = NULL;
2133 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2134 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2135 machine_mode mode = GET_MODE (srcreg);
2136 machine_mode tmode = GET_MODE (target);
2137 machine_mode copy_mode;
2139 /* BLKmode registers created in the back-end shouldn't have survived. */
2140 gcc_assert (mode != BLKmode);
2142 /* If the structure doesn't take up a whole number of words, see whether
2143 SRCREG is padded on the left or on the right. If it's on the left,
2144 set PADDING_CORRECTION to the number of bits to skip.
2146 In most ABIs, the structure will be returned at the least significant end of
2147 the register, which translates to right padding on little-endian
2148 targets and left padding on big-endian targets. The opposite
2149 holds if the structure is returned at the most significant
2150 end of the register. */
2151 if (bytes % UNITS_PER_WORD != 0
2152 && (targetm.calls.return_in_msb (type)
2153 ? !BYTES_BIG_ENDIAN
2154 : BYTES_BIG_ENDIAN))
2155 padding_correction
2156 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2158 /* We can use a single move if we have an exact mode for the size. */
2159 else if (MEM_P (target)
2160 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2161 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2162 && bytes == GET_MODE_SIZE (mode))
2164 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2165 return;
2168 /* And if we additionally have the same mode for a register. */
2169 else if (REG_P (target)
2170 && GET_MODE (target) == mode
2171 && bytes == GET_MODE_SIZE (mode))
2173 emit_move_insn (target, srcreg);
2174 return;
2177 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2178 into a new pseudo which is a full word. */
2179 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2181 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2182 mode = word_mode;
2185 /* Copy the structure BITSIZE bits at a time. If the target lives in
2186 memory, take care of not reading/writing past its end by selecting
2187 a copy mode suited to BITSIZE. This should always be possible given
2188 how it is computed.
2190 If the target lives in a register, make sure not to select a copy mode
2191 larger than the mode of the register.
2193 We could probably emit more efficient code for machines which do not use
2194 strict alignment, but it doesn't seem worth the effort at the current
2195 time. */
2197 copy_mode = word_mode;
2198 if (MEM_P (target))
2200 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2201 if (mem_mode != BLKmode)
2202 copy_mode = mem_mode;
2204 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2205 copy_mode = tmode;
2207 for (bitpos = 0, xbitpos = padding_correction;
2208 bitpos < bytes * BITS_PER_UNIT;
2209 bitpos += bitsize, xbitpos += bitsize)
2211 /* We need a new source operand each time xbitpos is on a
2212 word boundary and when xbitpos == padding_correction
2213 (the first time through). */
2214 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2215 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2217 /* We need a new destination operand each time bitpos is on
2218 a word boundary. */
2219 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2220 dst = target;
2221 else if (bitpos % BITS_PER_WORD == 0)
2222 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2224 /* Use xbitpos for the source extraction (right justified) and
2225 bitpos for the destination store (left justified). */
2226 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2227 extract_bit_field (src, bitsize,
2228 xbitpos % BITS_PER_WORD, 1,
2229 NULL_RTX, copy_mode, copy_mode,
2230 false),
2231 false);
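/* Illustrative sketch (hypothetical, for exposition only): how a caller
   might use copy_blkmode_from_reg after a call that returns a small
   aggregate in a register.  RETVAL_REG is the hard register holding the
   returned bits and TYPE the aggregate's type; both are assumed given.  */

static rtx
example_copy_blk_return_value (rtx retval_reg, tree type)
{
  rtx target = assign_stack_temp (BLKmode, int_size_in_bytes (type));

  /* Unpack the register contents, honoring the ABI's padding rules,
     into the BLKmode stack temporary.  */
  copy_blkmode_from_reg (target, retval_reg, type);
  return target;
}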
2235 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2236 register if it contains any data, otherwise return null.
2238 This is used on targets that return BLKmode values in registers. */
2241 copy_blkmode_to_reg (machine_mode mode, tree src)
2243 int i, n_regs;
2244 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2245 unsigned int bitsize;
2246 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2247 machine_mode dst_mode;
2249 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2251 x = expand_normal (src);
2253 bytes = int_size_in_bytes (TREE_TYPE (src));
2254 if (bytes == 0)
2255 return NULL_RTX;
2257 /* If the structure doesn't take up a whole number of words, see
2258 whether the register value should be padded on the left or on
2259 the right. Set PADDING_CORRECTION to the number of padding
2260 bits needed on the left side.
2262 In most ABIs, the structure will be returned at the least significant end of
2263 the register, which translates to right padding on little-endian
2264 targets and left padding on big-endian targets. The opposite
2265 holds if the structure is returned at the most significant
2266 end of the register. */
2267 if (bytes % UNITS_PER_WORD != 0
2268 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2269 ? !BYTES_BIG_ENDIAN
2270 : BYTES_BIG_ENDIAN))
2271 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2272 * BITS_PER_UNIT));
2274 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2275 dst_words = XALLOCAVEC (rtx, n_regs);
2276 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2278 /* Copy the structure BITSIZE bits at a time. */
2279 for (bitpos = 0, xbitpos = padding_correction;
2280 bitpos < bytes * BITS_PER_UNIT;
2281 bitpos += bitsize, xbitpos += bitsize)
2283 /* We need a new destination pseudo each time xbitpos is
2284 on a word boundary and when xbitpos == padding_correction
2285 (the first time through). */
2286 if (xbitpos % BITS_PER_WORD == 0
2287 || xbitpos == padding_correction)
2289 /* Generate an appropriate register. */
2290 dst_word = gen_reg_rtx (word_mode);
2291 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2293 /* Clear the destination before we move anything into it. */
2294 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2297 /* We need a new source operand each time bitpos is on a word
2298 boundary. */
2299 if (bitpos % BITS_PER_WORD == 0)
2300 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2302 /* Use bitpos for the source extraction (left justified) and
2303 xbitpos for the destination store (right justified). */
2304 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2305 0, 0, word_mode,
2306 extract_bit_field (src_word, bitsize,
2307 bitpos % BITS_PER_WORD, 1,
2308 NULL_RTX, word_mode, word_mode,
2309 false),
2310 false);
2313 if (mode == BLKmode)
2315 /* Find the smallest integer mode large enough to hold the
2316 entire structure. */
2317 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2318 mode != VOIDmode;
2319 mode = GET_MODE_WIDER_MODE (mode))
2320 /* Have we found a large enough mode? */
2321 if (GET_MODE_SIZE (mode) >= bytes)
2322 break;
2324 /* A suitable mode should have been found. */
2325 gcc_assert (mode != VOIDmode);
2328 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2329 dst_mode = word_mode;
2330 else
2331 dst_mode = mode;
2332 dst = gen_reg_rtx (dst_mode);
2334 for (i = 0; i < n_regs; i++)
2335 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2337 if (mode != dst_mode)
2338 dst = gen_lowpart (mode, dst);
2340 return dst;
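/* Illustrative sketch (placeholder helper): the converse direction, as it
   might be used when expanding "return aggr;" for a function whose ABI
   returns the aggregate in registers.  RETVAL is the BLKmode value being
   returned and RESULT_REG the declared result register; both names are
   assumptions for the example.  */

static void
example_expand_blk_return (rtx result_reg, tree retval)
{
  rtx val = copy_blkmode_to_reg (GET_MODE (result_reg), retval);

  if (val != NULL_RTX)
    {
      /* The packed value may come back in a (possibly different) integer
	 mode; give the result register that mode before copying, much as
	 the return-statement expander does.  */
      PUT_MODE (result_reg, GET_MODE (val));
      emit_move_insn (result_reg, val);
    }
}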
2343 /* Add a USE expression for REG to the (possibly empty) list pointed
2344 to by CALL_FUSAGE. REG must denote a hard register. */
2346 void
2347 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2349 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2351 *call_fusage
2352 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2355 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2356 to by CALL_FUSAGE. REG must denote a hard register. */
2358 void
2359 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2361 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2363 *call_fusage
2364 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2367 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2368 starting at REGNO. All of these registers must be hard registers. */
2370 void
2371 use_regs (rtx *call_fusage, int regno, int nregs)
2373 int i;
2375 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2377 for (i = 0; i < nregs; i++)
2378 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2381 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2382 PARALLEL REGS. This is for calls that pass values in multiple
2383 non-contiguous locations. The Irix 6 ABI has examples of this. */
2385 void
2386 use_group_regs (rtx *call_fusage, rtx regs)
2388 int i;
2390 for (i = 0; i < XVECLEN (regs, 0); i++)
2392 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2394 /* A NULL entry means the parameter goes both on the stack and in
2395 registers. This can also be a MEM for targets that pass values
2396 partially on the stack and partially in registers. */
2397 if (reg != 0 && REG_P (reg))
2398 use_reg (call_fusage, reg);
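/* Illustrative sketch (hypothetical register numbers): building up a
   CALL_INSN_FUNCTION_USAGE list for a call whose arguments were passed in
   hard registers 4 and 5 and partly in a register group ARG_PAR.  The
   resulting list would normally be attached to the call insn by the call
   expander; the helper name is invented.  */

static rtx
example_build_call_fusage (rtx arg_par)
{
  rtx call_fusage = NULL_RTX;

  /* Two consecutive argument registers...  */
  use_regs (&call_fusage, 4, 2);

  /* ...plus whatever registers the PARALLEL names.  */
  use_group_regs (&call_fusage, arg_par);

  return call_fusage;
}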
2402 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2403 assignment and the code of the expression on the RHS is CODE. Return
2404 NULL otherwise. */
2406 static gimple
2407 get_def_for_expr (tree name, enum tree_code code)
2409 gimple def_stmt;
2411 if (TREE_CODE (name) != SSA_NAME)
2412 return NULL;
2414 def_stmt = get_gimple_for_ssa_name (name);
2415 if (!def_stmt
2416 || gimple_assign_rhs_code (def_stmt) != code)
2417 return NULL;
2419 return def_stmt;
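/* Illustrative sketch (not real code in this file): a typical use of
   get_def_for_expr while expanding an SSA name, here checking whether the
   operand was defined by a multiplication so that, for instance, a fused
   multiply-add could be considered.  The helper name is invented.  */

static bool
example_operand_is_mult (tree op)
{
  gimple def = get_def_for_expr (op, MULT_EXPR);

  /* DEF, when non-NULL, is the single defining assignment, and its
     rhs1/rhs2 are the multiplication's operands.  */
  return def != NULL;
}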
2422 #ifdef HAVE_conditional_move
2423 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2424 assignment and the class of the expression on the RHS is CLASS. Return
2425 NULL otherwise. */
2427 static gimple
2428 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2430 gimple def_stmt;
2432 if (TREE_CODE (name) != SSA_NAME)
2433 return NULL;
2435 def_stmt = get_gimple_for_ssa_name (name);
2436 if (!def_stmt
2437 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2438 return NULL;
2440 return def_stmt;
2442 #endif
2445 /* Determine whether the LEN bytes generated by CONSTFUN can be
2446 stored to memory using several move instructions. CONSTFUNDATA is
2447 a pointer which will be passed as argument in every CONSTFUN call.
2448 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2449 a memset operation and false if it's a copy of a constant string.
2450 Return nonzero if a call to store_by_pieces should succeed. */
2453 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2454 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2455 void *constfundata, unsigned int align, bool memsetp)
2457 unsigned HOST_WIDE_INT l;
2458 unsigned int max_size;
2459 HOST_WIDE_INT offset = 0;
2460 machine_mode mode;
2461 enum insn_code icode;
2462 int reverse;
2463 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2464 rtx cst ATTRIBUTE_UNUSED;
2466 if (len == 0)
2467 return 1;
2469 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2470 memsetp
2471 ? SET_BY_PIECES
2472 : STORE_BY_PIECES,
2473 optimize_insn_for_speed_p ()))
2474 return 0;
2476 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2478 /* We would first store what we can in the largest integer mode, then go to
2479 successively smaller modes. */
2481 for (reverse = 0;
2482 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2483 reverse++)
2485 l = len;
2486 max_size = STORE_MAX_PIECES + 1;
2487 while (max_size > 1 && l > 0)
2489 mode = widest_int_mode_for_size (max_size);
2491 if (mode == VOIDmode)
2492 break;
2494 icode = optab_handler (mov_optab, mode);
2495 if (icode != CODE_FOR_nothing
2496 && align >= GET_MODE_ALIGNMENT (mode))
2498 unsigned int size = GET_MODE_SIZE (mode);
2500 while (l >= size)
2502 if (reverse)
2503 offset -= size;
2505 cst = (*constfun) (constfundata, offset, mode);
2506 if (!targetm.legitimate_constant_p (mode, cst))
2507 return 0;
2509 if (!reverse)
2510 offset += size;
2512 l -= size;
2516 max_size = GET_MODE_SIZE (mode);
2519 /* The code above should have handled everything. */
2520 gcc_assert (!l);
2523 return 1;
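/* Illustrative sketch: a constant-generator callback plus a query wrapper
   showing how can_store_by_pieces is meant to be driven.  The callback
   here simply produces zero for every piece, i.e. it models a memset of
   value 0; both helper names are invented for the example.  */

static rtx
example_zero_constfun (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       machine_mode mode)
{
  return CONST0_RTX (mode);
}

static bool
example_can_zero_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
{
  /* MEMSETP is true because the callback ignores OFFSET.  */
  return can_store_by_pieces (len, example_zero_constfun, NULL, align, true)
	 != 0;
}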
2526 /* Generate several move instructions to store LEN bytes generated by
2527 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2528 pointer which will be passed as argument in every CONSTFUN call.
2529 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2530 a memset operation and false if it's a copy of a constant string.
2531 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2532 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2533 stpcpy. */
2536 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2537 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2538 void *constfundata, unsigned int align, bool memsetp, int endp)
2540 machine_mode to_addr_mode = get_address_mode (to);
2541 struct store_by_pieces_d data;
2543 if (len == 0)
2545 gcc_assert (endp != 2);
2546 return to;
2549 gcc_assert (targetm.use_by_pieces_infrastructure_p
2550 (len, align,
2551 memsetp
2552 ? SET_BY_PIECES
2553 : STORE_BY_PIECES,
2554 optimize_insn_for_speed_p ()));
2556 data.constfun = constfun;
2557 data.constfundata = constfundata;
2558 data.len = len;
2559 data.to = to;
2560 store_by_pieces_1 (&data, align);
2561 if (endp)
2563 rtx to1;
2565 gcc_assert (!data.reverse);
2566 if (data.autinc_to)
2568 if (endp == 2)
2570 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2571 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2572 else
2573 data.to_addr = copy_to_mode_reg (to_addr_mode,
2574 plus_constant (to_addr_mode,
2575 data.to_addr,
2576 -1));
2578 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2579 data.offset);
2581 else
2583 if (endp == 2)
2584 --data.offset;
2585 to1 = adjust_address (data.to, QImode, data.offset);
2587 return to1;
2589 else
2590 return data.to;
2593 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2594 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2596 static void
2597 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2599 struct store_by_pieces_d data;
2601 if (len == 0)
2602 return;
2604 data.constfun = clear_by_pieces_1;
2605 data.constfundata = NULL;
2606 data.len = len;
2607 data.to = to;
2608 store_by_pieces_1 (&data, align);
2611 /* Callback routine for clear_by_pieces.
2612 Return const0_rtx unconditionally. */
2614 static rtx
2615 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2616 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2617 machine_mode mode ATTRIBUTE_UNUSED)
2619 return const0_rtx;
2622 /* Subroutine of clear_by_pieces and store_by_pieces.
2623 Generate several move instructions to store LEN bytes of block TO. (A MEM
2624 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2626 static void
2627 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2628 unsigned int align ATTRIBUTE_UNUSED)
2630 machine_mode to_addr_mode = get_address_mode (data->to);
2631 rtx to_addr = XEXP (data->to, 0);
2632 unsigned int max_size = STORE_MAX_PIECES + 1;
2633 enum insn_code icode;
2635 data->offset = 0;
2636 data->to_addr = to_addr;
2637 data->autinc_to
2638 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2639 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2641 data->explicit_inc_to = 0;
2642 data->reverse
2643 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2644 if (data->reverse)
2645 data->offset = data->len;
2647 /* If storing requires more than two move insns,
2648 copy addresses to registers (to make displacements shorter)
2649 and use post-increment if available. */
2650 if (!data->autinc_to
2651 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2653 /* Determine the main mode we'll be using.
2654 MODE might not be used depending on the definitions of the
2655 USE_* macros below. */
2656 machine_mode mode ATTRIBUTE_UNUSED
2657 = widest_int_mode_for_size (max_size);
2659 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2661 data->to_addr = copy_to_mode_reg (to_addr_mode,
2662 plus_constant (to_addr_mode,
2663 to_addr,
2664 data->len));
2665 data->autinc_to = 1;
2666 data->explicit_inc_to = -1;
2669 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2670 && ! data->autinc_to)
2672 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2673 data->autinc_to = 1;
2674 data->explicit_inc_to = 1;
2677 if ( !data->autinc_to && CONSTANT_P (to_addr))
2678 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2681 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2683 /* First store what we can in the largest integer mode, then go to
2684 successively smaller modes. */
2686 while (max_size > 1 && data->len > 0)
2688 machine_mode mode = widest_int_mode_for_size (max_size);
2690 if (mode == VOIDmode)
2691 break;
2693 icode = optab_handler (mov_optab, mode);
2694 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2695 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2697 max_size = GET_MODE_SIZE (mode);
2700 /* The code above should have handled everything. */
2701 gcc_assert (!data->len);
2704 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2705 with move instructions for mode MODE. GENFUN is the gen_... function
2706 to make a move insn for that mode. DATA has all the other info. */
2708 static void
2709 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2710 struct store_by_pieces_d *data)
2712 unsigned int size = GET_MODE_SIZE (mode);
2713 rtx to1, cst;
2715 while (data->len >= size)
2717 if (data->reverse)
2718 data->offset -= size;
2720 if (data->autinc_to)
2721 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2722 data->offset);
2723 else
2724 to1 = adjust_address (data->to, mode, data->offset);
2726 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2727 emit_insn (gen_add2_insn (data->to_addr,
2728 gen_int_mode (-(HOST_WIDE_INT) size,
2729 GET_MODE (data->to_addr))));
2731 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2732 emit_insn ((*genfun) (to1, cst));
2734 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2735 emit_insn (gen_add2_insn (data->to_addr,
2736 gen_int_mode (size,
2737 GET_MODE (data->to_addr))));
2739 if (! data->reverse)
2740 data->offset += size;
2742 data->len -= size;
2746 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2747 its length in bytes. */
2750 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2751 unsigned int expected_align, HOST_WIDE_INT expected_size,
2752 unsigned HOST_WIDE_INT min_size,
2753 unsigned HOST_WIDE_INT max_size,
2754 unsigned HOST_WIDE_INT probable_max_size)
2756 machine_mode mode = GET_MODE (object);
2757 unsigned int align;
2759 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2761 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2762 just move a zero. Otherwise, do this a piece at a time. */
2763 if (mode != BLKmode
2764 && CONST_INT_P (size)
2765 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2767 rtx zero = CONST0_RTX (mode);
2768 if (zero != NULL)
2770 emit_move_insn (object, zero);
2771 return NULL;
2774 if (COMPLEX_MODE_P (mode))
2776 zero = CONST0_RTX (GET_MODE_INNER (mode));
2777 if (zero != NULL)
2779 write_complex_part (object, zero, 0);
2780 write_complex_part (object, zero, 1);
2781 return NULL;
2786 if (size == const0_rtx)
2787 return NULL;
2789 align = MEM_ALIGN (object);
2791 if (CONST_INT_P (size)
2792 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2793 CLEAR_BY_PIECES,
2794 optimize_insn_for_speed_p ()))
2795 clear_by_pieces (object, INTVAL (size), align);
2796 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2797 expected_align, expected_size,
2798 min_size, max_size, probable_max_size))
2800 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2801 return set_storage_via_libcall (object, size, const0_rtx,
2802 method == BLOCK_OP_TAILCALL);
2803 else
2804 gcc_unreachable ();
2806 return NULL;
2810 clear_storage (rtx object, rtx size, enum block_op_methods method)
2812 unsigned HOST_WIDE_INT max, min = 0;
2813 if (GET_CODE (size) == CONST_INT)
2814 min = max = UINTVAL (size);
2815 else
2816 max = GET_MODE_MASK (GET_MODE (size));
2817 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
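/* Illustrative sketch (hypothetical helper): zeroing a freshly allocated
   BLKmode stack temporary with clear_storage.  Depending on size, target
   hooks and alignment this ends up in clear_by_pieces, a setmem pattern or
   a memset libcall, as decided by the code above.  */

static rtx
example_make_cleared_temp (HOST_WIDE_INT size)
{
  rtx slot = assign_stack_temp (BLKmode, size);

  clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);
  return slot;
}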
2821 /* A subroutine of clear_storage. Expand a call to memset.
2822 Return the return value of memset, 0 otherwise. */
2825 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2827 tree call_expr, fn, object_tree, size_tree, val_tree;
2828 machine_mode size_mode;
2829 rtx retval;
2831 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2832 place those new pseudos into a VAR_DECL and use them later. */
2834 object = copy_addr_to_reg (XEXP (object, 0));
2836 size_mode = TYPE_MODE (sizetype);
2837 size = convert_to_mode (size_mode, size, 1);
2838 size = copy_to_mode_reg (size_mode, size);
2840 /* It is incorrect to use the libcall calling conventions to call
2841 memset in this context. This could be a user call to memset and
2842 the user may wish to examine the return value from memset. For
2843 targets where libcalls and normal calls have different conventions
2844 for returning pointers, we could end up generating incorrect code. */
2846 object_tree = make_tree (ptr_type_node, object);
2847 if (!CONST_INT_P (val))
2848 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2849 size_tree = make_tree (sizetype, size);
2850 val_tree = make_tree (integer_type_node, val);
2852 fn = clear_storage_libcall_fn (true);
2853 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2854 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2856 retval = expand_normal (call_expr);
2858 return retval;
2861 /* A subroutine of set_storage_via_libcall. Create the tree node
2862 for the function we use for block clears. */
2864 tree block_clear_fn;
2866 void
2867 init_block_clear_fn (const char *asmspec)
2869 if (!block_clear_fn)
2871 tree fn, args;
2873 fn = get_identifier ("memset");
2874 args = build_function_type_list (ptr_type_node, ptr_type_node,
2875 integer_type_node, sizetype,
2876 NULL_TREE);
2878 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2879 DECL_EXTERNAL (fn) = 1;
2880 TREE_PUBLIC (fn) = 1;
2881 DECL_ARTIFICIAL (fn) = 1;
2882 TREE_NOTHROW (fn) = 1;
2883 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2884 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2886 block_clear_fn = fn;
2889 if (asmspec)
2890 set_user_assembler_name (block_clear_fn, asmspec);
2893 static tree
2894 clear_storage_libcall_fn (int for_call)
2896 static bool emitted_extern;
2898 if (!block_clear_fn)
2899 init_block_clear_fn (NULL);
2901 if (for_call && !emitted_extern)
2903 emitted_extern = true;
2904 make_decl_rtl (block_clear_fn);
2907 return block_clear_fn;
2910 /* Expand a setmem pattern; return true if successful. */
2912 bool
2913 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2914 unsigned int expected_align, HOST_WIDE_INT expected_size,
2915 unsigned HOST_WIDE_INT min_size,
2916 unsigned HOST_WIDE_INT max_size,
2917 unsigned HOST_WIDE_INT probable_max_size)
2919 /* Try the most limited insn first, because there's no point
2920 including more than one in the machine description unless
2921 the more limited one has some advantage. */
2923 machine_mode mode;
2925 if (expected_align < align)
2926 expected_align = align;
2927 if (expected_size != -1)
2929 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2930 expected_size = max_size;
2931 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2932 expected_size = min_size;
2935 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2936 mode = GET_MODE_WIDER_MODE (mode))
2938 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2940 if (code != CODE_FOR_nothing
2941 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2942 here because if SIZE is less than the mode mask, as it is
2943 returned by the macro, it will definitely be less than the
2944 actual mode mask. Since SIZE is within the Pmode address
2945 space, we limit MODE to Pmode. */
2946 && ((CONST_INT_P (size)
2947 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2948 <= (GET_MODE_MASK (mode) >> 1)))
2949 || max_size <= (GET_MODE_MASK (mode) >> 1)
2950 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2952 struct expand_operand ops[9];
2953 unsigned int nops;
2955 nops = insn_data[(int) code].n_generator_args;
2956 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2958 create_fixed_operand (&ops[0], object);
2959 /* The check above guarantees that this size conversion is valid. */
2960 create_convert_operand_to (&ops[1], size, mode, true);
2961 create_convert_operand_from (&ops[2], val, byte_mode, true);
2962 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2963 if (nops >= 6)
2965 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2966 create_integer_operand (&ops[5], expected_size);
2968 if (nops >= 8)
2970 create_integer_operand (&ops[6], min_size);
2971 /* If we cannot represent the maximal size,
2972 make the parameter NULL. */
2973 if ((HOST_WIDE_INT) max_size != -1)
2974 create_integer_operand (&ops[7], max_size);
2975 else
2976 create_fixed_operand (&ops[7], NULL);
2978 if (nops == 9)
2980 /* If we cannot represent the maximal size,
2981 make the parameter NULL. */
2982 if ((HOST_WIDE_INT) probable_max_size != -1)
2983 create_integer_operand (&ops[8], probable_max_size);
2984 else
2985 create_fixed_operand (&ops[8], NULL);
2987 if (maybe_expand_insn (code, nops, ops))
2988 return true;
2992 return false;
2996 /* Write to one of the components of the complex value CPLX. Write VAL to
2997 the real part if IMAG_P is false, and the imaginary part if it's true. */
2999 static void
3000 write_complex_part (rtx cplx, rtx val, bool imag_p)
3002 machine_mode cmode;
3003 machine_mode imode;
3004 unsigned ibitsize;
3006 if (GET_CODE (cplx) == CONCAT)
3008 emit_move_insn (XEXP (cplx, imag_p), val);
3009 return;
3012 cmode = GET_MODE (cplx);
3013 imode = GET_MODE_INNER (cmode);
3014 ibitsize = GET_MODE_BITSIZE (imode);
3016 /* For MEMs simplify_gen_subreg may generate an invalid new address
3017 because, e.g., the original address is considered mode-dependent
3018 by the target, which restricts simplify_subreg from invoking
3019 adjust_address_nv. Instead of preparing fallback support for an
3020 invalid address, we call adjust_address_nv directly. */
3021 if (MEM_P (cplx))
3023 emit_move_insn (adjust_address_nv (cplx, imode,
3024 imag_p ? GET_MODE_SIZE (imode) : 0),
3025 val);
3026 return;
3029 /* If the sub-object is at least word sized, then we know that subregging
3030 will work. This special case is important, since store_bit_field
3031 wants to operate on integer modes, and there's rarely an OImode to
3032 correspond to TCmode. */
3033 if (ibitsize >= BITS_PER_WORD
3034 /* For hard regs we have exact predicates. Assume we can split
3035 the original object if it spans an even number of hard regs.
3036 This special case is important for SCmode on 64-bit platforms
3037 where the natural size of floating-point regs is 32-bit. */
3038 || (REG_P (cplx)
3039 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3040 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3042 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3043 imag_p ? GET_MODE_SIZE (imode) : 0);
3044 if (part)
3046 emit_move_insn (part, val);
3047 return;
3049 else
3050 /* simplify_gen_subreg may fail for sub-word MEMs. */
3051 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3054 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3055 false);
3058 /* Extract one of the components of the complex value CPLX. Extract the
3059 real part if IMAG_P is false, and the imaginary part if it's true. */
3061 static rtx
3062 read_complex_part (rtx cplx, bool imag_p)
3064 machine_mode cmode, imode;
3065 unsigned ibitsize;
3067 if (GET_CODE (cplx) == CONCAT)
3068 return XEXP (cplx, imag_p);
3070 cmode = GET_MODE (cplx);
3071 imode = GET_MODE_INNER (cmode);
3072 ibitsize = GET_MODE_BITSIZE (imode);
3074 /* Special case reads from complex constants that got spilled to memory. */
3075 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3077 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3078 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3080 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3081 if (CONSTANT_CLASS_P (part))
3082 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3086 /* For MEMs simplify_gen_subreg may generate an invalid new address
3087 because, e.g., the original address is considered mode-dependent
3088 by the target, which restricts simplify_subreg from invoking
3089 adjust_address_nv. Instead of preparing fallback support for an
3090 invalid address, we call adjust_address_nv directly. */
3091 if (MEM_P (cplx))
3092 return adjust_address_nv (cplx, imode,
3093 imag_p ? GET_MODE_SIZE (imode) : 0);
3095 /* If the sub-object is at least word sized, then we know that subregging
3096 will work. This special case is important, since extract_bit_field
3097 wants to operate on integer modes, and there's rarely an OImode to
3098 correspond to TCmode. */
3099 if (ibitsize >= BITS_PER_WORD
3100 /* For hard regs we have exact predicates. Assume we can split
3101 the original object if it spans an even number of hard regs.
3102 This special case is important for SCmode on 64-bit platforms
3103 where the natural size of floating-point regs is 32-bit. */
3104 || (REG_P (cplx)
3105 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3106 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3108 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3109 imag_p ? GET_MODE_SIZE (imode) : 0);
3110 if (ret)
3111 return ret;
3112 else
3113 /* simplify_gen_subreg may fail for sub-word MEMs. */
3114 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3117 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3118 true, NULL_RTX, imode, imode, false);
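/* Illustrative sketch (local helper invented for the example): computing a
   complex conjugate in place with the two accessors above, by negating
   only the imaginary part.  CPLX is assumed to be a writable complex
   value (CONCAT, REG or MEM).  */

static void
example_conjugate_in_place (rtx cplx)
{
  rtx im = read_complex_part (cplx, true);
  rtx neg = expand_unop (GET_MODE (im), neg_optab, im, NULL_RTX, 0);

  write_complex_part (cplx, neg, true);
}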
3121 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3122 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3123 represented in NEW_MODE. If FORCE is true, this will never happen, as
3124 we'll force-create a SUBREG if needed. */
3126 static rtx
3127 emit_move_change_mode (machine_mode new_mode,
3128 machine_mode old_mode, rtx x, bool force)
3130 rtx ret;
3132 if (push_operand (x, GET_MODE (x)))
3134 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3135 MEM_COPY_ATTRIBUTES (ret, x);
3137 else if (MEM_P (x))
3139 /* We don't have to worry about changing the address since the
3140 size in bytes is supposed to be the same. */
3141 if (reload_in_progress)
3143 /* Copy the MEM to change the mode and move any
3144 substitutions from the old MEM to the new one. */
3145 ret = adjust_address_nv (x, new_mode, 0);
3146 copy_replacements (x, ret);
3148 else
3149 ret = adjust_address (x, new_mode, 0);
3151 else
3153 /* Note that we do want simplify_subreg's behavior of validating
3154 that the new mode is ok for a hard register. If we were to use
3155 simplify_gen_subreg, we would create the subreg, but would
3156 probably run into the target not being able to implement it. */
3157 /* Except, of course, when FORCE is true, when this is exactly what
3158 we want. Which is needed for CCmodes on some targets. */
3159 if (force)
3160 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3161 else
3162 ret = simplify_subreg (new_mode, x, old_mode, 0);
3165 return ret;
3168 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3169 an integer mode of the same size as MODE. Returns the instruction
3170 emitted, or NULL if such a move could not be generated. */
3172 static rtx_insn *
3173 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3175 machine_mode imode;
3176 enum insn_code code;
3178 /* There must exist a mode of the exact size we require. */
3179 imode = int_mode_for_mode (mode);
3180 if (imode == BLKmode)
3181 return NULL;
3183 /* The target must support moves in this mode. */
3184 code = optab_handler (mov_optab, imode);
3185 if (code == CODE_FOR_nothing)
3186 return NULL;
3188 x = emit_move_change_mode (imode, mode, x, force);
3189 if (x == NULL_RTX)
3190 return NULL;
3191 y = emit_move_change_mode (imode, mode, y, force);
3192 if (y == NULL_RTX)
3193 return NULL;
3194 return emit_insn (GEN_FCN (code) (x, y));
3197 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3198 Return an equivalent MEM that does not use an auto-increment. */
3201 emit_move_resolve_push (machine_mode mode, rtx x)
3203 enum rtx_code code = GET_CODE (XEXP (x, 0));
3204 HOST_WIDE_INT adjust;
3205 rtx temp;
3207 adjust = GET_MODE_SIZE (mode);
3208 #ifdef PUSH_ROUNDING
3209 adjust = PUSH_ROUNDING (adjust);
3210 #endif
3211 if (code == PRE_DEC || code == POST_DEC)
3212 adjust = -adjust;
3213 else if (code == PRE_MODIFY || code == POST_MODIFY)
3215 rtx expr = XEXP (XEXP (x, 0), 1);
3216 HOST_WIDE_INT val;
3218 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3219 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3220 val = INTVAL (XEXP (expr, 1));
3221 if (GET_CODE (expr) == MINUS)
3222 val = -val;
3223 gcc_assert (adjust == val || adjust == -val);
3224 adjust = val;
3227 /* Do not use anti_adjust_stack, since we don't want to update
3228 stack_pointer_delta. */
3229 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3230 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3231 0, OPTAB_LIB_WIDEN);
3232 if (temp != stack_pointer_rtx)
3233 emit_move_insn (stack_pointer_rtx, temp);
3235 switch (code)
3237 case PRE_INC:
3238 case PRE_DEC:
3239 case PRE_MODIFY:
3240 temp = stack_pointer_rtx;
3241 break;
3242 case POST_INC:
3243 case POST_DEC:
3244 case POST_MODIFY:
3245 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3246 break;
3247 default:
3248 gcc_unreachable ();
3251 return replace_equiv_address (x, temp);
3254 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3255 X is known to satisfy push_operand, and MODE is known to be complex.
3256 Returns the last instruction emitted. */
3258 rtx_insn *
3259 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3261 machine_mode submode = GET_MODE_INNER (mode);
3262 bool imag_first;
3264 #ifdef PUSH_ROUNDING
3265 unsigned int submodesize = GET_MODE_SIZE (submode);
3267 /* In case we output to the stack, but the size is smaller than the
3268 machine can push exactly, we need to use move instructions. */
3269 if (PUSH_ROUNDING (submodesize) != submodesize)
3271 x = emit_move_resolve_push (mode, x);
3272 return emit_move_insn (x, y);
3274 #endif
3276 /* Note that the real part always precedes the imag part in memory
3277 regardless of machine's endianness. */
3278 switch (GET_CODE (XEXP (x, 0)))
3280 case PRE_DEC:
3281 case POST_DEC:
3282 imag_first = true;
3283 break;
3284 case PRE_INC:
3285 case POST_INC:
3286 imag_first = false;
3287 break;
3288 default:
3289 gcc_unreachable ();
3292 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 read_complex_part (y, imag_first));
3294 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3295 read_complex_part (y, !imag_first));
3298 /* A subroutine of emit_move_complex. Perform the move from Y to X
3299 via two moves of the parts. Returns the last instruction emitted. */
3301 rtx_insn *
3302 emit_move_complex_parts (rtx x, rtx y)
3304 /* Show the output dies here. This is necessary for SUBREGs
3305 of pseudos since we cannot track their lifetimes correctly;
3306 hard regs shouldn't appear here except as return values. */
3307 if (!reload_completed && !reload_in_progress
3308 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3309 emit_clobber (x);
3311 write_complex_part (x, read_complex_part (y, false), false);
3312 write_complex_part (x, read_complex_part (y, true), true);
3314 return get_last_insn ();
3317 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3318 MODE is known to be complex. Returns the last instruction emitted. */
3320 static rtx_insn *
3321 emit_move_complex (machine_mode mode, rtx x, rtx y)
3323 bool try_int;
3325 /* Need to take special care for pushes, to maintain proper ordering
3326 of the data, and possibly extra padding. */
3327 if (push_operand (x, mode))
3328 return emit_move_complex_push (mode, x, y);
3330 /* See if we can coerce the target into moving both values at once, except
3331 for floating point where we favor moving as parts if this is easy. */
3332 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3333 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3334 && !(REG_P (x)
3335 && HARD_REGISTER_P (x)
3336 && hard_regno_nregs[REGNO (x)][mode] == 1)
3337 && !(REG_P (y)
3338 && HARD_REGISTER_P (y)
3339 && hard_regno_nregs[REGNO (y)][mode] == 1))
3340 try_int = false;
3341 /* Not possible if the values are inherently not adjacent. */
3342 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3343 try_int = false;
3344 /* Is possible if both are registers (or subregs of registers). */
3345 else if (register_operand (x, mode) && register_operand (y, mode))
3346 try_int = true;
3347 /* If one of the operands is a memory, and alignment constraints
3348 are friendly enough, we may be able to do combined memory operations.
3349 We do not attempt this if Y is a constant because that combination is
3350 usually better with the by-parts thing below. */
3351 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3352 && (!STRICT_ALIGNMENT
3353 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3354 try_int = true;
3355 else
3356 try_int = false;
3358 if (try_int)
3360 rtx_insn *ret;
3362 /* For memory to memory moves, optimal behavior can be had with the
3363 existing block move logic. */
3364 if (MEM_P (x) && MEM_P (y))
3366 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3367 BLOCK_OP_NO_LIBCALL);
3368 return get_last_insn ();
3371 ret = emit_move_via_integer (mode, x, y, true);
3372 if (ret)
3373 return ret;
3376 return emit_move_complex_parts (x, y);
3379 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3380 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3382 static rtx_insn *
3383 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3385 rtx_insn *ret;
3387 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3388 if (mode != CCmode)
3390 enum insn_code code = optab_handler (mov_optab, CCmode);
3391 if (code != CODE_FOR_nothing)
3393 x = emit_move_change_mode (CCmode, mode, x, true);
3394 y = emit_move_change_mode (CCmode, mode, y, true);
3395 return emit_insn (GEN_FCN (code) (x, y));
3399 /* Otherwise, find the MODE_INT mode of the same width. */
3400 ret = emit_move_via_integer (mode, x, y, false);
3401 gcc_assert (ret != NULL);
3402 return ret;
3405 /* Return true if word I of OP lies entirely in the
3406 undefined bits of a paradoxical subreg. */
3408 static bool
3409 undefined_operand_subword_p (const_rtx op, int i)
3411 machine_mode innermode, innermostmode;
3412 int offset;
3413 if (GET_CODE (op) != SUBREG)
3414 return false;
3415 innermode = GET_MODE (op);
3416 innermostmode = GET_MODE (SUBREG_REG (op));
3417 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3418 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3419 memory, except for a paradoxical subreg where we define
3420 SUBREG_BYTE to be 0; undo this exception as in
3421 simplify_subreg. */
3422 if (SUBREG_BYTE (op) == 0
3423 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3425 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3426 if (WORDS_BIG_ENDIAN)
3427 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3428 if (BYTES_BIG_ENDIAN)
3429 offset += difference % UNITS_PER_WORD;
3431 if (offset >= GET_MODE_SIZE (innermostmode)
3432 || offset <= -GET_MODE_SIZE (word_mode))
3433 return true;
3434 return false;
3437 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3438 MODE is any multi-word or full-word mode that lacks a move_insn
3439 pattern. Note that you will get better code if you define such
3440 patterns, even if they must turn into multiple assembler instructions. */
3442 static rtx_insn *
3443 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3445 rtx_insn *last_insn = 0;
3446 rtx_insn *seq;
3447 rtx inner;
3448 bool need_clobber;
3449 int i;
3451 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3453 /* If X is a push on the stack, do the push now and replace
3454 X with a reference to the stack pointer. */
3455 if (push_operand (x, mode))
3456 x = emit_move_resolve_push (mode, x);
3458 /* If we are in reload, see if either operand is a MEM whose address
3459 is scheduled for replacement. */
3460 if (reload_in_progress && MEM_P (x)
3461 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3462 x = replace_equiv_address_nv (x, inner);
3463 if (reload_in_progress && MEM_P (y)
3464 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3465 y = replace_equiv_address_nv (y, inner);
3467 start_sequence ();
3469 need_clobber = false;
3470 for (i = 0;
3471 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3472 i++)
3474 rtx xpart = operand_subword (x, i, 1, mode);
3475 rtx ypart;
3477 /* Do not generate code for a move if it would come entirely
3478 from the undefined bits of a paradoxical subreg. */
3479 if (undefined_operand_subword_p (y, i))
3480 continue;
3482 ypart = operand_subword (y, i, 1, mode);
3484 /* If we can't get a part of Y, put Y into memory if it is a
3485 constant. Otherwise, force it into a register. Then we must
3486 be able to get a part of Y. */
3487 if (ypart == 0 && CONSTANT_P (y))
3489 y = use_anchored_address (force_const_mem (mode, y));
3490 ypart = operand_subword (y, i, 1, mode);
3492 else if (ypart == 0)
3493 ypart = operand_subword_force (y, i, mode);
3495 gcc_assert (xpart && ypart);
3497 need_clobber |= (GET_CODE (xpart) == SUBREG);
3499 last_insn = emit_move_insn (xpart, ypart);
3502 seq = get_insns ();
3503 end_sequence ();
3505 /* Show the output dies here. This is necessary for SUBREGs
3506 of pseudos since we cannot track their lifetimes correctly;
3507 hard regs shouldn't appear here except as return values.
3508 We never want to emit such a clobber after reload. */
3509 if (x != y
3510 && ! (reload_in_progress || reload_completed)
3511 && need_clobber != 0)
3512 emit_clobber (x);
3514 emit_insn (seq);
3516 return last_insn;
3519 /* Low level part of emit_move_insn.
3520 Called just like emit_move_insn, but assumes X and Y
3521 are basically valid. */
3523 rtx_insn *
3524 emit_move_insn_1 (rtx x, rtx y)
3526 machine_mode mode = GET_MODE (x);
3527 enum insn_code code;
3529 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3531 code = optab_handler (mov_optab, mode);
3532 if (code != CODE_FOR_nothing)
3533 return emit_insn (GEN_FCN (code) (x, y));
3535 /* Expand complex moves by moving real part and imag part. */
3536 if (COMPLEX_MODE_P (mode))
3537 return emit_move_complex (mode, x, y);
3539 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3540 || ALL_FIXED_POINT_MODE_P (mode))
3542 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3544 /* If we can't find an integer mode, fall back to multiple words. */
3545 if (result)
3546 return result;
3547 else
3548 return emit_move_multi_word (mode, x, y);
3551 if (GET_MODE_CLASS (mode) == MODE_CC)
3552 return emit_move_ccmode (mode, x, y);
3554 /* Try using a move pattern for the corresponding integer mode. This is
3555 only safe when simplify_subreg can convert MODE constants into integer
3556 constants. At present, it can only do this reliably if the value
3557 fits within a HOST_WIDE_INT. */
3558 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3560 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3562 if (ret)
3564 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3565 return ret;
3569 return emit_move_multi_word (mode, x, y);
3572 /* Generate code to copy Y into X.
3573 Both Y and X must have the same mode, except that
3574 Y can be a constant with VOIDmode.
3575 This mode cannot be BLKmode; use emit_block_move for that.
3577 Return the last instruction emitted. */
3579 rtx_insn *
3580 emit_move_insn (rtx x, rtx y)
3582 machine_mode mode = GET_MODE (x);
3583 rtx y_cst = NULL_RTX;
3584 rtx_insn *last_insn;
3585 rtx set;
3587 gcc_assert (mode != BLKmode
3588 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3590 if (CONSTANT_P (y))
3592 if (optimize
3593 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3594 && (last_insn = compress_float_constant (x, y)))
3595 return last_insn;
3597 y_cst = y;
3599 if (!targetm.legitimate_constant_p (mode, y))
3601 y = force_const_mem (mode, y);
3603 /* If the target's cannot_force_const_mem prevented the spill,
3604 assume that the target's move expanders will also take care
3605 of the non-legitimate constant. */
3606 if (!y)
3607 y = y_cst;
3608 else
3609 y = use_anchored_address (y);
3613 /* If X or Y are memory references, verify that their addresses are valid
3614 for the machine. */
3615 if (MEM_P (x)
3616 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3617 MEM_ADDR_SPACE (x))
3618 && ! push_operand (x, GET_MODE (x))))
3619 x = validize_mem (x);
3621 if (MEM_P (y)
3622 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3623 MEM_ADDR_SPACE (y)))
3624 y = validize_mem (y);
3626 gcc_assert (mode != BLKmode);
3628 last_insn = emit_move_insn_1 (x, y);
3630 if (y_cst && REG_P (x)
3631 && (set = single_set (last_insn)) != NULL_RTX
3632 && SET_DEST (set) == x
3633 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3634 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3636 return last_insn;
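/* Illustrative sketch: the most common usage pattern of emit_move_insn,
   materializing an integer constant in a fresh pseudo.  MODE is assumed to
   be a scalar integer mode; if the constant is not legitimate for the
   target, the code above spills it to the constant pool automatically.
   The helper name is invented.  */

static rtx
example_force_constant_into_pseudo (machine_mode mode, HOST_WIDE_INT value)
{
  rtx reg = gen_reg_rtx (mode);

  emit_move_insn (reg, gen_int_mode (value, mode));
  return reg;
}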
3639 /* Generate the body of an instruction to copy Y into X.
3640 It may be a list of insns, if one insn isn't enough. */
3643 gen_move_insn (rtx x, rtx y)
3645 rtx_insn *seq;
3647 start_sequence ();
3648 emit_move_insn_1 (x, y);
3649 seq = get_insns ();
3650 end_sequence ();
3651 return seq;
3654 /* If Y is representable exactly in a narrower mode, and the target can
3655 perform the extension directly from constant or memory, then emit the
3656 move as an extension. */
3658 static rtx_insn *
3659 compress_float_constant (rtx x, rtx y)
3661 machine_mode dstmode = GET_MODE (x);
3662 machine_mode orig_srcmode = GET_MODE (y);
3663 machine_mode srcmode;
3664 REAL_VALUE_TYPE r;
3665 int oldcost, newcost;
3666 bool speed = optimize_insn_for_speed_p ();
3668 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3670 if (targetm.legitimate_constant_p (dstmode, y))
3671 oldcost = set_src_cost (y, speed);
3672 else
3673 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3675 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3676 srcmode != orig_srcmode;
3677 srcmode = GET_MODE_WIDER_MODE (srcmode))
3679 enum insn_code ic;
3680 rtx trunc_y;
3681 rtx_insn *last_insn;
3683 /* Skip if the target can't extend this way. */
3684 ic = can_extend_p (dstmode, srcmode, 0);
3685 if (ic == CODE_FOR_nothing)
3686 continue;
3688 /* Skip if the narrowed value isn't exact. */
3689 if (! exact_real_truncate (srcmode, &r))
3690 continue;
3692 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3694 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3696 /* Skip if the target needs extra instructions to perform
3697 the extension. */
3698 if (!insn_operand_matches (ic, 1, trunc_y))
3699 continue;
3700 /* This is valid, but may not be cheaper than the original. */
3701 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3702 speed);
3703 if (oldcost < newcost)
3704 continue;
3706 else if (float_extend_from_mem[dstmode][srcmode])
3708 trunc_y = force_const_mem (srcmode, trunc_y);
3709 /* This is valid, but may not be cheaper than the original. */
3710 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3711 speed);
3712 if (oldcost < newcost)
3713 continue;
3714 trunc_y = validize_mem (trunc_y);
3716 else
3717 continue;
3719 /* For CSE's benefit, force the compressed constant pool entry
3720 into a new pseudo. This constant may be used in different modes,
3721 and if not, combine will put things back together for us. */
3722 trunc_y = force_reg (srcmode, trunc_y);
3724 /* If x is a hard register, perform the extension into a pseudo,
3725 so that e.g. stack realignment code is aware of it. */
3726 rtx target = x;
3727 if (REG_P (x) && HARD_REGISTER_P (x))
3728 target = gen_reg_rtx (dstmode);
3730 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3731 last_insn = get_last_insn ();
3733 if (REG_P (target))
3734 set_unique_reg_note (last_insn, REG_EQUAL, y);
3736 if (target != x)
3737 return emit_move_insn (x, target);
3738 return last_insn;
3741 return NULL;
3744 /* Pushing data onto the stack. */
3746 /* Push a block of length SIZE (perhaps variable)
3747 and return an rtx to address the beginning of the block.
3748 The value may be virtual_outgoing_args_rtx.
3750 EXTRA is the number of bytes of padding to push in addition to SIZE.
3751 BELOW nonzero means this padding comes at low addresses;
3752 otherwise, the padding comes at high addresses. */
3755 push_block (rtx size, int extra, int below)
3757 rtx temp;
3759 size = convert_modes (Pmode, ptr_mode, size, 1);
3760 if (CONSTANT_P (size))
3761 anti_adjust_stack (plus_constant (Pmode, size, extra));
3762 else if (REG_P (size) && extra == 0)
3763 anti_adjust_stack (size);
3764 else
3766 temp = copy_to_mode_reg (Pmode, size);
3767 if (extra != 0)
3768 temp = expand_binop (Pmode, add_optab, temp,
3769 gen_int_mode (extra, Pmode),
3770 temp, 0, OPTAB_LIB_WIDEN);
3771 anti_adjust_stack (temp);
3774 #ifndef STACK_GROWS_DOWNWARD
3775 if (0)
3776 #else
3777 if (1)
3778 #endif
3780 temp = virtual_outgoing_args_rtx;
3781 if (extra != 0 && below)
3782 temp = plus_constant (Pmode, temp, extra);
3784 else
3786 if (CONST_INT_P (size))
3787 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3788 -INTVAL (size) - (below ? 0 : extra));
3789 else if (extra != 0 && !below)
3790 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3791 negate_rtx (Pmode, plus_constant (Pmode, size,
3792 extra)));
3793 else
3794 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3795 negate_rtx (Pmode, size));
3798 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
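/* As a worked illustration of push_block, assume STACK_GROWS_DOWNWARD
   and a hypothetical call with SIZE = 16, EXTRA = 8 and BELOW = 1:
   the stack is anti-adjusted by 24 bytes, and because the padding
   sits at the low addresses the returned block address is
   virtual_outgoing_args_rtx + 8.  With BELOW = 0 the same 24-byte
   adjustment is made, but the block starts at
   virtual_outgoing_args_rtx and the padding lies above it.  */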
3801 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3803 static rtx
3804 mem_autoinc_base (rtx mem)
3806 if (MEM_P (mem))
3808 rtx addr = XEXP (mem, 0);
3809 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3810 return XEXP (addr, 0);
3812 return NULL;
3815 /* A utility routine used here, in reload, and in try_split. The insns
3816 after PREV up to and including LAST are known to adjust the stack,
3817 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3818 placing notes as appropriate. PREV may be NULL, indicating the
3819 entire insn sequence prior to LAST should be scanned.
3821 The set of allowed stack pointer modifications is small:
3822 (1) One or more auto-inc style memory references (aka pushes),
3823 (2) One or more addition/subtraction with the SP as destination,
3824 (3) A single move insn with the SP as destination,
3825 (4) A call_pop insn,
3826 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3828 Insns in the sequence that do not modify the SP are ignored,
3829 except for noreturn calls.
3831 The return value is the amount of adjustment that can be trivially
3832 verified, via immediate operand or auto-inc. If the adjustment
3833 cannot be trivially extracted, the return value is INT_MIN. */
3835 HOST_WIDE_INT
3836 find_args_size_adjust (rtx_insn *insn)
3838 rtx dest, set, pat;
3839 int i;
3841 pat = PATTERN (insn);
3842 set = NULL;
3844 /* Look for a call_pop pattern. */
3845 if (CALL_P (insn))
3847 /* We have to allow non-call_pop patterns for the case
3848 of emit_single_push_insn of a TLS address. */
3849 if (GET_CODE (pat) != PARALLEL)
3850 return 0;
3852 /* All call_pop patterns have a stack pointer adjust in the parallel.
3853 The call itself is always first, and the stack adjust is
3854 usually last, so search from the end. */
3855 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3857 set = XVECEXP (pat, 0, i);
3858 if (GET_CODE (set) != SET)
3859 continue;
3860 dest = SET_DEST (set);
3861 if (dest == stack_pointer_rtx)
3862 break;
3864 /* We'd better have found the stack pointer adjust. */
3865 if (i == 0)
3866 return 0;
3867 /* Fall through to process the extracted SET and DEST
3868 as if it was a standalone insn. */
3870 else if (GET_CODE (pat) == SET)
3871 set = pat;
3872 else if ((set = single_set (insn)) != NULL)
3874 else if (GET_CODE (pat) == PARALLEL)
3876 /* ??? Some older ports use a parallel with a stack adjust
3877 and a store for a PUSH_ROUNDING pattern, rather than a
3878 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3879 /* ??? See h8300 and m68k, pushqi1. */
3880 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3882 set = XVECEXP (pat, 0, i);
3883 if (GET_CODE (set) != SET)
3884 continue;
3885 dest = SET_DEST (set);
3886 if (dest == stack_pointer_rtx)
3887 break;
3889 /* We do not expect an auto-inc of the sp in the parallel. */
3890 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3891 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3892 != stack_pointer_rtx);
3894 if (i < 0)
3895 return 0;
3897 else
3898 return 0;
3900 dest = SET_DEST (set);
3902 /* Look for direct modifications of the stack pointer. */
3903 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3905 /* Look for a trivial adjustment, otherwise assume nothing. */
3906 /* Note that the SPU restore_stack_block pattern refers to
3907 the stack pointer in V4SImode. Consider that non-trivial. */
3908 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3909 && GET_CODE (SET_SRC (set)) == PLUS
3910 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3911 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3912 return INTVAL (XEXP (SET_SRC (set), 1));
3913 /* ??? Reload can generate no-op moves, which will be cleaned
3914 up later. Recognize it and continue searching. */
3915 else if (rtx_equal_p (dest, SET_SRC (set)))
3916 return 0;
3917 else
3918 return HOST_WIDE_INT_MIN;
3920 else
3922 rtx mem, addr;
3924 /* Otherwise only think about autoinc patterns. */
3925 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3927 mem = dest;
3928 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3929 != stack_pointer_rtx);
3931 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3932 mem = SET_SRC (set);
3933 else
3934 return 0;
3936 addr = XEXP (mem, 0);
3937 switch (GET_CODE (addr))
3939 case PRE_INC:
3940 case POST_INC:
3941 return GET_MODE_SIZE (GET_MODE (mem));
3942 case PRE_DEC:
3943 case POST_DEC:
3944 return -GET_MODE_SIZE (GET_MODE (mem));
3945 case PRE_MODIFY:
3946 case POST_MODIFY:
3947 addr = XEXP (addr, 1);
3948 gcc_assert (GET_CODE (addr) == PLUS);
3949 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3950 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3951 return INTVAL (XEXP (addr, 1));
3952 default:
3953 gcc_unreachable ();
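/* As an example of the values find_args_size_adjust returns, assume a
   hypothetical 32-bit target: (set (reg sp) (plus (reg sp)
   (const_int -16))) yields -16, a push such as (set (mem:SI
   (pre_dec (reg sp))) (reg r0)) yields -4, and a stack pointer set
   that is not a simple constant adjustment, say (set (reg sp)
   (reg r1)), yields HOST_WIDE_INT_MIN.  */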
3959 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3961 int args_size = end_args_size;
3962 bool saw_unknown = false;
3963 rtx_insn *insn;
3965 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3967 HOST_WIDE_INT this_delta;
3969 if (!NONDEBUG_INSN_P (insn))
3970 continue;
3972 this_delta = find_args_size_adjust (insn);
3973 if (this_delta == 0)
3975 if (!CALL_P (insn)
3976 || ACCUMULATE_OUTGOING_ARGS
3977 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3978 continue;
3981 gcc_assert (!saw_unknown);
3982 if (this_delta == HOST_WIDE_INT_MIN)
3983 saw_unknown = true;
3985 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3986 #ifdef STACK_GROWS_DOWNWARD
3987 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3988 #endif
3989 args_size -= this_delta;
3992 return saw_unknown ? INT_MIN : args_size;
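/* A worked example for fixup_args_size_notes, assuming
   STACK_GROWS_DOWNWARD: for two consecutive 4-byte pushes with
   END_ARGS_SIZE == 8, the walk runs backward, so the second push
   receives a REG_ARGS_SIZE note of 8, ARGS_SIZE drops to 4, the
   first push receives a note of 4, and the function returns 0,
   the args-size level in effect before the sequence.  */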
3995 #ifdef PUSH_ROUNDING
3996 /* Emit single push insn. */
3998 static void
3999 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4001 rtx dest_addr;
4002 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4003 rtx dest;
4004 enum insn_code icode;
4006 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4007 /* If there is a push pattern, use it.  Otherwise try the old way of
4008 throwing a MEM representing the push operation to the move expander. */
4009 icode = optab_handler (push_optab, mode);
4010 if (icode != CODE_FOR_nothing)
4012 struct expand_operand ops[1];
4014 create_input_operand (&ops[0], x, mode);
4015 if (maybe_expand_insn (icode, 1, ops))
4016 return;
4018 if (GET_MODE_SIZE (mode) == rounded_size)
4019 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4020 /* If we are to pad downward, adjust the stack pointer first and
4021 then store X into the stack location using an offset. This is
4022 because emit_move_insn does not know how to pad; it does not have
4023 access to type. */
4024 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4026 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4027 HOST_WIDE_INT offset;
4029 emit_move_insn (stack_pointer_rtx,
4030 expand_binop (Pmode,
4031 #ifdef STACK_GROWS_DOWNWARD
4032 sub_optab,
4033 #else
4034 add_optab,
4035 #endif
4036 stack_pointer_rtx,
4037 gen_int_mode (rounded_size, Pmode),
4038 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4040 offset = (HOST_WIDE_INT) padding_size;
4041 #ifdef STACK_GROWS_DOWNWARD
4042 if (STACK_PUSH_CODE == POST_DEC)
4043 /* We have already decremented the stack pointer, so get the
4044 previous value. */
4045 offset += (HOST_WIDE_INT) rounded_size;
4046 #else
4047 if (STACK_PUSH_CODE == POST_INC)
4048 /* We have already incremented the stack pointer, so get the
4049 previous value. */
4050 offset -= (HOST_WIDE_INT) rounded_size;
4051 #endif
4052 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4053 gen_int_mode (offset, Pmode));
4055 else
4057 #ifdef STACK_GROWS_DOWNWARD
4058 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4059 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4060 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4061 Pmode));
4062 #else
4063 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4064 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4065 gen_int_mode (rounded_size, Pmode));
4066 #endif
4067 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4070 dest = gen_rtx_MEM (mode, dest_addr);
4072 if (type != 0)
4074 set_mem_attributes (dest, type, 1);
4076 if (cfun->tail_call_marked)
4077 /* Function incoming arguments may overlap with sibling call
4078 outgoing arguments and we cannot allow reordering of reads
4079 from function arguments with stores to outgoing arguments
4080 of sibling calls. */
4081 set_mem_alias_set (dest, 0);
4083 emit_move_insn (dest, x);
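/* To illustrate the downward-padding path above, take a hypothetical
   target where PUSH_ROUNDING rounds a 2-byte HImode push up to 4
   bytes, FUNCTION_ARG_PADDING returns downward, the stack grows
   downward and STACK_PUSH_CODE is PRE_DEC: the stack pointer is
   first decremented by 4 and the value is then stored at sp + 2,
   leaving the 2 bytes of padding at the lower addresses.  */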
4086 /* Emit and annotate a single push insn. */
4088 static void
4089 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4091 int delta, old_delta = stack_pointer_delta;
4092 rtx_insn *prev = get_last_insn ();
4093 rtx_insn *last;
4095 emit_single_push_insn_1 (mode, x, type);
4097 last = get_last_insn ();
4099 /* Notice the common case where we emitted exactly one insn. */
4100 if (PREV_INSN (last) == prev)
4102 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4103 return;
4106 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4107 gcc_assert (delta == INT_MIN || delta == old_delta);
4109 #endif
4111 /* Generate code to push X onto the stack, assuming it has mode MODE and
4112 type TYPE.
4113 MODE is redundant except when X is a CONST_INT (since they don't
4114 carry mode info).
4115 SIZE is an rtx for the size of data to be copied (in bytes),
4116 needed only if X is BLKmode.
4118 ALIGN (in bits) is maximum alignment we can assume.
4120 If PARTIAL and REG are both nonzero, then copy the first PARTIAL
4121 bytes of X into registers starting with REG, and push the rest of X.
4122 The amount of space pushed is decreased by PARTIAL bytes.
4123 REG must be a hard register in this case.
4124 If REG is zero but PARTIAL is not, take all other actions for an
4125 argument partially in registers, but do not actually load any
4126 registers.
4128 EXTRA is the amount in bytes of extra space to leave next to this arg.
4129 This is ignored if an argument block has already been allocated.
4131 On a machine that lacks real push insns, ARGS_ADDR is the address of
4132 the bottom of the argument block for this call. We use indexing off there
4133 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4134 argument block has not been preallocated.
4136 ARGS_SO_FAR is the size of args previously pushed for this call.
4138 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4139 for arguments passed in registers. If nonzero, it will be the number
4140 of bytes required. */
4142 void
4143 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4144 unsigned int align, int partial, rtx reg, int extra,
4145 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4146 rtx alignment_pad)
4148 rtx xinner;
4149 enum direction stack_direction
4150 #ifdef STACK_GROWS_DOWNWARD
4151 = downward;
4152 #else
4153 = upward;
4154 #endif
4156 /* Decide where to pad the argument: `downward' for below,
4157 `upward' for above, or `none' for don't pad it.
4158 Default is below for small data on big-endian machines; else above. */
4159 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4161 /* Invert direction if stack is post-decrement.
4162 FIXME: why? */
4163 if (STACK_PUSH_CODE == POST_DEC)
4164 if (where_pad != none)
4165 where_pad = (where_pad == downward ? upward : downward);
4167 xinner = x;
4169 if (mode == BLKmode
4170 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4172 /* Copy a block into the stack, entirely or partially. */
4174 rtx temp;
4175 int used;
4176 int offset;
4177 int skip;
4179 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4180 used = partial - offset;
4182 if (mode != BLKmode)
4184 /* A value is to be stored in an insufficiently aligned
4185 stack slot; copy via a suitably aligned slot if
4186 necessary. */
4187 size = GEN_INT (GET_MODE_SIZE (mode));
4188 if (!MEM_P (xinner))
4190 temp = assign_temp (type, 1, 1);
4191 emit_move_insn (temp, xinner);
4192 xinner = temp;
4196 gcc_assert (size);
4198 /* USED is now the # of bytes we need not copy to the stack
4199 because registers will take care of them. */
4201 if (partial != 0)
4202 xinner = adjust_address (xinner, BLKmode, used);
4204 /* If the partial register-part of the arg counts in its stack size,
4205 skip the part of stack space corresponding to the registers.
4206 Otherwise, start copying to the beginning of the stack space,
4207 by setting SKIP to 0. */
4208 skip = (reg_parm_stack_space == 0) ? 0 : used;
4210 #ifdef PUSH_ROUNDING
4211 /* Do it with several push insns if that doesn't take lots of insns
4212 and if there is no difficulty with push insns that skip bytes
4213 on the stack for alignment purposes. */
4214 if (args_addr == 0
4215 && PUSH_ARGS
4216 && CONST_INT_P (size)
4217 && skip == 0
4218 && MEM_ALIGN (xinner) >= align
4219 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4220 /* Here we avoid the case of a structure whose weak alignment
4221 forces many pushes of a small amount of data,
4222 and such small pushes do rounding that causes trouble. */
4223 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4224 || align >= BIGGEST_ALIGNMENT
4225 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4226 == (align / BITS_PER_UNIT)))
4227 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4229 /* Push padding now if padding above and stack grows down,
4230 or if padding below and stack grows up.
4231 But if space already allocated, this has already been done. */
4232 if (extra && args_addr == 0
4233 && where_pad != none && where_pad != stack_direction)
4234 anti_adjust_stack (GEN_INT (extra));
4236 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4238 else
4239 #endif /* PUSH_ROUNDING */
4241 rtx target;
4243 /* Otherwise make space on the stack and copy the data
4244 to the address of that space. */
4246 /* Deduct words put into registers from the size we must copy. */
4247 if (partial != 0)
4249 if (CONST_INT_P (size))
4250 size = GEN_INT (INTVAL (size) - used);
4251 else
4252 size = expand_binop (GET_MODE (size), sub_optab, size,
4253 gen_int_mode (used, GET_MODE (size)),
4254 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4257 /* Get the address of the stack space.
4258 In this case, we do not deal with EXTRA separately.
4259 A single stack adjust will do. */
4260 if (! args_addr)
4262 temp = push_block (size, extra, where_pad == downward);
4263 extra = 0;
4265 else if (CONST_INT_P (args_so_far))
4266 temp = memory_address (BLKmode,
4267 plus_constant (Pmode, args_addr,
4268 skip + INTVAL (args_so_far)));
4269 else
4270 temp = memory_address (BLKmode,
4271 plus_constant (Pmode,
4272 gen_rtx_PLUS (Pmode,
4273 args_addr,
4274 args_so_far),
4275 skip));
4277 if (!ACCUMULATE_OUTGOING_ARGS)
4279 /* If the source is referenced relative to the stack pointer,
4280 copy it to another register to stabilize it. We do not need
4281 to do this if we know that we won't be changing sp. */
4283 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4284 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4285 temp = copy_to_reg (temp);
4288 target = gen_rtx_MEM (BLKmode, temp);
4290 /* We do *not* set_mem_attributes here, because incoming arguments
4291 may overlap with sibling call outgoing arguments and we cannot
4292 allow reordering of reads from function arguments with stores
4293 to outgoing arguments of sibling calls. We do, however, want
4294 to record the alignment of the stack slot. */
4295 /* ALIGN may well be better aligned than TYPE, e.g. due to
4296 PARM_BOUNDARY. Assume the caller isn't lying. */
4297 set_mem_align (target, align);
4299 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4302 else if (partial > 0)
4304 /* Scalar partly in registers. */
4306 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4307 int i;
4308 int not_stack;
4309 /* # bytes of start of argument
4310 that we must make space for but need not store. */
4311 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4312 int args_offset = INTVAL (args_so_far);
4313 int skip;
4315 /* Push padding now if padding above and stack grows down,
4316 or if padding below and stack grows up.
4317 But if space already allocated, this has already been done. */
4318 if (extra && args_addr == 0
4319 && where_pad != none && where_pad != stack_direction)
4320 anti_adjust_stack (GEN_INT (extra));
4322 /* If we make space by pushing it, we might as well push
4323 the real data. Otherwise, we can leave OFFSET nonzero
4324 and leave the space uninitialized. */
4325 if (args_addr == 0)
4326 offset = 0;
4328 /* Now NOT_STACK gets the number of words that we don't need to
4329 allocate on the stack. Convert OFFSET to words too. */
4330 not_stack = (partial - offset) / UNITS_PER_WORD;
4331 offset /= UNITS_PER_WORD;
4333 /* If the partial register-part of the arg counts in its stack size,
4334 skip the part of stack space corresponding to the registers.
4335 Otherwise, start copying to the beginning of the stack space,
4336 by setting SKIP to 0. */
4337 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4339 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4340 x = validize_mem (force_const_mem (mode, x));
4342 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4343 SUBREGs of such registers are not allowed. */
4344 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4345 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4346 x = copy_to_reg (x);
4348 /* Loop over all the words allocated on the stack for this arg. */
4349 /* We can do it by words, because any scalar bigger than a word
4350 has a size a multiple of a word. */
4351 for (i = size - 1; i >= not_stack; i--)
4352 if (i >= not_stack + offset)
4353 emit_push_insn (operand_subword_force (x, i, mode),
4354 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4355 0, args_addr,
4356 GEN_INT (args_offset + ((i - not_stack + skip)
4357 * UNITS_PER_WORD)),
4358 reg_parm_stack_space, alignment_pad);
4360 else
4362 rtx addr;
4363 rtx dest;
4365 /* Push padding now if padding above and stack grows down,
4366 or if padding below and stack grows up.
4367 But if space already allocated, this has already been done. */
4368 if (extra && args_addr == 0
4369 && where_pad != none && where_pad != stack_direction)
4370 anti_adjust_stack (GEN_INT (extra));
4372 #ifdef PUSH_ROUNDING
4373 if (args_addr == 0 && PUSH_ARGS)
4374 emit_single_push_insn (mode, x, type);
4375 else
4376 #endif
4378 if (CONST_INT_P (args_so_far))
4379 addr
4380 = memory_address (mode,
4381 plus_constant (Pmode, args_addr,
4382 INTVAL (args_so_far)));
4383 else
4384 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4385 args_so_far));
4386 dest = gen_rtx_MEM (mode, addr);
4388 /* We do *not* set_mem_attributes here, because incoming arguments
4389 may overlap with sibling call outgoing arguments and we cannot
4390 allow reordering of reads from function arguments with stores
4391 to outgoing arguments of sibling calls. We do, however, want
4392 to record the alignment of the stack slot. */
4393 /* ALIGN may well be better aligned than TYPE, e.g. due to
4394 PARM_BOUNDARY. Assume the caller isn't lying. */
4395 set_mem_align (dest, align);
4397 emit_move_insn (dest, x);
4401 /* If part should go in registers, copy that part
4402 into the appropriate registers. Do this now, at the end,
4403 since mem-to-mem copies above may do function calls. */
4404 if (partial > 0 && reg != 0)
4406 /* Handle calls that pass values in multiple non-contiguous locations.
4407 The Irix 6 ABI has examples of this. */
4408 if (GET_CODE (reg) == PARALLEL)
4409 emit_group_load (reg, x, type, -1);
4410 else
4412 gcc_assert (partial % UNITS_PER_WORD == 0);
4413 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4417 if (extra && args_addr == 0 && where_pad == stack_direction)
4418 anti_adjust_stack (GEN_INT (extra));
4420 if (alignment_pad && args_addr == 0)
4421 anti_adjust_stack (alignment_pad);
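/* For the "scalar partly in registers" case of emit_push_insn,
   consider a hypothetical 32-bit target with PARM_BOUNDARY == 32
   passing a DImode value with PARTIAL == 4, REG nonzero and
   REG_PARM_STACK_SPACE == 0: SIZE is 2 words and NOT_STACK is 1,
   so only word 1 of the value is pushed by the recursive
   emit_push_insn call, and the remaining word is copied into REG
   by move_block_to_reg at the end.  */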
4424 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4425 operations. */
4427 static rtx
4428 get_subtarget (rtx x)
4430 return (optimize
4431 || x == 0
4432 /* Only registers can be subtargets. */
4433 || !REG_P (x)
4434 /* Don't use hard regs to avoid extending their life. */
4435 || REGNO (x) < FIRST_PSEUDO_REGISTER
4436 ? 0 : x);
4439 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4440 FIELD is a bitfield. Returns true if the optimization was successful,
4441 and there's nothing else to do. */
4443 static bool
4444 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4445 unsigned HOST_WIDE_INT bitpos,
4446 unsigned HOST_WIDE_INT bitregion_start,
4447 unsigned HOST_WIDE_INT bitregion_end,
4448 machine_mode mode1, rtx str_rtx,
4449 tree to, tree src, bool reverse)
4451 machine_mode str_mode = GET_MODE (str_rtx);
4452 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4453 tree op0, op1;
4454 rtx value, result;
4455 optab binop;
4456 gimple srcstmt;
4457 enum tree_code code;
4459 if (mode1 != VOIDmode
4460 || bitsize >= BITS_PER_WORD
4461 || str_bitsize > BITS_PER_WORD
4462 || TREE_SIDE_EFFECTS (to)
4463 || TREE_THIS_VOLATILE (to))
4464 return false;
4466 STRIP_NOPS (src);
4467 if (TREE_CODE (src) != SSA_NAME)
4468 return false;
4469 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4470 return false;
4472 srcstmt = get_gimple_for_ssa_name (src);
4473 if (!srcstmt
4474 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4475 return false;
4477 code = gimple_assign_rhs_code (srcstmt);
4479 op0 = gimple_assign_rhs1 (srcstmt);
4481 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4482 to find its initialization. Hopefully the initialization will
4483 be from a bitfield load. */
4484 if (TREE_CODE (op0) == SSA_NAME)
4486 gimple op0stmt = get_gimple_for_ssa_name (op0);
4488 /* We want to eventually have OP0 be the same as TO, which
4489 should be a bitfield. */
4490 if (!op0stmt
4491 || !is_gimple_assign (op0stmt)
4492 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4493 return false;
4494 op0 = gimple_assign_rhs1 (op0stmt);
4497 op1 = gimple_assign_rhs2 (srcstmt);
4499 if (!operand_equal_p (to, op0, 0))
4500 return false;
4502 if (MEM_P (str_rtx))
4504 unsigned HOST_WIDE_INT offset1;
4506 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4507 str_mode = word_mode;
4508 str_mode = get_best_mode (bitsize, bitpos,
4509 bitregion_start, bitregion_end,
4510 MEM_ALIGN (str_rtx), str_mode, 0);
4511 if (str_mode == VOIDmode)
4512 return false;
4513 str_bitsize = GET_MODE_BITSIZE (str_mode);
4515 offset1 = bitpos;
4516 bitpos %= str_bitsize;
4517 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4518 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4520 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4521 return false;
4522 else
4523 gcc_assert (!reverse);
4525 /* If the bit field covers the whole REG/MEM, store_field
4526 will likely generate better code. */
4527 if (bitsize >= str_bitsize)
4528 return false;
4530 /* We can't handle fields split across multiple entities. */
4531 if (bitpos + bitsize > str_bitsize)
4532 return false;
4534 if (BYTES_BIG_ENDIAN)
4535 bitpos = str_bitsize - bitpos - bitsize;
4537 switch (code)
4539 case PLUS_EXPR:
4540 case MINUS_EXPR:
4541 /* For now, just optimize the case of the topmost bitfield
4542 where we don't need to do any masking and also
4543 1 bit bitfields where xor can be used.
4544 We might win by one instruction for the other bitfields
4545 too if insv/extv instructions aren't used, so that
4546 can be added later. */
4547 if ((reverse || bitpos + bitsize != str_bitsize)
4548 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4549 break;
4551 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4552 value = convert_modes (str_mode,
4553 TYPE_MODE (TREE_TYPE (op1)), value,
4554 TYPE_UNSIGNED (TREE_TYPE (op1)));
4556 /* We may be accessing data outside the field, which means
4557 we can alias adjacent data. */
4558 if (MEM_P (str_rtx))
4560 str_rtx = shallow_copy_rtx (str_rtx);
4561 set_mem_alias_set (str_rtx, 0);
4562 set_mem_expr (str_rtx, 0);
4565 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4567 value = expand_and (str_mode, value, const1_rtx, NULL);
4568 binop = xor_optab;
4570 else
4571 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4573 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4574 if (reverse)
4575 value = flip_storage_order (str_mode, value);
4576 result = expand_binop (str_mode, binop, str_rtx,
4577 value, str_rtx, 1, OPTAB_WIDEN);
4578 if (result != str_rtx)
4579 emit_move_insn (str_rtx, result);
4580 return true;
4582 case BIT_IOR_EXPR:
4583 case BIT_XOR_EXPR:
4584 if (TREE_CODE (op1) != INTEGER_CST)
4585 break;
4586 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4587 value = convert_modes (str_mode,
4588 TYPE_MODE (TREE_TYPE (op1)), value,
4589 TYPE_UNSIGNED (TREE_TYPE (op1)));
4591 /* We may be accessing data outside the field, which means
4592 we can alias adjacent data. */
4593 if (MEM_P (str_rtx))
4595 str_rtx = shallow_copy_rtx (str_rtx);
4596 set_mem_alias_set (str_rtx, 0);
4597 set_mem_expr (str_rtx, 0);
4600 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4601 if (bitpos + bitsize != str_bitsize)
4603 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4604 str_mode);
4605 value = expand_and (str_mode, value, mask, NULL_RTX);
4607 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4608 if (reverse)
4609 value = flip_storage_order (str_mode, value);
4610 result = expand_binop (str_mode, binop, str_rtx,
4611 value, str_rtx, 1, OPTAB_WIDEN);
4612 if (result != str_rtx)
4613 emit_move_insn (str_rtx, result);
4614 return true;
4616 default:
4617 break;
4620 return false;
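/* As an example of what optimize_bitfield_assignment_op catches,
   assume a 32-bit word whose topmost bitfield occupies bits 24..31:
   incrementing that field compiles to a single add of (1 << 24) to
   the containing word, with no mask or extract, and xor-ing a 1-bit
   field with 1 becomes a single xor of the containing word with a
   compile-time constant.  */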
4623 /* In the C++ memory model, consecutive bit fields in a structure are
4624 considered one memory location.
4626 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4627 returns the bit range of consecutive bits in which this COMPONENT_REF
4628 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4629 and *OFFSET may be adjusted in the process.
4631 If the access does not need to be restricted, 0 is returned in both
4632 *BITSTART and *BITEND. */
4634 static void
4635 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4636 unsigned HOST_WIDE_INT *bitend,
4637 tree exp,
4638 HOST_WIDE_INT *bitpos,
4639 tree *offset)
4641 HOST_WIDE_INT bitoffset;
4642 tree field, repr;
4644 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4646 field = TREE_OPERAND (exp, 1);
4647 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4648 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4649 need to limit the range we can access. */
4650 if (!repr)
4652 *bitstart = *bitend = 0;
4653 return;
4656 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4657 part of a larger bit field, then the representative does not serve any
4658 useful purpose. This can occur in Ada. */
4659 if (handled_component_p (TREE_OPERAND (exp, 0)))
4661 machine_mode rmode;
4662 HOST_WIDE_INT rbitsize, rbitpos;
4663 tree roffset;
4664 int unsignedp, reversep, volatilep = 0;
4665 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4666 &roffset, &rmode, &unsignedp, &reversep,
4667 &volatilep, false);
4668 if ((rbitpos % BITS_PER_UNIT) != 0)
4670 *bitstart = *bitend = 0;
4671 return;
4675 /* Compute the adjustment to bitpos from the offset of the field
4676 relative to the representative. DECL_FIELD_OFFSET of field and
4677 repr are the same by construction if they are not constants,
4678 see finish_bitfield_layout. */
4679 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4680 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4681 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4682 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4683 else
4684 bitoffset = 0;
4685 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4686 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4688 /* If the adjustment is larger than bitpos, we would have a negative bit
4689 position for the lower bound and this may wreak havoc later. Adjust
4690 offset and bitpos to make the lower bound non-negative in that case. */
4691 if (bitoffset > *bitpos)
4693 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4694 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4696 *bitpos += adjust;
4697 if (*offset == NULL_TREE)
4698 *offset = size_int (-adjust / BITS_PER_UNIT);
4699 else
4700 *offset
4701 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4702 *bitstart = 0;
4704 else
4705 *bitstart = *bitpos - bitoffset;
4707 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
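/* To illustrate get_bit_range, take a hypothetical struct
   { int a : 3; int b : 7; char c; } whose bit-field representative
   for A and B is a 16-bit field starting at bit 0: a store to B
   (BITPOS 3, BITSIZE 7) yields *BITSTART == 0 and *BITEND == 15,
   so the store may touch the bits of A and B but must not touch
   the separately addressable byte C.  */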
4710 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4711 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4712 DECL_RTL was not set yet, return NORTL. */
4714 static inline bool
4715 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4717 if (TREE_CODE (addr) != ADDR_EXPR)
4718 return false;
4720 tree base = TREE_OPERAND (addr, 0);
4722 if (!DECL_P (base)
4723 || TREE_ADDRESSABLE (base)
4724 || DECL_MODE (base) == BLKmode)
4725 return false;
4727 if (!DECL_RTL_SET_P (base))
4728 return nortl;
4730 return (!MEM_P (DECL_RTL (base)));
4733 /* Returns true if the MEM_REF REF refers to an object that does not
4734 reside in memory and has non-BLKmode. */
4736 static inline bool
4737 mem_ref_refers_to_non_mem_p (tree ref)
4739 tree base = TREE_OPERAND (ref, 0);
4740 return addr_expr_of_non_mem_decl_p_1 (base, false);
4743 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4744 is true, try generating a nontemporal store. */
4746 void
4747 expand_assignment (tree to, tree from, bool nontemporal)
4749 rtx to_rtx = 0;
4750 rtx result;
4751 machine_mode mode;
4752 unsigned int align;
4753 enum insn_code icode;
4755 /* Don't crash if the lhs of the assignment was erroneous. */
4756 if (TREE_CODE (to) == ERROR_MARK)
4758 expand_normal (from);
4759 return;
4762 /* Optimize away no-op moves without side-effects. */
4763 if (operand_equal_p (to, from, 0))
4764 return;
4766 /* Handle misaligned stores. */
4767 mode = TYPE_MODE (TREE_TYPE (to));
4768 if ((TREE_CODE (to) == MEM_REF
4769 || TREE_CODE (to) == TARGET_MEM_REF)
4770 && mode != BLKmode
4771 && !mem_ref_refers_to_non_mem_p (to)
4772 && ((align = get_object_alignment (to))
4773 < GET_MODE_ALIGNMENT (mode))
4774 && (((icode = optab_handler (movmisalign_optab, mode))
4775 != CODE_FOR_nothing)
4776 || SLOW_UNALIGNED_ACCESS (mode, align)))
4778 rtx reg, mem;
4780 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4781 reg = force_not_mem (reg);
4782 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4783 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4784 reg = flip_storage_order (mode, reg);
4786 if (icode != CODE_FOR_nothing)
4788 struct expand_operand ops[2];
4790 create_fixed_operand (&ops[0], mem);
4791 create_input_operand (&ops[1], reg, mode);
4792 /* The movmisalign<mode> pattern cannot fail, else the assignment
4793 would silently be omitted. */
4794 expand_insn (icode, 2, ops);
4796 else
4797 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4798 false);
4799 return;
4802 /* Assignment of a structure component needs special treatment
4803 if the structure component's rtx is not simply a MEM.
4804 Assignment of an array element at a constant index, and assignment of
4805 an array element in an unaligned packed structure field, has the same
4806 problem. Same for (partially) storing into a non-memory object. */
4807 if (handled_component_p (to)
4808 || (TREE_CODE (to) == MEM_REF
4809 && (REF_REVERSE_STORAGE_ORDER (to)
4810 || mem_ref_refers_to_non_mem_p (to)))
4811 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4813 machine_mode mode1;
4814 HOST_WIDE_INT bitsize, bitpos;
4815 unsigned HOST_WIDE_INT bitregion_start = 0;
4816 unsigned HOST_WIDE_INT bitregion_end = 0;
4817 tree offset;
4818 int unsignedp, reversep, volatilep = 0;
4819 tree tem;
4821 push_temp_slots ();
4822 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4823 &unsignedp, &reversep, &volatilep, true);
4825 /* Make sure bitpos is not negative, it can wreak havoc later. */
4826 if (bitpos < 0)
4828 gcc_assert (offset == NULL_TREE);
4829 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4830 ? 3 : exact_log2 (BITS_PER_UNIT)));
4831 bitpos &= BITS_PER_UNIT - 1;
4834 if (TREE_CODE (to) == COMPONENT_REF
4835 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4836 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4837 /* The C++ memory model naturally applies to byte-aligned fields.
4838 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4839 BITSIZE are not byte-aligned, there is no need to limit the range
4840 we can access. This can occur with packed structures in Ada. */
4841 else if (bitsize > 0
4842 && bitsize % BITS_PER_UNIT == 0
4843 && bitpos % BITS_PER_UNIT == 0)
4845 bitregion_start = bitpos;
4846 bitregion_end = bitpos + bitsize - 1;
4849 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4851 /* If the field has a mode, we want to access it in the
4852 field's mode, not the computed mode.
4853 If a MEM has VOIDmode (external with incomplete type),
4854 use BLKmode for it instead. */
4855 if (MEM_P (to_rtx))
4857 if (mode1 != VOIDmode)
4858 to_rtx = adjust_address (to_rtx, mode1, 0);
4859 else if (GET_MODE (to_rtx) == VOIDmode)
4860 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4863 if (offset != 0)
4865 machine_mode address_mode;
4866 rtx offset_rtx;
4868 if (!MEM_P (to_rtx))
4870 /* We can get constant negative offsets into arrays with broken
4871 user code. Translate this to a trap instead of ICEing. */
4872 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4873 expand_builtin_trap ();
4874 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4877 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4878 address_mode = get_address_mode (to_rtx);
4879 if (GET_MODE (offset_rtx) != address_mode)
4880 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4882 /* If we have an expression in OFFSET_RTX and a non-zero
4883 byte offset in BITPOS, adding the byte offset before the
4884 OFFSET_RTX results in better intermediate code, which makes
4885 later rtl optimization passes perform better.
4887 We prefer intermediate code like this:
4889 r124:DI=r123:DI+0x18
4890 [r124:DI]=r121:DI
4892 ... instead of ...
4894 r124:DI=r123:DI+0x10
4895 [r124:DI+0x8]=r121:DI
4897 This is only done for aligned data values, as these can
4898 be expected to result in single move instructions. */
4899 if (mode1 != VOIDmode
4900 && bitpos != 0
4901 && bitsize > 0
4902 && (bitpos % bitsize) == 0
4903 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4904 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4906 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4907 bitregion_start = 0;
4908 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4909 bitregion_end -= bitpos;
4910 bitpos = 0;
4913 to_rtx = offset_address (to_rtx, offset_rtx,
4914 highest_pow2_factor_for_target (to,
4915 offset));
4918 /* No action is needed if the target is not a memory and the field
4919 lies completely outside that target. This can occur if the source
4920 code contains an out-of-bounds access to a small array. */
4921 if (!MEM_P (to_rtx)
4922 && GET_MODE (to_rtx) != BLKmode
4923 && (unsigned HOST_WIDE_INT) bitpos
4924 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4926 expand_normal (from);
4927 result = NULL;
4929 /* Handle expand_expr of a complex value returning a CONCAT. */
4930 else if (GET_CODE (to_rtx) == CONCAT)
4932 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4933 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4934 && bitpos == 0
4935 && bitsize == mode_bitsize)
4936 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4937 else if (bitsize == mode_bitsize / 2
4938 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4939 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4940 nontemporal, reversep);
4941 else if (bitpos + bitsize <= mode_bitsize / 2)
4942 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4943 bitregion_start, bitregion_end,
4944 mode1, from, get_alias_set (to),
4945 nontemporal, reversep);
4946 else if (bitpos >= mode_bitsize / 2)
4947 result = store_field (XEXP (to_rtx, 1), bitsize,
4948 bitpos - mode_bitsize / 2,
4949 bitregion_start, bitregion_end,
4950 mode1, from, get_alias_set (to),
4951 nontemporal, reversep);
4952 else if (bitpos == 0 && bitsize == mode_bitsize)
4954 rtx from_rtx;
4955 result = expand_normal (from);
4956 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4957 TYPE_MODE (TREE_TYPE (from)), 0);
4958 emit_move_insn (XEXP (to_rtx, 0),
4959 read_complex_part (from_rtx, false));
4960 emit_move_insn (XEXP (to_rtx, 1),
4961 read_complex_part (from_rtx, true));
4963 else
4965 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4966 GET_MODE_SIZE (GET_MODE (to_rtx)));
4967 write_complex_part (temp, XEXP (to_rtx, 0), false);
4968 write_complex_part (temp, XEXP (to_rtx, 1), true);
4969 result = store_field (temp, bitsize, bitpos,
4970 bitregion_start, bitregion_end,
4971 mode1, from, get_alias_set (to),
4972 nontemporal, reversep);
4973 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4974 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4977 else
4979 if (MEM_P (to_rtx))
4981 /* If the field is at offset zero, we could have been given the
4982 DECL_RTX of the parent struct. Don't munge it. */
4983 to_rtx = shallow_copy_rtx (to_rtx);
4984 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4985 if (volatilep)
4986 MEM_VOLATILE_P (to_rtx) = 1;
4989 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4990 bitregion_start, bitregion_end,
4991 mode1, to_rtx, to, from,
4992 reversep))
4993 result = NULL;
4994 else
4995 result = store_field (to_rtx, bitsize, bitpos,
4996 bitregion_start, bitregion_end,
4997 mode1, from, get_alias_set (to),
4998 nontemporal, reversep);
5001 if (result)
5002 preserve_temp_slots (result);
5003 pop_temp_slots ();
5004 return;
5007 /* If the rhs is a function call and its value is not an aggregate,
5008 call the function before we start to compute the lhs.
5009 This is needed for correct code for cases such as
5010 val = setjmp (buf) on machines where reference to val
5011 requires loading up part of an address in a separate insn.
5013 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5014 since it might be a promoted variable where the zero- or sign- extension
5015 needs to be done. Handling this in the normal way is safe because no
5016 computation is done before the call. The same is true for SSA names. */
5017 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5018 && COMPLETE_TYPE_P (TREE_TYPE (from))
5019 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5020 && ! (((TREE_CODE (to) == VAR_DECL
5021 || TREE_CODE (to) == PARM_DECL
5022 || TREE_CODE (to) == RESULT_DECL)
5023 && REG_P (DECL_RTL (to)))
5024 || TREE_CODE (to) == SSA_NAME))
5026 rtx value;
5027 rtx bounds;
5029 push_temp_slots ();
5030 value = expand_normal (from);
5032 /* Split value and bounds to store them separately. */
5033 chkp_split_slot (value, &value, &bounds);
5035 if (to_rtx == 0)
5036 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5038 /* Handle calls that return values in multiple non-contiguous locations.
5039 The Irix 6 ABI has examples of this. */
5040 if (GET_CODE (to_rtx) == PARALLEL)
5042 if (GET_CODE (value) == PARALLEL)
5043 emit_group_move (to_rtx, value);
5044 else
5045 emit_group_load (to_rtx, value, TREE_TYPE (from),
5046 int_size_in_bytes (TREE_TYPE (from)));
5048 else if (GET_CODE (value) == PARALLEL)
5049 emit_group_store (to_rtx, value, TREE_TYPE (from),
5050 int_size_in_bytes (TREE_TYPE (from)));
5051 else if (GET_MODE (to_rtx) == BLKmode)
5053 /* Handle calls that return BLKmode values in registers. */
5054 if (REG_P (value))
5055 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5056 else
5057 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5059 else
5061 if (POINTER_TYPE_P (TREE_TYPE (to)))
5062 value = convert_memory_address_addr_space
5063 (GET_MODE (to_rtx), value,
5064 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5066 emit_move_insn (to_rtx, value);
5069 /* Store bounds if required. */
5070 if (bounds
5071 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5073 gcc_assert (MEM_P (to_rtx));
5074 chkp_emit_bounds_store (bounds, value, to_rtx);
5077 preserve_temp_slots (to_rtx);
5078 pop_temp_slots ();
5079 return;
5082 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5083 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5085 /* Don't move directly into a return register. */
5086 if (TREE_CODE (to) == RESULT_DECL
5087 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5089 rtx temp;
5091 push_temp_slots ();
5093 /* If the source is itself a return value, it still is in a pseudo at
5094 this point so we can move it back to the return register directly. */
5095 if (REG_P (to_rtx)
5096 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5097 && TREE_CODE (from) != CALL_EXPR)
5098 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5099 else
5100 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5102 /* Handle calls that return values in multiple non-contiguous locations.
5103 The Irix 6 ABI has examples of this. */
5104 if (GET_CODE (to_rtx) == PARALLEL)
5106 if (GET_CODE (temp) == PARALLEL)
5107 emit_group_move (to_rtx, temp);
5108 else
5109 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5110 int_size_in_bytes (TREE_TYPE (from)));
5112 else if (temp)
5113 emit_move_insn (to_rtx, temp);
5115 preserve_temp_slots (to_rtx);
5116 pop_temp_slots ();
5117 return;
5120 /* In case we are returning the contents of an object which overlaps
5121 the place the value is being stored, use a safe function when copying
5122 a value through a pointer into a structure value return block. */
5123 if (TREE_CODE (to) == RESULT_DECL
5124 && TREE_CODE (from) == INDIRECT_REF
5125 && ADDR_SPACE_GENERIC_P
5126 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5127 && refs_may_alias_p (to, from)
5128 && cfun->returns_struct
5129 && !cfun->returns_pcc_struct)
5131 rtx from_rtx, size;
5133 push_temp_slots ();
5134 size = expr_size (from);
5135 from_rtx = expand_normal (from);
5137 emit_library_call (memmove_libfunc, LCT_NORMAL,
5138 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5139 XEXP (from_rtx, 0), Pmode,
5140 convert_to_mode (TYPE_MODE (sizetype),
5141 size, TYPE_UNSIGNED (sizetype)),
5142 TYPE_MODE (sizetype));
5144 preserve_temp_slots (to_rtx);
5145 pop_temp_slots ();
5146 return;
5149 /* Compute FROM and store the value in the rtx we got. */
5151 push_temp_slots ();
5152 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5153 preserve_temp_slots (result);
5154 pop_temp_slots ();
5155 return;
5158 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5159 succeeded, false otherwise. */
5161 bool
5162 emit_storent_insn (rtx to, rtx from)
5164 struct expand_operand ops[2];
5165 machine_mode mode = GET_MODE (to);
5166 enum insn_code code = optab_handler (storent_optab, mode);
5168 if (code == CODE_FOR_nothing)
5169 return false;
5171 create_fixed_operand (&ops[0], to);
5172 create_input_operand (&ops[1], from, mode);
5173 return maybe_expand_insn (code, 2, ops);
5176 /* Generate code for computing expression EXP,
5177 and storing the value into TARGET.
5179 If the mode is BLKmode then we may return TARGET itself.
5180 It turns out that in BLKmode it doesn't cause a problem,
5181 because C has no operators that could combine two different
5182 assignments into the same BLKmode object with different values
5183 with no sequence point. Will other languages need this to
5184 be more thorough?
5186 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5187 stack, and block moves may need to be treated specially.
5189 If NONTEMPORAL is true, try using a nontemporal store instruction.
5191 If REVERSE is true, the store is to be done in reverse order.
5193 If BTARGET is not NULL then computed bounds of EXP are
5194 associated with BTARGET. */
5197 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5198 bool nontemporal, bool reverse, tree btarget)
5200 rtx temp;
5201 rtx alt_rtl = NULL_RTX;
5202 location_t loc = curr_insn_location ();
5204 if (VOID_TYPE_P (TREE_TYPE (exp)))
5206 /* C++ can generate ?: expressions with a throw expression in one
5207 branch and an rvalue in the other. Here, we resolve attempts to
5208 store the throw expression's nonexistent result. */
5209 gcc_assert (!call_param_p);
5210 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5211 return NULL_RTX;
5213 if (TREE_CODE (exp) == COMPOUND_EXPR)
5215 /* Perform first part of compound expression, then assign from second
5216 part. */
5217 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5218 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5219 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5220 call_param_p, nontemporal, reverse,
5221 btarget);
5223 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5225 /* For conditional expression, get safe form of the target. Then
5226 test the condition, doing the appropriate assignment on either
5227 side. This avoids the creation of unnecessary temporaries.
5228 For non-BLKmode, it is more efficient not to do this. */
5230 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5232 do_pending_stack_adjust ();
5233 NO_DEFER_POP;
5234 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5235 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5236 nontemporal, reverse, btarget);
5237 emit_jump_insn (gen_jump (lab2));
5238 emit_barrier ();
5239 emit_label (lab1);
5240 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5241 nontemporal, reverse, btarget);
5242 emit_label (lab2);
5243 OK_DEFER_POP;
5245 return NULL_RTX;
5247 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5248 /* If this is a scalar in a register that is stored in a wider mode
5249 than the declared mode, compute the result into its declared mode
5250 and then convert to the wider mode. Our value is the computed
5251 expression. */
5253 rtx inner_target = 0;
5255 /* We can do the conversion inside EXP, which will often result
5256 in some optimizations. Do the conversion in two steps: first
5257 change the signedness, if needed, then the extend. But don't
5258 do this if the type of EXP is a subtype of something else
5259 since then the conversion might involve more than just
5260 converting modes. */
5261 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5262 && TREE_TYPE (TREE_TYPE (exp)) == 0
5263 && GET_MODE_PRECISION (GET_MODE (target))
5264 == TYPE_PRECISION (TREE_TYPE (exp)))
5266 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5267 TYPE_UNSIGNED (TREE_TYPE (exp))))
5269 /* Some types, e.g. Fortran's logical*4, won't have a signed
5270 version, so use the mode instead. */
5271 tree ntype
5272 = (signed_or_unsigned_type_for
5273 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5274 if (ntype == NULL)
5275 ntype = lang_hooks.types.type_for_mode
5276 (TYPE_MODE (TREE_TYPE (exp)),
5277 SUBREG_PROMOTED_SIGN (target));
5279 exp = fold_convert_loc (loc, ntype, exp);
5282 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5283 (GET_MODE (SUBREG_REG (target)),
5284 SUBREG_PROMOTED_SIGN (target)),
5285 exp);
5287 inner_target = SUBREG_REG (target);
5290 temp = expand_expr (exp, inner_target, VOIDmode,
5291 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5293 /* Handle bounds returned by call. */
5294 if (TREE_CODE (exp) == CALL_EXPR)
5296 rtx bounds;
5297 chkp_split_slot (temp, &temp, &bounds);
5298 if (bounds && btarget)
5300 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5301 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5302 chkp_set_rtl_bounds (btarget, tmp);
5306 /* If TEMP is a VOIDmode constant, use convert_modes to make
5307 sure that we properly convert it. */
5308 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5310 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5311 temp, SUBREG_PROMOTED_SIGN (target));
5312 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5313 GET_MODE (target), temp,
5314 SUBREG_PROMOTED_SIGN (target));
5317 convert_move (SUBREG_REG (target), temp,
5318 SUBREG_PROMOTED_SIGN (target));
5320 return NULL_RTX;
5322 else if ((TREE_CODE (exp) == STRING_CST
5323 || (TREE_CODE (exp) == MEM_REF
5324 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5325 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5326 == STRING_CST
5327 && integer_zerop (TREE_OPERAND (exp, 1))))
5328 && !nontemporal && !call_param_p
5329 && MEM_P (target))
5331 /* Optimize initialization of an array with a STRING_CST. */
5332 HOST_WIDE_INT exp_len, str_copy_len;
5333 rtx dest_mem;
5334 tree str = TREE_CODE (exp) == STRING_CST
5335 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5337 exp_len = int_expr_size (exp);
5338 if (exp_len <= 0)
5339 goto normal_expr;
5341 if (TREE_STRING_LENGTH (str) <= 0)
5342 goto normal_expr;
5344 str_copy_len = strlen (TREE_STRING_POINTER (str));
5345 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5346 goto normal_expr;
5348 str_copy_len = TREE_STRING_LENGTH (str);
5349 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5350 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5352 str_copy_len += STORE_MAX_PIECES - 1;
5353 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5355 str_copy_len = MIN (str_copy_len, exp_len);
5356 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5357 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5358 MEM_ALIGN (target), false))
5359 goto normal_expr;
5361 dest_mem = target;
5363 dest_mem = store_by_pieces (dest_mem,
5364 str_copy_len, builtin_strncpy_read_str,
5365 CONST_CAST (char *,
5366 TREE_STRING_POINTER (str)),
5367 MEM_ALIGN (target), false,
5368 exp_len > str_copy_len ? 1 : 0);
5369 if (exp_len > str_copy_len)
5370 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5371 GEN_INT (exp_len - str_copy_len),
5372 BLOCK_OP_NORMAL);
5373 return NULL_RTX;
5375 else
5377 rtx tmp_target;
5379 normal_expr:
5380 /* If we want to use a nontemporal or a reverse order store, force the
5381 value into a register first. */
5382 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5383 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5384 (call_param_p
5385 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5386 &alt_rtl, false);
5388 /* Handle bounds returned by call. */
5389 if (TREE_CODE (exp) == CALL_EXPR)
5391 rtx bounds;
5392 chkp_split_slot (temp, &temp, &bounds);
5393 if (bounds && btarget)
5395 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5396 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5397 chkp_set_rtl_bounds (btarget, tmp);
5402 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5403 the same as that of TARGET, adjust the constant. This is needed, for
5404 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5405 only a word-sized value. */
5406 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5407 && TREE_CODE (exp) != ERROR_MARK
5408 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5409 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5410 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5412 /* If value was not generated in the target, store it there.
5413 Convert the value to TARGET's type first if necessary and emit the
5414 pending incrementations that have been queued when expanding EXP.
5415 Note that we cannot emit the whole queue blindly because this will
5416 effectively disable the POST_INC optimization later.
5418 If TEMP and TARGET compare equal according to rtx_equal_p, but
5419 one or both of them are volatile memory refs, we have to distinguish
5420 two cases:
5421 - expand_expr has used TARGET. In this case, we must not generate
5422 another copy. This can be detected by TARGET being equal according
5423 to == .
5424 - expand_expr has not used TARGET - that means that the source just
5425 happens to have the same RTX form. Since temp will have been created
5426 by expand_expr, it will compare unequal according to == .
5427 We must generate a copy in this case, to reach the correct number
5428 of volatile memory references. */
5430 if ((! rtx_equal_p (temp, target)
5431 || (temp != target && (side_effects_p (temp)
5432 || side_effects_p (target))))
5433 && TREE_CODE (exp) != ERROR_MARK
5434 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5435 but TARGET is not valid memory reference, TEMP will differ
5436 from TARGET although it is really the same location. */
5437 && !(alt_rtl
5438 && rtx_equal_p (alt_rtl, target)
5439 && !side_effects_p (alt_rtl)
5440 && !side_effects_p (target))
5441 /* If there's nothing to copy, don't bother. Don't call
5442 expr_size unless necessary, because some front ends' (C++)
5443 expr_size hook must not be given objects that are not
5444 supposed to be bit-copied or bit-initialized. */
5445 && expr_size (exp) != const0_rtx)
5447 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5449 if (GET_MODE (target) == BLKmode)
5451 /* Handle calls that return BLKmode values in registers. */
5452 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5453 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5454 else
5455 store_bit_field (target,
5456 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5457 0, 0, 0, GET_MODE (temp), temp, reverse);
5459 else
5460 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5463 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5465 /* Handle copying a string constant into an array. The string
5466 constant may be shorter than the array. So copy just the string's
5467 actual length, and clear the rest. First get the size of the data
5468 type of the string, which is actually the size of the target. */
5469 rtx size = expr_size (exp);
5471 if (CONST_INT_P (size)
5472 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5473 emit_block_move (target, temp, size,
5474 (call_param_p
5475 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5476 else
5478 machine_mode pointer_mode
5479 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5480 machine_mode address_mode = get_address_mode (target);
5482 /* Compute the size of the data to copy from the string. */
5483 tree copy_size
5484 = size_binop_loc (loc, MIN_EXPR,
5485 make_tree (sizetype, size),
5486 size_int (TREE_STRING_LENGTH (exp)));
5487 rtx copy_size_rtx
5488 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5489 (call_param_p
5490 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5491 rtx_code_label *label = 0;
5493 /* Copy that much. */
5494 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5495 TYPE_UNSIGNED (sizetype));
5496 emit_block_move (target, temp, copy_size_rtx,
5497 (call_param_p
5498 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5500 /* Figure out how much is left in TARGET that we have to clear.
5501 Do all calculations in pointer_mode. */
5502 if (CONST_INT_P (copy_size_rtx))
5504 size = plus_constant (address_mode, size,
5505 -INTVAL (copy_size_rtx));
5506 target = adjust_address (target, BLKmode,
5507 INTVAL (copy_size_rtx));
5509 else
5511 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5512 copy_size_rtx, NULL_RTX, 0,
5513 OPTAB_LIB_WIDEN);
5515 if (GET_MODE (copy_size_rtx) != address_mode)
5516 copy_size_rtx = convert_to_mode (address_mode,
5517 copy_size_rtx,
5518 TYPE_UNSIGNED (sizetype));
5520 target = offset_address (target, copy_size_rtx,
5521 highest_pow2_factor (copy_size));
5522 label = gen_label_rtx ();
5523 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5524 GET_MODE (size), 0, label);
5527 if (size != const0_rtx)
5528 clear_storage (target, size, BLOCK_OP_NORMAL);
5530 if (label)
5531 emit_label (label);
5534 /* Handle calls that return values in multiple non-contiguous locations.
5535 The Irix 6 ABI has examples of this. */
5536 else if (GET_CODE (target) == PARALLEL)
5538 if (GET_CODE (temp) == PARALLEL)
5539 emit_group_move (target, temp);
5540 else
5541 emit_group_load (target, temp, TREE_TYPE (exp),
5542 int_size_in_bytes (TREE_TYPE (exp)));
5544 else if (GET_CODE (temp) == PARALLEL)
5545 emit_group_store (target, temp, TREE_TYPE (exp),
5546 int_size_in_bytes (TREE_TYPE (exp)));
5547 else if (GET_MODE (temp) == BLKmode)
5548 emit_block_move (target, temp, expr_size (exp),
5549 (call_param_p
5550 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5551 /* If we emit a nontemporal store, there is nothing else to do. */
5552 else if (nontemporal && emit_storent_insn (target, temp))
5554 else
5556 if (reverse)
5557 temp = flip_storage_order (GET_MODE (target), temp);
5558 temp = force_operand (temp, target);
5559 if (temp != target)
5560 emit_move_insn (target, temp);
5564 return NULL_RTX;
5567 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5569 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5570 bool reverse)
5572 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5573 reverse, NULL);
5576 /* Return true if field F of structure TYPE is a flexible array. */
5578 static bool
5579 flexible_array_member_p (const_tree f, const_tree type)
5581 const_tree tf;
5583 tf = TREE_TYPE (f);
5584 return (DECL_CHAIN (f) == NULL
5585 && TREE_CODE (tf) == ARRAY_TYPE
5586 && TYPE_DOMAIN (tf)
5587 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5588 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5589 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5590 && int_size_in_bytes (type) >= 0);
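/* As an illustration (the struct name below is invented for this comment):
   given

     struct s_example { int len; char data[]; };

   the DATA field satisfies flexible_array_member_p: it is the last field,
   its type is an array whose domain has a zero lower bound and no upper
   bound, and the enclosing struct still has a known constant size. */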
5593 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5594 must have in order for it to completely initialize a value of type TYPE.
5595 Return -1 if the number isn't known.
5597 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5599 static HOST_WIDE_INT
5600 count_type_elements (const_tree type, bool for_ctor_p)
5602 switch (TREE_CODE (type))
5604 case ARRAY_TYPE:
5606 tree nelts;
5608 nelts = array_type_nelts (type);
5609 if (nelts && tree_fits_uhwi_p (nelts))
5611 unsigned HOST_WIDE_INT n;
5613 n = tree_to_uhwi (nelts) + 1;
5614 if (n == 0 || for_ctor_p)
5615 return n;
5616 else
5617 return n * count_type_elements (TREE_TYPE (type), false);
5619 return for_ctor_p ? -1 : 1;
5622 case RECORD_TYPE:
5624 unsigned HOST_WIDE_INT n;
5625 tree f;
5627 n = 0;
5628 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5629 if (TREE_CODE (f) == FIELD_DECL)
5631 if (!for_ctor_p)
5632 n += count_type_elements (TREE_TYPE (f), false);
5633 else if (!flexible_array_member_p (f, type))
5634 /* Don't count flexible arrays, which are not supposed
5635 to be initialized. */
5636 n += 1;
5639 return n;
5642 case UNION_TYPE:
5643 case QUAL_UNION_TYPE:
5645 tree f;
5646 HOST_WIDE_INT n, m;
5648 gcc_assert (!for_ctor_p);
5649 /* Estimate the number of scalars in each field and pick the
5650 maximum. Other estimates would do instead; the idea is simply
5651 to make sure that the estimate is not sensitive to the ordering
5652 of the fields. */
5653 n = 1;
5654 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5655 if (TREE_CODE (f) == FIELD_DECL)
5657 m = count_type_elements (TREE_TYPE (f), false);
5658 /* If the field doesn't span the whole union, add an extra
5659 scalar for the rest. */
5660 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5661 TYPE_SIZE (type)) != 1)
5662 m++;
5663 if (n < m)
5664 n = m;
5666 return n;
5669 case COMPLEX_TYPE:
5670 return 2;
5672 case VECTOR_TYPE:
5673 return TYPE_VECTOR_SUBPARTS (type);
5675 case INTEGER_TYPE:
5676 case REAL_TYPE:
5677 case FIXED_POINT_TYPE:
5678 case ENUMERAL_TYPE:
5679 case BOOLEAN_TYPE:
5680 case POINTER_TYPE:
5681 case OFFSET_TYPE:
5682 case REFERENCE_TYPE:
5683 case NULLPTR_TYPE:
5684 return 1;
5686 case ERROR_MARK:
5687 return 0;
5689 case VOID_TYPE:
5690 case METHOD_TYPE:
5691 case FUNCTION_TYPE:
5692 case LANG_TYPE:
5693 default:
5694 gcc_unreachable ();
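/* A rough worked example for the function above (the type is invented for
   this comment): for

     struct p_example { int x; int y[3]; };

   count_type_elements (p_example, false) is 4 -- one scalar for X plus
   3 * 1 for the elements of Y -- whereas count_type_elements (p_example,
   true) is 2, the number of top-level initializers a constructor needs in
   order to be complete. */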
5698 /* Helper for categorize_ctor_elements. Identical interface. */
5700 static bool
5701 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5702 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5704 unsigned HOST_WIDE_INT idx;
5705 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5706 tree value, purpose, elt_type;
5708 /* Whether CTOR is a valid constant initializer, in accordance with what
5709 initializer_constant_valid_p does. If inferred from the constructor
5710 elements, true until proven otherwise. */
5711 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5712 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5714 nz_elts = 0;
5715 init_elts = 0;
5716 num_fields = 0;
5717 elt_type = NULL_TREE;
5719 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5721 HOST_WIDE_INT mult = 1;
5723 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5725 tree lo_index = TREE_OPERAND (purpose, 0);
5726 tree hi_index = TREE_OPERAND (purpose, 1);
5728 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5729 mult = (tree_to_uhwi (hi_index)
5730 - tree_to_uhwi (lo_index) + 1);
5732 num_fields += mult;
5733 elt_type = TREE_TYPE (value);
5735 switch (TREE_CODE (value))
5737 case CONSTRUCTOR:
5739 HOST_WIDE_INT nz = 0, ic = 0;
5741 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5742 p_complete);
5744 nz_elts += mult * nz;
5745 init_elts += mult * ic;
5747 if (const_from_elts_p && const_p)
5748 const_p = const_elt_p;
5750 break;
5752 case INTEGER_CST:
5753 case REAL_CST:
5754 case FIXED_CST:
5755 if (!initializer_zerop (value))
5756 nz_elts += mult;
5757 init_elts += mult;
5758 break;
5760 case STRING_CST:
5761 nz_elts += mult * TREE_STRING_LENGTH (value);
5762 init_elts += mult * TREE_STRING_LENGTH (value);
5763 break;
5765 case COMPLEX_CST:
5766 if (!initializer_zerop (TREE_REALPART (value)))
5767 nz_elts += mult;
5768 if (!initializer_zerop (TREE_IMAGPART (value)))
5769 nz_elts += mult;
5770 init_elts += mult;
5771 break;
5773 case VECTOR_CST:
5775 unsigned i;
5776 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5778 tree v = VECTOR_CST_ELT (value, i);
5779 if (!initializer_zerop (v))
5780 nz_elts += mult;
5781 init_elts += mult;
5784 break;
5786 default:
5788 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5789 nz_elts += mult * tc;
5790 init_elts += mult * tc;
5792 if (const_from_elts_p && const_p)
5793 const_p = initializer_constant_valid_p (value, elt_type)
5794 != NULL_TREE;
5796 break;
5800 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5801 num_fields, elt_type))
5802 *p_complete = false;
5804 *p_nz_elts += nz_elts;
5805 *p_init_elts += init_elts;
5807 return const_p;
5810 /* Examine CTOR to discover:
5811 * how many scalar fields are set to nonzero values,
5812 and place it in *P_NZ_ELTS;
5813 * how many scalar fields in total are in CTOR,
5814 and place it in *P_INIT_ELTS.
5815 * whether the constructor is complete -- in the sense that every
5816 meaningful byte is explicitly given a value --
5817 and place it in *P_COMPLETE.
5819 Return whether or not CTOR is a valid static constant initializer, the same
5820 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5822 bool
5823 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5824 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5826 *p_nz_elts = 0;
5827 *p_init_elts = 0;
5828 *p_complete = true;
5830 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
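/* For instance (a hypothetical initializer, shown only to illustrate the
   counters): for

     struct { int a; int b; } s_example = { 0, 5 };

   the CONSTRUCTOR gives *P_NZ_ELTS == 1 (only B is nonzero),
   *P_INIT_ELTS == 2, and *P_COMPLETE == true because both fields of the
   struct are explicitly covered. */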
5833 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5834 of which had type LAST_TYPE. Each element was itself a complete
5835 initializer, in the sense that every meaningful byte was explicitly
5836 given a value. Return true if the same is true for the constructor
5837 as a whole. */
5839 bool
5840 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5841 const_tree last_type)
5843 if (TREE_CODE (type) == UNION_TYPE
5844 || TREE_CODE (type) == QUAL_UNION_TYPE)
5846 if (num_elts == 0)
5847 return false;
5849 gcc_assert (num_elts == 1 && last_type);
5851 /* ??? We could look at each element of the union, and find the
5852 largest element. Which would avoid comparing the size of the
5853 initialized element against any tail padding in the union.
5854 Doesn't seem worth the effort... */
5855 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5858 return count_type_elements (type, true) == num_elts;
5861 /* Return 1 if EXP contains mostly (3/4) zeros. */
5863 static int
5864 mostly_zeros_p (const_tree exp)
5866 if (TREE_CODE (exp) == CONSTRUCTOR)
5868 HOST_WIDE_INT nz_elts, init_elts;
5869 bool complete_p;
5871 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5872 return !complete_p || nz_elts < init_elts / 4;
5875 return initializer_zerop (exp);
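/* E.g. an 8-element integer CONSTRUCTOR with a single nonzero entry has
   nz_elts == 1 and init_elts == 8, so 1 < 8 / 4 holds and the value is
   treated as mostly zeros; an incomplete constructor counts as mostly
   zeros as well, since the missing elements are implicitly zero. */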
5878 /* Return 1 if EXP contains all zeros. */
5880 static int
5881 all_zeros_p (const_tree exp)
5883 if (TREE_CODE (exp) == CONSTRUCTOR)
5885 HOST_WIDE_INT nz_elts, init_elts;
5886 bool complete_p;
5888 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5889 return nz_elts == 0;
5892 return initializer_zerop (exp);
5895 /* Helper function for store_constructor.
5896 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5897 CLEARED is as for store_constructor.
5898 ALIAS_SET is the alias set to use for any stores.
5899 If REVERSE is true, the store is to be done in reverse order.
5901 This provides a recursive shortcut back to store_constructor when it isn't
5902 necessary to go through store_field. This is so that we can pass through
5903 the cleared field to let store_constructor know that we may not have to
5904 clear a substructure if the outer structure has already been cleared. */
5906 static void
5907 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5908 HOST_WIDE_INT bitpos, machine_mode mode,
5909 tree exp, int cleared,
5910 alias_set_type alias_set, bool reverse)
5912 if (TREE_CODE (exp) == CONSTRUCTOR
5913 /* We can only call store_constructor recursively if the size and
5914 bit position are on a byte boundary. */
5915 && bitpos % BITS_PER_UNIT == 0
5916 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5917 /* If we have a nonzero bitpos for a register target, then we just
5918 let store_field do the bitfield handling. This is unlikely to
5919 generate unnecessary clear instructions anyway. */
5920 && (bitpos == 0 || MEM_P (target)))
5922 if (MEM_P (target))
5923 target
5924 = adjust_address (target,
5925 GET_MODE (target) == BLKmode
5926 || 0 != (bitpos
5927 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5928 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5931 /* Update the alias set, if required. */
5932 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5933 && MEM_ALIAS_SET (target) != 0)
5935 target = copy_rtx (target);
5936 set_mem_alias_set (target, alias_set);
5939 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5940 reverse);
5942 else
5943 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
5944 reverse);
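/* For instance, when a nested CONSTRUCTOR initializes a byte-aligned
   substructure of an already-cleared outer object, the call above goes
   straight back to store_constructor with CLEARED still set, so the
   zero-valued inner fields are simply skipped rather than being cleared
   a second time. */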
5948 /* Returns the number of FIELD_DECLs in TYPE. */
5950 static int
5951 fields_length (const_tree type)
5953 tree t = TYPE_FIELDS (type);
5954 int count = 0;
5956 for (; t; t = DECL_CHAIN (t))
5957 if (TREE_CODE (t) == FIELD_DECL)
5958 ++count;
5960 return count;
5964 /* Store the value of constructor EXP into the rtx TARGET.
5965 TARGET is either a REG or a MEM; we know it cannot conflict, since
5966 safe_from_p has been called.
5967 CLEARED is true if TARGET is known to have been zero'd.
5968 SIZE is the number of bytes of TARGET we are allowed to modify: this
5969 may not be the same as the size of EXP if we are assigning to a field
5970 which has been packed to exclude padding bits.
5971 If REVERSE is true, the store is to be done in reverse order. */
5973 static void
5974 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
5975 bool reverse)
5977 tree type = TREE_TYPE (exp);
5978 #ifdef WORD_REGISTER_OPERATIONS
5979 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5980 #endif
5982 switch (TREE_CODE (type))
5984 case RECORD_TYPE:
5985 case UNION_TYPE:
5986 case QUAL_UNION_TYPE:
5988 unsigned HOST_WIDE_INT idx;
5989 tree field, value;
5991 /* The storage order is specified for every aggregate type. */
5992 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
5994 /* If size is zero or the target is already cleared, do nothing. */
5995 if (size == 0 || cleared)
5996 cleared = 1;
5997 /* We either clear the aggregate or indicate the value is dead. */
5998 else if ((TREE_CODE (type) == UNION_TYPE
5999 || TREE_CODE (type) == QUAL_UNION_TYPE)
6000 && ! CONSTRUCTOR_ELTS (exp))
6001 /* If the constructor is empty, clear the union. */
6003 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6004 cleared = 1;
6007 /* If we are building a static constructor into a register,
6008 set the initial value as zero so we can fold the value into
6009 a constant. But if more than one register is involved,
6010 this probably loses. */
6011 else if (REG_P (target) && TREE_STATIC (exp)
6012 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6014 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6015 cleared = 1;
6018 /* If the constructor has fewer fields than the structure or
6019 if we are initializing the structure to mostly zeros, clear
6020 the whole structure first. Don't do this if TARGET is a
6021 register whose mode size isn't equal to SIZE since
6022 clear_storage can't handle this case. */
6023 else if (size > 0
6024 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6025 != fields_length (type))
6026 || mostly_zeros_p (exp))
6027 && (!REG_P (target)
6028 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6029 == size)))
6031 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6032 cleared = 1;
6035 if (REG_P (target) && !cleared)
6036 emit_clobber (target);
6038 /* Store each element of the constructor into the
6039 corresponding field of TARGET. */
6040 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6042 machine_mode mode;
6043 HOST_WIDE_INT bitsize;
6044 HOST_WIDE_INT bitpos = 0;
6045 tree offset;
6046 rtx to_rtx = target;
6048 /* Just ignore missing fields. We cleared the whole
6049 structure, above, if any fields are missing. */
6050 if (field == 0)
6051 continue;
6053 if (cleared && initializer_zerop (value))
6054 continue;
6056 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6057 bitsize = tree_to_uhwi (DECL_SIZE (field));
6058 else
6059 bitsize = -1;
6061 mode = DECL_MODE (field);
6062 if (DECL_BIT_FIELD (field))
6063 mode = VOIDmode;
6065 offset = DECL_FIELD_OFFSET (field);
6066 if (tree_fits_shwi_p (offset)
6067 && tree_fits_shwi_p (bit_position (field)))
6069 bitpos = int_bit_position (field);
6070 offset = 0;
6072 else
6073 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6075 if (offset)
6077 machine_mode address_mode;
6078 rtx offset_rtx;
6080 offset
6081 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6082 make_tree (TREE_TYPE (exp),
6083 target));
6085 offset_rtx = expand_normal (offset);
6086 gcc_assert (MEM_P (to_rtx));
6088 address_mode = get_address_mode (to_rtx);
6089 if (GET_MODE (offset_rtx) != address_mode)
6090 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6092 to_rtx = offset_address (to_rtx, offset_rtx,
6093 highest_pow2_factor (offset));
6096 #ifdef WORD_REGISTER_OPERATIONS
6097 /* If this initializes a field that is smaller than a
6098 word, at the start of a word, try to widen it to a full
6099 word. This special case allows us to output C++ member
6100 function initializations in a form that the optimizers
6101 can understand. */
6102 if (REG_P (target)
6103 && bitsize < BITS_PER_WORD
6104 && bitpos % BITS_PER_WORD == 0
6105 && GET_MODE_CLASS (mode) == MODE_INT
6106 && TREE_CODE (value) == INTEGER_CST
6107 && exp_size >= 0
6108 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6110 tree type = TREE_TYPE (value);
6112 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6114 type = lang_hooks.types.type_for_mode
6115 (word_mode, TYPE_UNSIGNED (type));
6116 value = fold_convert (type, value);
6119 if (BYTES_BIG_ENDIAN)
6120 value
6121 = fold_build2 (LSHIFT_EXPR, type, value,
6122 build_int_cst (type,
6123 BITS_PER_WORD - bitsize));
6124 bitsize = BITS_PER_WORD;
6125 mode = word_mode;
6127 #endif
6129 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6130 && DECL_NONADDRESSABLE_P (field))
6132 to_rtx = copy_rtx (to_rtx);
6133 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6136 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6137 value, cleared,
6138 get_alias_set (TREE_TYPE (field)),
6139 reverse);
6141 break;
6143 case ARRAY_TYPE:
6145 tree value, index;
6146 unsigned HOST_WIDE_INT i;
6147 int need_to_clear;
6148 tree domain;
6149 tree elttype = TREE_TYPE (type);
6150 int const_bounds_p;
6151 HOST_WIDE_INT minelt = 0;
6152 HOST_WIDE_INT maxelt = 0;
6154 /* The storage order is specified for every aggregate type. */
6155 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6157 domain = TYPE_DOMAIN (type);
6158 const_bounds_p = (TYPE_MIN_VALUE (domain)
6159 && TYPE_MAX_VALUE (domain)
6160 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6161 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6163 /* If we have constant bounds for the range of the type, get them. */
6164 if (const_bounds_p)
6166 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6167 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6170 /* If the constructor has fewer elements than the array, clear
6171 the whole array first. Similarly if this is a static
6172 constructor of a non-BLKmode object. */
6173 if (cleared)
6174 need_to_clear = 0;
6175 else if (REG_P (target) && TREE_STATIC (exp))
6176 need_to_clear = 1;
6177 else
6179 unsigned HOST_WIDE_INT idx;
6180 tree index, value;
6181 HOST_WIDE_INT count = 0, zero_count = 0;
6182 need_to_clear = ! const_bounds_p;
6184 /* This loop is a more accurate version of the loop in
6185 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6186 is also needed to check for missing elements. */
6187 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6189 HOST_WIDE_INT this_node_count;
6191 if (need_to_clear)
6192 break;
6194 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6196 tree lo_index = TREE_OPERAND (index, 0);
6197 tree hi_index = TREE_OPERAND (index, 1);
6199 if (! tree_fits_uhwi_p (lo_index)
6200 || ! tree_fits_uhwi_p (hi_index))
6202 need_to_clear = 1;
6203 break;
6206 this_node_count = (tree_to_uhwi (hi_index)
6207 - tree_to_uhwi (lo_index) + 1);
6209 else
6210 this_node_count = 1;
6212 count += this_node_count;
6213 if (mostly_zeros_p (value))
6214 zero_count += this_node_count;
6217 /* Clear the entire array first if there are any missing
6218 elements, or if the incidence of zero elements is >=
6219 75%. */
6220 if (! need_to_clear
6221 && (count < maxelt - minelt + 1
6222 || 4 * zero_count >= 3 * count))
6223 need_to_clear = 1;
6226 if (need_to_clear && size > 0)
6228 if (REG_P (target))
6229 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6230 else
6231 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6232 cleared = 1;
6235 if (!cleared && REG_P (target))
6236 /* Inform later passes that the old value is dead. */
6237 emit_clobber (target);
6239 /* Store each element of the constructor into the
6240 corresponding element of TARGET, determined by counting the
6241 elements. */
6242 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6244 machine_mode mode;
6245 HOST_WIDE_INT bitsize;
6246 HOST_WIDE_INT bitpos;
6247 rtx xtarget = target;
6249 if (cleared && initializer_zerop (value))
6250 continue;
6252 mode = TYPE_MODE (elttype);
6253 if (mode == BLKmode)
6254 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6255 ? tree_to_uhwi (TYPE_SIZE (elttype))
6256 : -1);
6257 else
6258 bitsize = GET_MODE_BITSIZE (mode);
6260 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6262 tree lo_index = TREE_OPERAND (index, 0);
6263 tree hi_index = TREE_OPERAND (index, 1);
6264 rtx index_r, pos_rtx;
6265 HOST_WIDE_INT lo, hi, count;
6266 tree position;
6268 /* If the range is constant and "small", unroll the loop. */
6269 if (const_bounds_p
6270 && tree_fits_shwi_p (lo_index)
6271 && tree_fits_shwi_p (hi_index)
6272 && (lo = tree_to_shwi (lo_index),
6273 hi = tree_to_shwi (hi_index),
6274 count = hi - lo + 1,
6275 (!MEM_P (target)
6276 || count <= 2
6277 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6278 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6279 <= 40 * 8)))))
6281 lo -= minelt; hi -= minelt;
6282 for (; lo <= hi; lo++)
6284 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6286 if (MEM_P (target)
6287 && !MEM_KEEP_ALIAS_SET_P (target)
6288 && TREE_CODE (type) == ARRAY_TYPE
6289 && TYPE_NONALIASED_COMPONENT (type))
6291 target = copy_rtx (target);
6292 MEM_KEEP_ALIAS_SET_P (target) = 1;
6295 store_constructor_field
6296 (target, bitsize, bitpos, mode, value, cleared,
6297 get_alias_set (elttype), reverse);
6300 else
6302 rtx_code_label *loop_start = gen_label_rtx ();
6303 rtx_code_label *loop_end = gen_label_rtx ();
6304 tree exit_cond;
6306 expand_normal (hi_index);
6308 index = build_decl (EXPR_LOCATION (exp),
6309 VAR_DECL, NULL_TREE, domain);
6310 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6311 SET_DECL_RTL (index, index_r);
6312 store_expr (lo_index, index_r, 0, false, reverse);
6314 /* Build the head of the loop. */
6315 do_pending_stack_adjust ();
6316 emit_label (loop_start);
6318 /* Assign value to element index. */
6319 position =
6320 fold_convert (ssizetype,
6321 fold_build2 (MINUS_EXPR,
6322 TREE_TYPE (index),
6323 index,
6324 TYPE_MIN_VALUE (domain)));
6326 position =
6327 size_binop (MULT_EXPR, position,
6328 fold_convert (ssizetype,
6329 TYPE_SIZE_UNIT (elttype)));
6331 pos_rtx = expand_normal (position);
6332 xtarget = offset_address (target, pos_rtx,
6333 highest_pow2_factor (position));
6334 xtarget = adjust_address (xtarget, mode, 0);
6335 if (TREE_CODE (value) == CONSTRUCTOR)
6336 store_constructor (value, xtarget, cleared,
6337 bitsize / BITS_PER_UNIT, reverse);
6338 else
6339 store_expr (value, xtarget, 0, false, reverse);
6341 /* Generate a conditional jump to exit the loop. */
6342 exit_cond = build2 (LT_EXPR, integer_type_node,
6343 index, hi_index);
6344 jumpif (exit_cond, loop_end, -1);
6346 /* Update the loop counter, and jump to the head of
6347 the loop. */
6348 expand_assignment (index,
6349 build2 (PLUS_EXPR, TREE_TYPE (index),
6350 index, integer_one_node),
6351 false);
6353 emit_jump (loop_start);
6355 /* Build the end of the loop. */
6356 emit_label (loop_end);
6359 else if ((index != 0 && ! tree_fits_shwi_p (index))
6360 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6362 tree position;
6364 if (index == 0)
6365 index = ssize_int (1);
6367 if (minelt)
6368 index = fold_convert (ssizetype,
6369 fold_build2 (MINUS_EXPR,
6370 TREE_TYPE (index),
6371 index,
6372 TYPE_MIN_VALUE (domain)));
6374 position =
6375 size_binop (MULT_EXPR, index,
6376 fold_convert (ssizetype,
6377 TYPE_SIZE_UNIT (elttype)));
6378 xtarget = offset_address (target,
6379 expand_normal (position),
6380 highest_pow2_factor (position));
6381 xtarget = adjust_address (xtarget, mode, 0);
6382 store_expr (value, xtarget, 0, false, reverse);
6384 else
6386 if (index != 0)
6387 bitpos = ((tree_to_shwi (index) - minelt)
6388 * tree_to_uhwi (TYPE_SIZE (elttype)));
6389 else
6390 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6392 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6393 && TREE_CODE (type) == ARRAY_TYPE
6394 && TYPE_NONALIASED_COMPONENT (type))
6396 target = copy_rtx (target);
6397 MEM_KEEP_ALIAS_SET_P (target) = 1;
6399 store_constructor_field (target, bitsize, bitpos, mode, value,
6400 cleared, get_alias_set (elttype),
6401 reverse);
6404 break;
6407 case VECTOR_TYPE:
6409 unsigned HOST_WIDE_INT idx;
6410 constructor_elt *ce;
6411 int i;
6412 int need_to_clear;
6413 int icode = CODE_FOR_nothing;
6414 tree elttype = TREE_TYPE (type);
6415 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6416 machine_mode eltmode = TYPE_MODE (elttype);
6417 HOST_WIDE_INT bitsize;
6418 HOST_WIDE_INT bitpos;
6419 rtvec vector = NULL;
6420 unsigned n_elts;
6421 alias_set_type alias;
6423 gcc_assert (eltmode != BLKmode);
6425 n_elts = TYPE_VECTOR_SUBPARTS (type);
6426 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6428 machine_mode mode = GET_MODE (target);
6430 icode = (int) optab_handler (vec_init_optab, mode);
6431 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6432 if (icode != CODE_FOR_nothing)
6434 tree value;
6436 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6437 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6439 icode = CODE_FOR_nothing;
6440 break;
6443 if (icode != CODE_FOR_nothing)
6445 unsigned int i;
6447 vector = rtvec_alloc (n_elts);
6448 for (i = 0; i < n_elts; i++)
6449 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6453 /* If the constructor has fewer elements than the vector,
6454 clear the whole vector first. Similarly if this is a static
6455 constructor of a non-BLKmode object. */
6456 if (cleared)
6457 need_to_clear = 0;
6458 else if (REG_P (target) && TREE_STATIC (exp))
6459 need_to_clear = 1;
6460 else
6462 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6463 tree value;
6465 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6467 int n_elts_here = tree_to_uhwi
6468 (int_const_binop (TRUNC_DIV_EXPR,
6469 TYPE_SIZE (TREE_TYPE (value)),
6470 TYPE_SIZE (elttype)));
6472 count += n_elts_here;
6473 if (mostly_zeros_p (value))
6474 zero_count += n_elts_here;
6477 /* Clear the entire vector first if there are any missing elements,
6478 or if the incidence of zero elements is >= 75%. */
6479 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6482 if (need_to_clear && size > 0 && !vector)
6484 if (REG_P (target))
6485 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6486 else
6487 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6488 cleared = 1;
6491 /* Inform later passes that the old value is dead. */
6492 if (!cleared && !vector && REG_P (target))
6493 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6495 if (MEM_P (target))
6496 alias = MEM_ALIAS_SET (target);
6497 else
6498 alias = get_alias_set (elttype);
6500 /* Store each element of the constructor into the corresponding
6501 element of TARGET, determined by counting the elements. */
6502 for (idx = 0, i = 0;
6503 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6504 idx++, i += bitsize / elt_size)
6506 HOST_WIDE_INT eltpos;
6507 tree value = ce->value;
6509 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6510 if (cleared && initializer_zerop (value))
6511 continue;
6513 if (ce->index)
6514 eltpos = tree_to_uhwi (ce->index);
6515 else
6516 eltpos = i;
6518 if (vector)
6520 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6521 elements. */
6522 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6523 RTVEC_ELT (vector, eltpos)
6524 = expand_normal (value);
6526 else
6528 machine_mode value_mode =
6529 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6530 ? TYPE_MODE (TREE_TYPE (value))
6531 : eltmode;
6532 bitpos = eltpos * elt_size;
6533 store_constructor_field (target, bitsize, bitpos, value_mode,
6534 value, cleared, alias, reverse);
6538 if (vector)
6539 emit_insn (GEN_FCN (icode)
6540 (target,
6541 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6542 break;
6545 default:
6546 gcc_unreachable ();
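/* To make the clearing heuristics above concrete (a made-up example,
   assuming 4-byte int): expanding

     int a_example[8] = { [7] = 1 };

   into a stack slot first clears the whole 32-byte block, because the
   CONSTRUCTOR has fewer elements than the array, and then stores only the
   single nonzero element at its position; elements not mentioned in the
   CONSTRUCTOR are covered by the initial clearing. */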
6550 /* Store the value of EXP (an expression tree)
6551 into a subfield of TARGET which has mode MODE and occupies
6552 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6553 If MODE is VOIDmode, it means that we are storing into a bit-field.
6555 BITREGION_START is the bitpos of the first bitfield in this region.
6556 BITREGION_END is the bitpos of the ending bitfield in this region.
6557 These two fields are 0, if the C++ memory model does not apply,
6558 or we are not interested in keeping track of bitfield regions.
6560 Always return const0_rtx unless we have something particular to
6561 return.
6563 ALIAS_SET is the alias set for the destination. This value will
6564 (in general) be different from that for TARGET, since TARGET is a
6565 reference to the containing structure.
6567 If NONTEMPORAL is true, try generating a nontemporal store.
6569 If REVERSE is true, the store is to be done in reverse order. */
6571 static rtx
6572 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6573 unsigned HOST_WIDE_INT bitregion_start,
6574 unsigned HOST_WIDE_INT bitregion_end,
6575 machine_mode mode, tree exp,
6576 alias_set_type alias_set, bool nontemporal, bool reverse)
6578 if (TREE_CODE (exp) == ERROR_MARK)
6579 return const0_rtx;
6581 /* If we have nothing to store, do nothing unless the expression has
6582 side-effects. */
6583 if (bitsize == 0)
6584 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6586 if (GET_CODE (target) == CONCAT)
6588 /* We're storing into a struct containing a single __complex. */
6590 gcc_assert (!bitpos);
6591 return store_expr (exp, target, 0, nontemporal, reverse);
6594 /* If the structure is in a register or if the component
6595 is a bit field, we cannot use addressing to access it.
6596 Use bit-field techniques or SUBREG to store in it. */
6598 if (mode == VOIDmode
6599 || (mode != BLKmode && ! direct_store[(int) mode]
6600 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6601 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6602 || REG_P (target)
6603 || GET_CODE (target) == SUBREG
6604 /* If the field isn't aligned enough to store as an ordinary memref,
6605 store it as a bit field. */
6606 || (mode != BLKmode
6607 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6608 || bitpos % GET_MODE_ALIGNMENT (mode))
6609 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6610 || (bitpos % BITS_PER_UNIT != 0)))
6611 || (bitsize >= 0 && mode != BLKmode
6612 && GET_MODE_BITSIZE (mode) > bitsize)
6613 /* If the RHS and field are a constant size and the size of the
6614 RHS isn't the same size as the bitfield, we must use bitfield
6615 operations. */
6616 || (bitsize >= 0
6617 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6618 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6619 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6620 decl we must use bitfield operations. */
6621 || (bitsize >= 0
6622 && TREE_CODE (exp) == MEM_REF
6623 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6624 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6625 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6626 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6628 rtx temp;
6629 gimple nop_def;
6631 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6632 implies a mask operation. If the precision is the same size as
6633 the field we're storing into, that mask is redundant. This is
6634 particularly common with bit field assignments generated by the
6635 C front end. */
6636 nop_def = get_def_for_expr (exp, NOP_EXPR);
6637 if (nop_def)
6639 tree type = TREE_TYPE (exp);
6640 if (INTEGRAL_TYPE_P (type)
6641 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6642 && bitsize == TYPE_PRECISION (type))
6644 tree op = gimple_assign_rhs1 (nop_def);
6645 type = TREE_TYPE (op);
6646 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6647 exp = op;
6651 temp = expand_normal (exp);
6653 /* If the value has a record type and an integral mode then, if BITSIZE
6654 is narrower than this mode and this is a big-endian machine, we must
6655 first put the value into the low-order bits. Moreover, the field may
6656 not be aligned on a byte boundary; in this case, if it has reverse
6657 storage order, it needs to be accessed as a scalar field with reverse
6658 storage order and we must first put the value into target order. */
6659 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
6660 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6662 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6664 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6666 if (reverse)
6667 temp = flip_storage_order (GET_MODE (temp), temp);
6669 if (bitsize < size
6670 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6671 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6672 size - bitsize, NULL_RTX, 1);
6675 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6676 if (mode != VOIDmode && mode != BLKmode
6677 && mode != TYPE_MODE (TREE_TYPE (exp)))
6678 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6680 /* If the modes of TEMP and TARGET are both BLKmode, both
6681 must be in memory and BITPOS must be aligned on a byte
6682 boundary. If so, we simply do a block copy. Likewise
6683 for a BLKmode-like TARGET. */
6684 if (GET_MODE (temp) == BLKmode
6685 && (GET_MODE (target) == BLKmode
6686 || (MEM_P (target)
6687 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6688 && (bitpos % BITS_PER_UNIT) == 0
6689 && (bitsize % BITS_PER_UNIT) == 0)))
6691 gcc_assert (MEM_P (target) && MEM_P (temp)
6692 && (bitpos % BITS_PER_UNIT) == 0);
6694 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6695 emit_block_move (target, temp,
6696 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6697 / BITS_PER_UNIT),
6698 BLOCK_OP_NORMAL);
6700 return const0_rtx;
6703 /* Handle calls that return values in multiple non-contiguous locations.
6704 The Irix 6 ABI has examples of this. */
6705 if (GET_CODE (temp) == PARALLEL)
6707 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6708 rtx temp_target;
6709 if (mode == BLKmode || mode == VOIDmode)
6710 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6711 temp_target = gen_reg_rtx (mode);
6712 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6713 temp = temp_target;
6715 else if (mode == BLKmode)
6717 /* Handle calls that return BLKmode values in registers. */
6718 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6720 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6721 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6722 temp = temp_target;
6724 else
6726 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6727 rtx temp_target;
6728 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6729 temp_target = gen_reg_rtx (mode);
6730 temp_target
6731 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6732 temp_target, mode, mode, false);
6733 temp = temp_target;
6737 /* Store the value in the bitfield. */
6738 store_bit_field (target, bitsize, bitpos,
6739 bitregion_start, bitregion_end,
6740 mode, temp, reverse);
6742 return const0_rtx;
6744 else
6746 /* Now build a reference to just the desired component. */
6747 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6749 if (to_rtx == target)
6750 to_rtx = copy_rtx (to_rtx);
6752 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6753 set_mem_alias_set (to_rtx, alias_set);
6755 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
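/* A small example of the bit-field path above (names invented, exact
   numbers depend on the target's bit-field layout): assigning to X.B in

     struct { unsigned a : 3; unsigned b : 5; } x_example;

   typically reaches store_field with MODE == VOIDmode, BITSIZE == 5 and
   BITPOS == 3, so the value is expanded into TEMP and handed to
   store_bit_field instead of being stored through an ordinary memory
   reference. */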
6759 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6760 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6761 codes and find the ultimate containing object, which we return.
6763 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6764 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6765 storage order of the field.
6766 If the position of the field is variable, we store a tree
6767 giving the variable offset (in units) in *POFFSET.
6768 This offset is in addition to the bit position.
6769 If the position is not variable, we store 0 in *POFFSET.
6771 If any of the extraction expressions is volatile,
6772 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6774 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6775 Otherwise, it is a mode that can be used to access the field.
6777 If the field describes a variable-sized object, *PMODE is set to
6778 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6779 this case, but the address of the object can be found.
6781 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6782 look through nodes that serve as markers of a greater alignment than
6783 the one that can be deduced from the expression. These nodes make it
6784 possible for front-ends to prevent temporaries from being created by
6785 the middle-end on alignment considerations. For that purpose, the
6786 normal operating mode at high-level is to always pass FALSE so that
6787 the ultimate containing object is really returned; moreover, the
6788 associated predicate handled_component_p will always return TRUE
6789 on these nodes, thus indicating that they are essentially handled
6790 by get_inner_reference. TRUE should only be passed when the caller
6791 is scanning the expression in order to build another representation
6792 and specifically knows how to handle these nodes; as such, this is
6793 the normal operating mode in the RTL expanders. */
6795 tree
6796 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6797 HOST_WIDE_INT *pbitpos, tree *poffset,
6798 machine_mode *pmode, int *punsignedp,
6799 int *preversep, int *pvolatilep, bool keep_aligning)
6801 tree size_tree = 0;
6802 machine_mode mode = VOIDmode;
6803 bool blkmode_bitfield = false;
6804 tree offset = size_zero_node;
6805 offset_int bit_offset = 0;
6807 /* First get the mode, signedness, storage order and size. We do this from
6808 just the outermost expression. */
6809 *pbitsize = -1;
6810 if (TREE_CODE (exp) == COMPONENT_REF)
6812 tree field = TREE_OPERAND (exp, 1);
6813 size_tree = DECL_SIZE (field);
6814 if (flag_strict_volatile_bitfields > 0
6815 && TREE_THIS_VOLATILE (exp)
6816 && DECL_BIT_FIELD_TYPE (field)
6817 && DECL_MODE (field) != BLKmode)
6818 /* Volatile bitfields should be accessed in the mode of the
6819 field's type, not the mode computed based on the bit
6820 size. */
6821 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6822 else if (!DECL_BIT_FIELD (field))
6823 mode = DECL_MODE (field);
6824 else if (DECL_MODE (field) == BLKmode)
6825 blkmode_bitfield = true;
6827 *punsignedp = DECL_UNSIGNED (field);
6828 *preversep
6829 = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (exp, 0)))
6830 && !AGGREGATE_TYPE_P (TREE_TYPE (exp));
6832 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6834 size_tree = TREE_OPERAND (exp, 1);
6835 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6836 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6837 *preversep = REF_REVERSE_STORAGE_ORDER (exp);
6839 /* For vector types, with the correct size of access, use the mode of
6840 inner type. */
6841 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6842 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6843 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6844 mode = TYPE_MODE (TREE_TYPE (exp));
6846 else
6848 mode = TYPE_MODE (TREE_TYPE (exp));
6849 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6850 *preversep
6851 = ((TREE_CODE (exp) == ARRAY_REF
6852 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (exp, 0))))
6853 || (TREE_CODE (exp) == MEM_REF
6854 && REF_REVERSE_STORAGE_ORDER (exp)))
6855 && !AGGREGATE_TYPE_P (TREE_TYPE (exp));
6857 if (mode == BLKmode)
6858 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6859 else
6860 *pbitsize = GET_MODE_BITSIZE (mode);
6863 if (size_tree != 0)
6865 if (! tree_fits_uhwi_p (size_tree))
6866 mode = BLKmode, *pbitsize = -1;
6867 else
6868 *pbitsize = tree_to_uhwi (size_tree);
6871 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6872 and find the ultimate containing object. */
6873 while (1)
6875 switch (TREE_CODE (exp))
6877 case BIT_FIELD_REF:
6878 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6879 break;
6881 case COMPONENT_REF:
6883 tree field = TREE_OPERAND (exp, 1);
6884 tree this_offset = component_ref_field_offset (exp);
6886 /* If this field hasn't been filled in yet, don't go past it.
6887 This should only happen when folding expressions made during
6888 type construction. */
6889 if (this_offset == 0)
6890 break;
6892 offset = size_binop (PLUS_EXPR, offset, this_offset);
6893 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6895 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6897 break;
6899 case ARRAY_REF:
6900 case ARRAY_RANGE_REF:
6902 tree index = TREE_OPERAND (exp, 1);
6903 tree low_bound = array_ref_low_bound (exp);
6904 tree unit_size = array_ref_element_size (exp);
6906 /* We assume all arrays have sizes that are a multiple of a byte.
6907 First subtract the lower bound, if any, in the type of the
6908 index, then convert to sizetype and multiply by the size of
6909 the array element. */
6910 if (! integer_zerop (low_bound))
6911 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6912 index, low_bound);
6914 offset = size_binop (PLUS_EXPR, offset,
6915 size_binop (MULT_EXPR,
6916 fold_convert (sizetype, index),
6917 unit_size));
6919 break;
6921 case REALPART_EXPR:
6922 break;
6924 case IMAGPART_EXPR:
6925 bit_offset += *pbitsize;
6926 break;
6928 case VIEW_CONVERT_EXPR:
6929 if (keep_aligning && STRICT_ALIGNMENT
6930 && (TYPE_ALIGN (TREE_TYPE (exp))
6931 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6932 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6933 < BIGGEST_ALIGNMENT)
6934 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6935 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6936 goto done;
6937 break;
6939 case MEM_REF:
6940 /* Hand back the decl for MEM[&decl, off]. */
6941 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6943 tree off = TREE_OPERAND (exp, 1);
6944 if (!integer_zerop (off))
6946 offset_int boff, coff = mem_ref_offset (exp);
6947 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6948 bit_offset += boff;
6950 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6952 goto done;
6954 default:
6955 goto done;
6958 /* If any reference in the chain is volatile, the effect is volatile. */
6959 if (TREE_THIS_VOLATILE (exp))
6960 *pvolatilep = 1;
6962 exp = TREE_OPERAND (exp, 0);
6964 done:
6966 /* If OFFSET is constant, see if we can return the whole thing as a
6967 constant bit position. Make sure to handle overflow during
6968 this conversion. */
6969 if (TREE_CODE (offset) == INTEGER_CST)
6971 offset_int tem = wi::sext (wi::to_offset (offset),
6972 TYPE_PRECISION (sizetype));
6973 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6974 tem += bit_offset;
6975 if (wi::fits_shwi_p (tem))
6977 *pbitpos = tem.to_shwi ();
6978 *poffset = offset = NULL_TREE;
6982 /* Otherwise, split it up. */
6983 if (offset)
6985 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6986 if (wi::neg_p (bit_offset))
6988 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6989 offset_int tem = bit_offset.and_not (mask);
6990 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6991 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6992 bit_offset -= tem;
6993 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6994 offset = size_binop (PLUS_EXPR, offset,
6995 wide_int_to_tree (sizetype, tem));
6998 *pbitpos = bit_offset.to_shwi ();
6999 *poffset = offset;
7002 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7003 if (mode == VOIDmode
7004 && blkmode_bitfield
7005 && (*pbitpos % BITS_PER_UNIT) == 0
7006 && (*pbitsize % BITS_PER_UNIT) == 0)
7007 *pmode = BLKmode;
7008 else
7009 *pmode = mode;
7011 return exp;
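/* A typical use of get_inner_reference (the variable is hypothetical and
   a 32-bit int is assumed): for the reference S_EXAMPLE.F, where F is a
   non-bit-field int member at byte offset 4, the function returns the
   VAR_DECL for S_EXAMPLE with *PBITSIZE == 32, *PBITPOS == 32,
   *POFFSET == NULL_TREE and *PMODE set to the mode of F. */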
7014 /* Return a tree of sizetype representing the size, in bytes, of the element
7015 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7017 tree
7018 array_ref_element_size (tree exp)
7020 tree aligned_size = TREE_OPERAND (exp, 3);
7021 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7022 location_t loc = EXPR_LOCATION (exp);
7024 /* If a size was specified in the ARRAY_REF, it's the size measured
7025 in alignment units of the element type. So multiply by that value. */
7026 if (aligned_size)
7028 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7029 sizetype from another type of the same width and signedness. */
7030 if (TREE_TYPE (aligned_size) != sizetype)
7031 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
7032 return size_binop_loc (loc, MULT_EXPR, aligned_size,
7033 size_int (TYPE_ALIGN_UNIT (elmt_type)));
7036 /* Otherwise, take the size from that of the element type. Substitute
7037 any PLACEHOLDER_EXPR that we have. */
7038 else
7039 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
7042 /* Return a tree representing the lower bound of the array mentioned in
7043 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7045 tree
7046 array_ref_low_bound (tree exp)
7048 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7050 /* If a lower bound is specified in EXP, use it. */
7051 if (TREE_OPERAND (exp, 2))
7052 return TREE_OPERAND (exp, 2);
7054 /* Otherwise, if there is a domain type and it has a lower bound, use it,
7055 substituting for a PLACEHOLDER_EXPR as needed. */
7056 if (domain_type && TYPE_MIN_VALUE (domain_type))
7057 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7059 /* Otherwise, return a zero of the appropriate type. */
7060 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7063 /* Returns true if REF is an array reference to an array at the end of
7064 a structure. If this is the case, the array may be allocated larger
7065 than its upper bound implies. */
7067 bool
7068 array_at_struct_end_p (tree ref)
7070 if (TREE_CODE (ref) != ARRAY_REF
7071 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7072 return false;
7074 while (handled_component_p (ref))
7076 /* If the reference chain contains a component reference to a
7077 non-union type and another field follows it, the reference
7078 is not at the end of a structure. */
7079 if (TREE_CODE (ref) == COMPONENT_REF
7080 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7082 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7083 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7084 nextf = DECL_CHAIN (nextf);
7085 if (nextf)
7086 return false;
7089 ref = TREE_OPERAND (ref, 0);
7092 /* If the reference is based on a declared entity, the size of the array
7093 is constrained by its given domain. */
7094 if (DECL_P (ref))
7095 return false;
7097 return true;
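/* For example (the declarations are invented for this comment), with

     struct { int n; int tail[1]; } *p_example;

   a reference such as p_example->tail[i] is based on a pointer dereference
   rather than a declared object, so array_at_struct_end_p returns true and
   callers may not assume that the access stays within the declared bound;
   the same reference through a declared local of that struct type returns
   false. */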
7100 /* Return a tree representing the upper bound of the array mentioned in
7101 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7103 tree
7104 array_ref_up_bound (tree exp)
7106 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7108 /* If there is a domain type and it has an upper bound, use it, substituting
7109 for a PLACEHOLDER_EXPR as needed. */
7110 if (domain_type && TYPE_MAX_VALUE (domain_type))
7111 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7113 /* Otherwise fail. */
7114 return NULL_TREE;
7117 /* Return a tree representing the offset, in bytes, of the field referenced
7118 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7120 tree
7121 component_ref_field_offset (tree exp)
7123 tree aligned_offset = TREE_OPERAND (exp, 2);
7124 tree field = TREE_OPERAND (exp, 1);
7125 location_t loc = EXPR_LOCATION (exp);
7127 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7128 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7129 value. */
7130 if (aligned_offset)
7132 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7133 sizetype from another type of the same width and signedness. */
7134 if (TREE_TYPE (aligned_offset) != sizetype)
7135 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7136 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7137 size_int (DECL_OFFSET_ALIGN (field)
7138 / BITS_PER_UNIT));
7141 /* Otherwise, take the offset from that of the field. Substitute
7142 any PLACEHOLDER_EXPR that we have. */
7143 else
7144 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7147 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7149 static unsigned HOST_WIDE_INT
7150 target_align (const_tree target)
7152 /* We might have a chain of nested references with intermediate misaligning
7153 bit-field components, so we need to recurse to find out. */
7155 unsigned HOST_WIDE_INT this_align, outer_align;
7157 switch (TREE_CODE (target))
7159 case BIT_FIELD_REF:
7160 return 1;
7162 case COMPONENT_REF:
7163 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7164 outer_align = target_align (TREE_OPERAND (target, 0));
7165 return MIN (this_align, outer_align);
7167 case ARRAY_REF:
7168 case ARRAY_RANGE_REF:
7169 this_align = TYPE_ALIGN (TREE_TYPE (target));
7170 outer_align = target_align (TREE_OPERAND (target, 0));
7171 return MIN (this_align, outer_align);
7173 CASE_CONVERT:
7174 case NON_LVALUE_EXPR:
7175 case VIEW_CONVERT_EXPR:
7176 this_align = TYPE_ALIGN (TREE_TYPE (target));
7177 outer_align = target_align (TREE_OPERAND (target, 0));
7178 return MAX (this_align, outer_align);
7180 default:
7181 return TYPE_ALIGN (TREE_TYPE (target));
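/* For example, for a store to A_EXAMPLE.C[I] where C is an array of int,
   the result is the minimum of the int type's alignment and the alignment
   deduced for the enclosing reference, so a 16-byte-aligned enclosing
   object does not by itself let the element store assume more than the
   element type's own alignment. */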
7186 /* Given an rtx VALUE that may contain additions and multiplications, return
7187 an equivalent value that just refers to a register, memory, or constant.
7188 This is done by generating instructions to perform the arithmetic and
7189 returning a pseudo-register containing the value.
7191 The returned value may be a REG, SUBREG, MEM or constant. */
7194 force_operand (rtx value, rtx target)
7196 rtx op1, op2;
7197 /* Use subtarget as the target for operand 0 of a binary operation. */
7198 rtx subtarget = get_subtarget (target);
7199 enum rtx_code code = GET_CODE (value);
7201 /* Check for subreg applied to an expression produced by loop optimizer. */
7202 if (code == SUBREG
7203 && !REG_P (SUBREG_REG (value))
7204 && !MEM_P (SUBREG_REG (value)))
7206 value
7207 = simplify_gen_subreg (GET_MODE (value),
7208 force_reg (GET_MODE (SUBREG_REG (value)),
7209 force_operand (SUBREG_REG (value),
7210 NULL_RTX)),
7211 GET_MODE (SUBREG_REG (value)),
7212 SUBREG_BYTE (value));
7213 code = GET_CODE (value);
7216 /* Check for a PIC address load. */
7217 if ((code == PLUS || code == MINUS)
7218 && XEXP (value, 0) == pic_offset_table_rtx
7219 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7220 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7221 || GET_CODE (XEXP (value, 1)) == CONST))
7223 if (!subtarget)
7224 subtarget = gen_reg_rtx (GET_MODE (value));
7225 emit_move_insn (subtarget, value);
7226 return subtarget;
7229 if (ARITHMETIC_P (value))
7231 op2 = XEXP (value, 1);
7232 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7233 subtarget = 0;
7234 if (code == MINUS && CONST_INT_P (op2))
7236 code = PLUS;
7237 op2 = negate_rtx (GET_MODE (value), op2);
7240 /* Check for an addition with OP2 a constant integer and our first
7241 operand a PLUS of a virtual register and something else. In that
7242 case, we want to emit the sum of the virtual register and the
7243 constant first and then add the other value. This allows virtual
7244 register instantiation to simply modify the constant rather than
7245 creating another one around this addition. */
7246 if (code == PLUS && CONST_INT_P (op2)
7247 && GET_CODE (XEXP (value, 0)) == PLUS
7248 && REG_P (XEXP (XEXP (value, 0), 0))
7249 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7250 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7252 rtx temp = expand_simple_binop (GET_MODE (value), code,
7253 XEXP (XEXP (value, 0), 0), op2,
7254 subtarget, 0, OPTAB_LIB_WIDEN);
7255 return expand_simple_binop (GET_MODE (value), code, temp,
7256 force_operand (XEXP (XEXP (value,
7257 0), 1), 0),
7258 target, 0, OPTAB_LIB_WIDEN);
7261 op1 = force_operand (XEXP (value, 0), subtarget);
7262 op2 = force_operand (op2, NULL_RTX);
7263 switch (code)
7265 case MULT:
7266 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7267 case DIV:
7268 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7269 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7270 target, 1, OPTAB_LIB_WIDEN);
7271 else
7272 return expand_divmod (0,
7273 FLOAT_MODE_P (GET_MODE (value))
7274 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7275 GET_MODE (value), op1, op2, target, 0);
7276 case MOD:
7277 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7278 target, 0);
7279 case UDIV:
7280 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7281 target, 1);
7282 case UMOD:
7283 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7284 target, 1);
7285 case ASHIFTRT:
7286 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7287 target, 0, OPTAB_LIB_WIDEN);
7288 default:
7289 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7290 target, 1, OPTAB_LIB_WIDEN);
7293 if (UNARY_P (value))
7295 if (!target)
7296 target = gen_reg_rtx (GET_MODE (value));
7297 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7298 switch (code)
7300 case ZERO_EXTEND:
7301 case SIGN_EXTEND:
7302 case TRUNCATE:
7303 case FLOAT_EXTEND:
7304 case FLOAT_TRUNCATE:
7305 convert_move (target, op1, code == ZERO_EXTEND);
7306 return target;
7308 case FIX:
7309 case UNSIGNED_FIX:
7310 expand_fix (target, op1, code == UNSIGNED_FIX);
7311 return target;
7313 case FLOAT:
7314 case UNSIGNED_FLOAT:
7315 expand_float (target, op1, code == UNSIGNED_FLOAT);
7316 return target;
7318 default:
7319 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7323 #ifdef INSN_SCHEDULING
7324 /* On machines that have insn scheduling, we want all memory references to be
7325 explicit, so we need to deal with such paradoxical SUBREGs. */
7326 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7327 value
7328 = simplify_gen_subreg (GET_MODE (value),
7329 force_reg (GET_MODE (SUBREG_REG (value)),
7330 force_operand (SUBREG_REG (value),
7331 NULL_RTX)),
7332 GET_MODE (SUBREG_REG (value)),
7333 SUBREG_BYTE (value));
7334 #endif
7336 return value;
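/* As a sketch of what the function does (the exact RTL is target
   dependent): given

     VALUE == (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand emits a multiply followed by an add and returns an operand
   -- typically a fresh pseudo, or TARGET if one was supplied -- holding
   the final sum, which the caller can then use wherever a general operand
   is required. */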
7339 /* Subroutine of expand_expr: return nonzero iff there is no way that
7340 EXP can reference X, which is being modified. TOP_P is nonzero if this
7341 call is going to be used to determine whether we need a temporary
7342 for EXP, as opposed to a recursive call to this function.
7344 It is always safe for this routine to return zero since it merely
7345 searches for optimization opportunities. */
7348 safe_from_p (const_rtx x, tree exp, int top_p)
7350 rtx exp_rtl = 0;
7351 int i, nops;
7353 if (x == 0
7354 /* If EXP has varying size, we MUST use a target since we currently
7355 have no way of allocating temporaries of variable size
7356 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7357 So we assume here that something at a higher level has prevented a
7358 clash. This is somewhat bogus, but the best we can do. Only
7359 do this when X is BLKmode and when we are at the top level. */
7360 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7361 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7362 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7363 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7364 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7365 != INTEGER_CST)
7366 && GET_MODE (x) == BLKmode)
7367 /* If X is in the outgoing argument area, it is always safe. */
7368 || (MEM_P (x)
7369 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7370 || (GET_CODE (XEXP (x, 0)) == PLUS
7371 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7372 return 1;
7374 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7375 find the underlying pseudo. */
7376 if (GET_CODE (x) == SUBREG)
7378 x = SUBREG_REG (x);
7379 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7380 return 0;
7383 /* Now look at our tree code and possibly recurse. */
7384 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7386 case tcc_declaration:
7387 exp_rtl = DECL_RTL_IF_SET (exp);
7388 break;
7390 case tcc_constant:
7391 return 1;
7393 case tcc_exceptional:
7394 if (TREE_CODE (exp) == TREE_LIST)
7396 while (1)
7398 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7399 return 0;
7400 exp = TREE_CHAIN (exp);
7401 if (!exp)
7402 return 1;
7403 if (TREE_CODE (exp) != TREE_LIST)
7404 return safe_from_p (x, exp, 0);
7407 else if (TREE_CODE (exp) == CONSTRUCTOR)
7409 constructor_elt *ce;
7410 unsigned HOST_WIDE_INT idx;
7412 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7413 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7414 || !safe_from_p (x, ce->value, 0))
7415 return 0;
7416 return 1;
7418 else if (TREE_CODE (exp) == ERROR_MARK)
7419 return 1; /* An already-visited SAVE_EXPR? */
7420 else
7421 return 0;
7423 case tcc_statement:
7424 /* The only case we look at here is the DECL_INITIAL inside a
7425 DECL_EXPR. */
7426 return (TREE_CODE (exp) != DECL_EXPR
7427 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7428 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7429 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7431 case tcc_binary:
7432 case tcc_comparison:
7433 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7434 return 0;
7435 /* Fall through. */
7437 case tcc_unary:
7438 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7440 case tcc_expression:
7441 case tcc_reference:
7442 case tcc_vl_exp:
7443 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7444 the expression. If it is set, we conflict iff we are that rtx or
7445 both are in memory. Otherwise, we check all operands of the
7446 expression recursively. */
7448 switch (TREE_CODE (exp))
7450 case ADDR_EXPR:
7451 /* If the operand is static or we are static, we can't conflict.
7452 Likewise if we don't conflict with the operand at all. */
7453 if (staticp (TREE_OPERAND (exp, 0))
7454 || TREE_STATIC (exp)
7455 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7456 return 1;
7458 /* Otherwise, the only way this can conflict is if we are taking
7459 the address of a DECL whose address is part of X, which is
7460 very rare. */
7461 exp = TREE_OPERAND (exp, 0);
7462 if (DECL_P (exp))
7464 if (!DECL_RTL_SET_P (exp)
7465 || !MEM_P (DECL_RTL (exp)))
7466 return 0;
7467 else
7468 exp_rtl = XEXP (DECL_RTL (exp), 0);
7470 break;
7472 case MEM_REF:
7473 if (MEM_P (x)
7474 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7475 get_alias_set (exp)))
7476 return 0;
7477 break;
7479 case CALL_EXPR:
7480 /* Assume that the call will clobber all hard registers and
7481 all of memory. */
7482 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7483 || MEM_P (x))
7484 return 0;
7485 break;
7487 case WITH_CLEANUP_EXPR:
7488 case CLEANUP_POINT_EXPR:
7489 /* Lowered by gimplify.c. */
7490 gcc_unreachable ();
7492 case SAVE_EXPR:
7493 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7495 default:
7496 break;
7499 /* If we have an rtx, we do not need to scan our operands. */
7500 if (exp_rtl)
7501 break;
7503 nops = TREE_OPERAND_LENGTH (exp);
7504 for (i = 0; i < nops; i++)
7505 if (TREE_OPERAND (exp, i) != 0
7506 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7507 return 0;
7509 break;
7511 case tcc_type:
7512 /* Should never get a type here. */
7513 gcc_unreachable ();
7516 /* If we have an rtl, find any enclosed object. Then see if we conflict
7517 with it. */
7518 if (exp_rtl)
7520 if (GET_CODE (exp_rtl) == SUBREG)
7522 exp_rtl = SUBREG_REG (exp_rtl);
7523 if (REG_P (exp_rtl)
7524 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7525 return 0;
7528 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7529 are memory and they conflict. */
7530 return ! (rtx_equal_p (x, exp_rtl)
7531 || (MEM_P (x) && MEM_P (exp_rtl)
7532 && true_dependence (exp_rtl, VOIDmode, x)));
7535 /* If we reach here, it is safe. */
7536 return 1;
7540 /* Return the highest power of two that EXP is known to be a multiple of.
7541 This is used in updating alignment of MEMs in array references. */
7543 unsigned HOST_WIDE_INT
7544 highest_pow2_factor (const_tree exp)
7546 unsigned HOST_WIDE_INT ret;
7547 int trailing_zeros = tree_ctz (exp);
7548 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7549 return BIGGEST_ALIGNMENT;
7550 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7551 if (ret > BIGGEST_ALIGNMENT)
7552 return BIGGEST_ALIGNMENT;
7553 return ret;
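/* Worked example (illustrative): if EXP is the constant 24, tree_ctz
   reports 3 trailing zero bits, so the result is 1 << 3 == 8, assuming
   8 does not exceed BIGGEST_ALIGNMENT; an expression such as "n * 24"
   yields the same answer when nothing is known about N's low bits,
   because only the trailing zero bits known for EXP matter here.  */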
7556 /* Similar, except that the alignment requirements of TARGET are
7557 taken into account. Assume it is at least as aligned as its
7558 type, unless it is a COMPONENT_REF in which case the layout of
7559 the structure gives the alignment. */
7561 static unsigned HOST_WIDE_INT
7562 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7564 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7565 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7567 return MAX (factor, talign);
7570 #ifdef HAVE_conditional_move
7571 /* Convert the tree comparison code TCODE to the rtl one where the
7572 signedness is UNSIGNEDP. */
7574 static enum rtx_code
7575 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7577 enum rtx_code code;
7578 switch (tcode)
7580 case EQ_EXPR:
7581 code = EQ;
7582 break;
7583 case NE_EXPR:
7584 code = NE;
7585 break;
7586 case LT_EXPR:
7587 code = unsignedp ? LTU : LT;
7588 break;
7589 case LE_EXPR:
7590 code = unsignedp ? LEU : LE;
7591 break;
7592 case GT_EXPR:
7593 code = unsignedp ? GTU : GT;
7594 break;
7595 case GE_EXPR:
7596 code = unsignedp ? GEU : GE;
7597 break;
7598 case UNORDERED_EXPR:
7599 code = UNORDERED;
7600 break;
7601 case ORDERED_EXPR:
7602 code = ORDERED;
7603 break;
7604 case UNLT_EXPR:
7605 code = UNLT;
7606 break;
7607 case UNLE_EXPR:
7608 code = UNLE;
7609 break;
7610 case UNGT_EXPR:
7611 code = UNGT;
7612 break;
7613 case UNGE_EXPR:
7614 code = UNGE;
7615 break;
7616 case UNEQ_EXPR:
7617 code = UNEQ;
7618 break;
7619 case LTGT_EXPR:
7620 code = LTGT;
7621 break;
7623 default:
7624 gcc_unreachable ();
7626 return code;
7628 #endif
7630 /* Subroutine of expand_expr. Expand the two operands of a binary
7631 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7632 The value may be stored in TARGET if TARGET is nonzero. The
7633 MODIFIER argument is as documented by expand_expr. */
7635 static void
7636 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7637 enum expand_modifier modifier)
7639 if (! safe_from_p (target, exp1, 1))
7640 target = 0;
7641 if (operand_equal_p (exp0, exp1, 0))
7643 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7644 *op1 = copy_rtx (*op0);
7646 else
7648 /* If we need to preserve evaluation order, copy exp0 into its own
7649 temporary variable so that it can't be clobbered by exp1. */
7650 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7651 exp0 = save_expr (exp0);
7652 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7653 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
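/* Usage sketch (illustrative expression only): expanding the operands of
   "x * x" takes the operand_equal_p path above, so X is expanded once and
   the second operand is merely a copy_rtx of the first; when evaluation
   order must be preserved and the second operand has side effects, the
   first operand is wrapped in a SAVE_EXPR before either is expanded.  */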
7658 /* Return a MEM that contains constant EXP. DEFER is as for
7659 output_constant_def and MODIFIER is as for expand_expr. */
7661 static rtx
7662 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7664 rtx mem;
7666 mem = output_constant_def (exp, defer);
7667 if (modifier != EXPAND_INITIALIZER)
7668 mem = use_anchored_address (mem);
7669 return mem;
7672 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7673 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7675 static rtx
7676 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7677 enum expand_modifier modifier, addr_space_t as)
7679 rtx result, subtarget;
7680 tree inner, offset;
7681 HOST_WIDE_INT bitsize, bitpos;
7682 int unsignedp, reversep, volatilep = 0;
7683 machine_mode mode1;
7685 /* If we are taking the address of a constant and are at the top level,
7686 we have to use output_constant_def since we can't call force_const_mem
7687 at top level. */
7688 /* ??? This should be considered a front-end bug. We should not be
7689 generating ADDR_EXPR of something that isn't an LVALUE. The only
7690 exception here is STRING_CST. */
7691 if (CONSTANT_CLASS_P (exp))
7693 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7694 if (modifier < EXPAND_SUM)
7695 result = force_operand (result, target);
7696 return result;
7699 /* Everything must be something allowed by is_gimple_addressable. */
7700 switch (TREE_CODE (exp))
7702 case INDIRECT_REF:
7703 /* This case will happen via recursion for &a->b. */
7704 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7706 case MEM_REF:
7708 tree tem = TREE_OPERAND (exp, 0);
7709 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7710 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7711 return expand_expr (tem, target, tmode, modifier);
7714 case CONST_DECL:
7715 /* Expand the initializer like constants above. */
7716 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7717 0, modifier), 0);
7718 if (modifier < EXPAND_SUM)
7719 result = force_operand (result, target);
7720 return result;
7722 case REALPART_EXPR:
7723 /* The real part of the complex number is always first, therefore
7724 the address is the same as the address of the parent object. */
7725 offset = 0;
7726 bitpos = 0;
7727 inner = TREE_OPERAND (exp, 0);
7728 break;
7730 case IMAGPART_EXPR:
7731 /* The imaginary part of the complex number is always second.
7732 The expression is therefore always offset by the size of the
7733 scalar type. */
7734 offset = 0;
7735 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7736 inner = TREE_OPERAND (exp, 0);
7737 break;
7739 case COMPOUND_LITERAL_EXPR:
7740 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7741 rtl_for_decl_init is called on DECL_INITIAL with
7742 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7743 if (modifier == EXPAND_INITIALIZER
7744 && COMPOUND_LITERAL_EXPR_DECL (exp))
7745 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7746 target, tmode, modifier, as);
7747 /* FALLTHRU */
7748 default:
7749 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7750 expand_expr, as that can have various side effects; LABEL_DECLs for
7751 example, may not have their DECL_RTL set yet. Expand the rtl of
7752 CONSTRUCTORs too, which should yield a memory reference for the
7753 constructor's contents. Assume language specific tree nodes can
7754 be expanded in some interesting way. */
7755 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7756 if (DECL_P (exp)
7757 || TREE_CODE (exp) == CONSTRUCTOR
7758 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7760 result = expand_expr (exp, target, tmode,
7761 modifier == EXPAND_INITIALIZER
7762 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7764 /* If the DECL isn't in memory, then the DECL wasn't properly
7765 marked TREE_ADDRESSABLE, which will be either a front-end
7766 or a tree optimizer bug. */
7768 if (TREE_ADDRESSABLE (exp)
7769 && ! MEM_P (result)
7770 && ! targetm.calls.allocate_stack_slots_for_args ())
7772 error ("local frame unavailable (naked function?)");
7773 return result;
7775 else
7776 gcc_assert (MEM_P (result));
7777 result = XEXP (result, 0);
7779 /* ??? Is this needed anymore? */
7780 if (DECL_P (exp))
7781 TREE_USED (exp) = 1;
7783 if (modifier != EXPAND_INITIALIZER
7784 && modifier != EXPAND_CONST_ADDRESS
7785 && modifier != EXPAND_SUM)
7786 result = force_operand (result, target);
7787 return result;
7790 /* Pass FALSE as the last argument to get_inner_reference although
7791 we are expanding to RTL. The rationale is that we know how to
7792 handle "aligning nodes" here: we can just bypass them because
7793 they won't change the final object whose address will be returned
7794 (they actually exist only for that purpose). */
7795 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7796 &unsignedp, &reversep, &volatilep, false);
7797 break;
7800 /* We must have made progress. */
7801 gcc_assert (inner != exp);
7803 subtarget = offset || bitpos ? NULL_RTX : target;
7804 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7805 inner alignment, force the inner to be sufficiently aligned. */
7806 if (CONSTANT_CLASS_P (inner)
7807 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7809 inner = copy_node (inner);
7810 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7811 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7812 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7814 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7816 if (offset)
7818 rtx tmp;
7820 if (modifier != EXPAND_NORMAL)
7821 result = force_operand (result, NULL);
7822 tmp = expand_expr (offset, NULL_RTX, tmode,
7823 modifier == EXPAND_INITIALIZER
7824 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7826 /* expand_expr is allowed to return an object in a mode other
7827 than TMODE. If it did, we need to convert. */
7828 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7829 tmp = convert_modes (tmode, GET_MODE (tmp),
7830 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7831 result = convert_memory_address_addr_space (tmode, result, as);
7832 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7834 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7835 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7836 else
7838 subtarget = bitpos ? NULL_RTX : target;
7839 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7840 1, OPTAB_LIB_WIDEN);
7844 if (bitpos)
7846 /* Someone beforehand should have rejected taking the address
7847 of such an object. */
7848 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7850 result = convert_memory_address_addr_space (tmode, result, as);
7851 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7852 if (modifier < EXPAND_SUM)
7853 result = force_operand (result, target);
7856 return result;
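/* Rough example (field and offsets hypothetical): for &S.F where F lies
   4 bytes into S, get_inner_reference hands back S as the inner object
   with bitpos == 32; the address of S is computed by the recursive call
   above, and plus_constant then adds 32 / BITS_PER_UNIT == 4 to form the
   final address, assuming BITS_PER_UNIT is 8.  */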
7859 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7860 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7862 static rtx
7863 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7864 enum expand_modifier modifier)
7866 addr_space_t as = ADDR_SPACE_GENERIC;
7867 machine_mode address_mode = Pmode;
7868 machine_mode pointer_mode = ptr_mode;
7869 machine_mode rmode;
7870 rtx result;
7872 /* Target mode of VOIDmode says "whatever's natural". */
7873 if (tmode == VOIDmode)
7874 tmode = TYPE_MODE (TREE_TYPE (exp));
7876 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7878 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7879 address_mode = targetm.addr_space.address_mode (as);
7880 pointer_mode = targetm.addr_space.pointer_mode (as);
7883 /* We can get called with some Weird Things if the user does silliness
7884 like "(short) &a". In that case, convert_memory_address won't do
7885 the right thing, so ignore the given target mode. */
7886 if (tmode != address_mode && tmode != pointer_mode)
7887 tmode = address_mode;
7889 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7890 tmode, modifier, as);
7892 /* Despite expand_expr's claims about ignoring TMODE when not
7893 strictly convenient, stuff breaks if we don't honor it. Note
7894 that combined with the above, we only do this for pointer modes. */
7895 rmode = GET_MODE (result);
7896 if (rmode == VOIDmode)
7897 rmode = tmode;
7898 if (rmode != tmode)
7899 result = convert_memory_address_addr_space (tmode, result, as);
7901 return result;
7904 /* Generate code for computing CONSTRUCTOR EXP.
7905 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7906 is TRUE, instead of creating a temporary variable in memory
7907 NULL is returned and the caller needs to handle it differently. */
7909 static rtx
7910 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7911 bool avoid_temp_mem)
7913 tree type = TREE_TYPE (exp);
7914 machine_mode mode = TYPE_MODE (type);
7916 /* Try to avoid creating a temporary at all. This is possible
7917 if all of the initializer is zero.
7918 FIXME: try to handle all [0..255] initializers we can handle
7919 with memset. */
7920 if (TREE_STATIC (exp)
7921 && !TREE_ADDRESSABLE (exp)
7922 && target != 0 && mode == BLKmode
7923 && all_zeros_p (exp))
7925 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7926 return target;
7929 /* All elts simple constants => refer to a constant in memory. But
7930 if this is a non-BLKmode mode, let it store a field at a time
7931 since that should make a CONST_INT, CONST_WIDE_INT or
7932 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7933 use, it is best to store directly into the target unless the type
7934 is large enough that memcpy will be used. If we are making an
7935 initializer and all operands are constant, put it in memory as
7936 well.
7938 FIXME: Avoid trying to fill vector constructors piecemeal.
7939 Output them with output_constant_def below unless we're sure
7940 they're zeros. This should go away when vector initializers
7941 are treated like VECTOR_CST instead of arrays. */
7942 if ((TREE_STATIC (exp)
7943 && ((mode == BLKmode
7944 && ! (target != 0 && safe_from_p (target, exp, 1)))
7945 || TREE_ADDRESSABLE (exp)
7946 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7947 && (! can_move_by_pieces
7948 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7949 TYPE_ALIGN (type)))
7950 && ! mostly_zeros_p (exp))))
7951 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7952 && TREE_CONSTANT (exp)))
7954 rtx constructor;
7956 if (avoid_temp_mem)
7957 return NULL_RTX;
7959 constructor = expand_expr_constant (exp, 1, modifier);
7961 if (modifier != EXPAND_CONST_ADDRESS
7962 && modifier != EXPAND_INITIALIZER
7963 && modifier != EXPAND_SUM)
7964 constructor = validize_mem (constructor);
7966 return constructor;
7969 /* Handle calls that pass values in multiple non-contiguous
7970 locations. The Irix 6 ABI has examples of this. */
7971 if (target == 0 || ! safe_from_p (target, exp, 1)
7972 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7974 if (avoid_temp_mem)
7975 return NULL_RTX;
7977 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7980 store_constructor (exp, target, 0, int_expr_size (exp), false);
7981 return target;
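/* Illustrative cases: a TREE_STATIC, non-addressable, all-zero BLKmode
   constructor with a usable target is handled entirely by the
   clear_storage call at the top of this function; a constructor of
   simple constants that is better kept in memory becomes a reference to
   pooled memory via expand_expr_constant; only the remaining cases fall
   through to store_constructor.  */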
7985 /* expand_expr: generate code for computing expression EXP.
7986 An rtx for the computed value is returned. The value is never null.
7987 In the case of a void EXP, const0_rtx is returned.
7989 The value may be stored in TARGET if TARGET is nonzero.
7990 TARGET is just a suggestion; callers must assume that
7991 the rtx returned may not be the same as TARGET.
7993 If TARGET is CONST0_RTX, it means that the value will be ignored.
7995 If TMODE is not VOIDmode, it suggests generating the
7996 result in mode TMODE. But this is done only when convenient.
7997 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7998 TMODE is just a suggestion; callers must assume that
7999 the rtx returned may not have mode TMODE.
8001 Note that TARGET may have neither TMODE nor MODE. In that case, it
8002 probably will not be used.
8004 If MODIFIER is EXPAND_SUM then when EXP is an addition
8005 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8006 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8007 products as above, or REG or MEM, or constant.
8008 Ordinarily in such cases we would output mul or add instructions
8009 and then return a pseudo reg containing the sum.
8011 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8012 it also marks a label as absolutely required (it can't be dead).
8013 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8014 This is used for outputting expressions used in initializers.
8016 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8017 with a constant address even if that address is not normally legitimate.
8018 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8020 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8021 a call parameter. Such targets require special care as we haven't yet
8022 marked TARGET so that it's safe from being trashed by libcalls. We
8023 don't want to use TARGET for anything but the final result;
8024 intermediate values must go elsewhere. Additionally, calls to
8025 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8027 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8028 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8029 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8030 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8031 recursively.
8033 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8034 In this case, we don't adjust a returned MEM rtx that wouldn't be
8035 sufficiently aligned for its mode; instead, it's up to the caller
8036 to deal with it afterwards. This is used to make sure that unaligned
8037 base objects for which out-of-bounds accesses are supported, for
8038 example record types with trailing arrays, aren't realigned behind
8039 the back of the caller.
8040 The normal operating mode is to pass FALSE for this parameter. */
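/* A rough illustration of the modifiers above (hypothetical callers):
   code building an address such as &arr[i] may pass EXPAND_SUM and be
   handed back a (PLUS (REG ...) (CONST_INT ...)) rather than a pseudo
   holding the sum, while code filling a stack slot for a call argument
   passes EXPAND_STACK_PARM so that only the final result is written to
   TARGET and intermediate values land elsewhere.  */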
8043 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8044 enum expand_modifier modifier, rtx *alt_rtl,
8045 bool inner_reference_p)
8047 rtx ret;
8049 /* Handle ERROR_MARK before anybody tries to access its type. */
8050 if (TREE_CODE (exp) == ERROR_MARK
8051 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8053 ret = CONST0_RTX (tmode);
8054 return ret ? ret : const0_rtx;
8057 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8058 inner_reference_p);
8059 return ret;
8062 /* Try to expand the conditional expression which is represented by
8063 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
8064 return the rtl reg which represents the result. Otherwise return
8065 NULL_RTX. */
8067 static rtx
8068 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8069 tree treeop1 ATTRIBUTE_UNUSED,
8070 tree treeop2 ATTRIBUTE_UNUSED)
8072 #ifdef HAVE_conditional_move
8073 rtx insn;
8074 rtx op00, op01, op1, op2;
8075 enum rtx_code comparison_code;
8076 machine_mode comparison_mode;
8077 gimple srcstmt;
8078 rtx temp;
8079 tree type = TREE_TYPE (treeop1);
8080 int unsignedp = TYPE_UNSIGNED (type);
8081 machine_mode mode = TYPE_MODE (type);
8082 machine_mode orig_mode = mode;
8084 /* If we cannot do a conditional move on the mode, try doing it
8085 with the promoted mode. */
8086 if (!can_conditionally_move_p (mode))
8088 mode = promote_mode (type, mode, &unsignedp);
8089 if (!can_conditionally_move_p (mode))
8090 return NULL_RTX;
8091 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8093 else
8094 temp = assign_temp (type, 0, 1);
8096 start_sequence ();
8097 expand_operands (treeop1, treeop2,
8098 temp, &op1, &op2, EXPAND_NORMAL);
8100 if (TREE_CODE (treeop0) == SSA_NAME
8101 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8103 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8104 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8105 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8106 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8107 comparison_mode = TYPE_MODE (type);
8108 unsignedp = TYPE_UNSIGNED (type);
8109 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8111 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8113 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8114 enum tree_code cmpcode = TREE_CODE (treeop0);
8115 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8116 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8117 unsignedp = TYPE_UNSIGNED (type);
8118 comparison_mode = TYPE_MODE (type);
8119 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8121 else
8123 op00 = expand_normal (treeop0);
8124 op01 = const0_rtx;
8125 comparison_code = NE;
8126 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8129 if (GET_MODE (op1) != mode)
8130 op1 = gen_lowpart (mode, op1);
8132 if (GET_MODE (op2) != mode)
8133 op2 = gen_lowpart (mode, op2);
8135 /* Try to emit the conditional move. */
8136 insn = emit_conditional_move (temp, comparison_code,
8137 op00, op01, comparison_mode,
8138 op1, op2, mode,
8139 unsignedp);
8141 /* If we could do the conditional move, emit the sequence,
8142 and return. */
8143 if (insn)
8145 rtx_insn *seq = get_insns ();
8146 end_sequence ();
8147 emit_insn (seq);
8148 return convert_modes (orig_mode, mode, temp, 0);
8151 /* Otherwise discard the sequence and fall back to code with
8152 branches. */
8153 end_sequence ();
8154 #endif
8155 return NULL_RTX;
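/* Usage sketch (names illustrative): for "r = a < b ? x : y" on a target
   with conditional moves, the comparison is expanded into OP00/OP01
   above, X and Y into OP1/OP2, and emit_conditional_move produces a
   single cmove into TEMP; if that fails, the NULL_RTX return tells the
   caller to fall back to the branching expansion.  */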
8159 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8160 enum expand_modifier modifier)
8162 rtx op0, op1, op2, temp;
8163 tree type;
8164 int unsignedp;
8165 machine_mode mode;
8166 enum tree_code code = ops->code;
8167 optab this_optab;
8168 rtx subtarget, original_target;
8169 int ignore;
8170 bool reduce_bit_field;
8171 location_t loc = ops->location;
8172 tree treeop0, treeop1, treeop2;
8173 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8174 ? reduce_to_bit_field_precision ((expr), \
8175 target, \
8176 type) \
8177 : (expr))
8179 type = ops->type;
8180 mode = TYPE_MODE (type);
8181 unsignedp = TYPE_UNSIGNED (type);
8183 treeop0 = ops->op0;
8184 treeop1 = ops->op1;
8185 treeop2 = ops->op2;
8187 /* We should be called only on simple (binary or unary) expressions,
8188 exactly those that are valid in gimple expressions that aren't
8189 GIMPLE_SINGLE_RHS (or invalid). */
8190 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8191 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8192 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8194 ignore = (target == const0_rtx
8195 || ((CONVERT_EXPR_CODE_P (code)
8196 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8197 && TREE_CODE (type) == VOID_TYPE));
8199 /* We should be called only if we need the result. */
8200 gcc_assert (!ignore);
8202 /* An operation in what may be a bit-field type needs the
8203 result to be reduced to the precision of the bit-field type,
8204 which is narrower than that of the type's mode. */
8205 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8206 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8208 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8209 target = 0;
8211 /* Use subtarget as the target for operand 0 of a binary operation. */
8212 subtarget = get_subtarget (target);
8213 original_target = target;
8215 switch (code)
8217 case NON_LVALUE_EXPR:
8218 case PAREN_EXPR:
8219 CASE_CONVERT:
8220 if (treeop0 == error_mark_node)
8221 return const0_rtx;
8223 if (TREE_CODE (type) == UNION_TYPE)
8225 tree valtype = TREE_TYPE (treeop0);
8227 /* If both input and output are BLKmode, this conversion isn't doing
8228 anything except possibly changing memory attribute. */
8229 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8231 rtx result = expand_expr (treeop0, target, tmode,
8232 modifier);
8234 result = copy_rtx (result);
8235 set_mem_attributes (result, type, 0);
8236 return result;
8239 if (target == 0)
8241 if (TYPE_MODE (type) != BLKmode)
8242 target = gen_reg_rtx (TYPE_MODE (type));
8243 else
8244 target = assign_temp (type, 1, 1);
8247 if (MEM_P (target))
8248 /* Store data into beginning of memory target. */
8249 store_expr (treeop0,
8250 adjust_address (target, TYPE_MODE (valtype), 0),
8251 modifier == EXPAND_STACK_PARM,
8252 false, TYPE_REVERSE_STORAGE_ORDER (type));
8254 else
8256 gcc_assert (REG_P (target)
8257 && !TYPE_REVERSE_STORAGE_ORDER (type));
8259 /* Store this field into a union of the proper type. */
8260 store_field (target,
8261 MIN ((int_size_in_bytes (TREE_TYPE
8262 (treeop0))
8263 * BITS_PER_UNIT),
8264 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8265 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8266 false, false);
8269 /* Return the entire union. */
8270 return target;
8273 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8275 op0 = expand_expr (treeop0, target, VOIDmode,
8276 modifier);
8278 /* If the signedness of the conversion differs and OP0 is
8279 a promoted SUBREG, clear that indication since we now
8280 have to do the proper extension. */
8281 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8282 && GET_CODE (op0) == SUBREG)
8283 SUBREG_PROMOTED_VAR_P (op0) = 0;
8285 return REDUCE_BIT_FIELD (op0);
8288 op0 = expand_expr (treeop0, NULL_RTX, mode,
8289 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8290 if (GET_MODE (op0) == mode)
8293 /* If OP0 is a constant, just convert it into the proper mode. */
8294 else if (CONSTANT_P (op0))
8296 tree inner_type = TREE_TYPE (treeop0);
8297 machine_mode inner_mode = GET_MODE (op0);
8299 if (inner_mode == VOIDmode)
8300 inner_mode = TYPE_MODE (inner_type);
8302 if (modifier == EXPAND_INITIALIZER)
8303 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8304 subreg_lowpart_offset (mode,
8305 inner_mode));
8306 else
8307 op0 = convert_modes (mode, inner_mode, op0,
8308 TYPE_UNSIGNED (inner_type));
8311 else if (modifier == EXPAND_INITIALIZER)
8312 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8314 else if (target == 0)
8315 op0 = convert_to_mode (mode, op0,
8316 TYPE_UNSIGNED (TREE_TYPE
8317 (treeop0)));
8318 else
8320 convert_move (target, op0,
8321 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8322 op0 = target;
8325 return REDUCE_BIT_FIELD (op0);
8327 case ADDR_SPACE_CONVERT_EXPR:
8329 tree treeop0_type = TREE_TYPE (treeop0);
8330 addr_space_t as_to;
8331 addr_space_t as_from;
8333 gcc_assert (POINTER_TYPE_P (type));
8334 gcc_assert (POINTER_TYPE_P (treeop0_type));
8336 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8337 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8339 /* Conversions between pointers to the same address space should
8340 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8341 gcc_assert (as_to != as_from);
8343 /* Ask target code to handle conversion between pointers
8344 to overlapping address spaces. */
8345 if (targetm.addr_space.subset_p (as_to, as_from)
8346 || targetm.addr_space.subset_p (as_from, as_to))
8348 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8349 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8350 gcc_assert (op0);
8351 return op0;
8354 /* For disjoint address spaces, converting anything but
8355 a null pointer invokes undefined behaviour. We simply
8356 always return a null pointer here. */
8357 return CONST0_RTX (mode);
8360 case POINTER_PLUS_EXPR:
8361 /* Even though the sizetype mode and the pointer's mode can be different,
8362 expand is able to handle this correctly and get the correct result out
8363 of the PLUS_EXPR code. */
8364 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8365 if sizetype precision is smaller than pointer precision. */
8366 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8367 treeop1 = fold_convert_loc (loc, type,
8368 fold_convert_loc (loc, ssizetype,
8369 treeop1));
8370 /* If sizetype precision is larger than pointer precision, truncate the
8371 offset to have matching modes. */
8372 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8373 treeop1 = fold_convert_loc (loc, type, treeop1);
8375 case PLUS_EXPR:
8376 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8377 something else, make sure we add the register to the constant and
8378 then to the other thing. This case can occur during strength
8379 reduction and doing it this way will produce better code if the
8380 frame pointer or argument pointer is eliminated.
8382 fold-const.c will ensure that the constant is always in the inner
8383 PLUS_EXPR, so the only case we need to do anything about is if
8384 sp, ap, or fp is our second argument, in which case we must swap
8385 the innermost first argument and our second argument. */
8387 if (TREE_CODE (treeop0) == PLUS_EXPR
8388 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8389 && TREE_CODE (treeop1) == VAR_DECL
8390 && (DECL_RTL (treeop1) == frame_pointer_rtx
8391 || DECL_RTL (treeop1) == stack_pointer_rtx
8392 || DECL_RTL (treeop1) == arg_pointer_rtx))
8394 gcc_unreachable ();
8397 /* If the result is to be ptr_mode and we are adding an integer to
8398 something, we might be forming a constant. So try to use
8399 plus_constant. If it produces a sum and we can't accept it,
8400 use force_operand. This allows P = &ARR[const] to generate
8401 efficient code on machines where a SYMBOL_REF is not a valid
8402 address.
8404 If this is an EXPAND_SUM call, always return the sum. */
8405 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8406 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8408 if (modifier == EXPAND_STACK_PARM)
8409 target = 0;
8410 if (TREE_CODE (treeop0) == INTEGER_CST
8411 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8412 && TREE_CONSTANT (treeop1))
8414 rtx constant_part;
8415 HOST_WIDE_INT wc;
8416 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8418 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8419 EXPAND_SUM);
8420 /* Use wi::shwi to ensure that the constant is
8421 truncated according to the mode of OP1, then sign extended
8422 to a HOST_WIDE_INT. Using the constant directly can result
8423 in non-canonical RTL in a 64x32 cross compile. */
8424 wc = TREE_INT_CST_LOW (treeop0);
8425 constant_part =
8426 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8427 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8428 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8429 op1 = force_operand (op1, target);
8430 return REDUCE_BIT_FIELD (op1);
8433 else if (TREE_CODE (treeop1) == INTEGER_CST
8434 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8435 && TREE_CONSTANT (treeop0))
8437 rtx constant_part;
8438 HOST_WIDE_INT wc;
8439 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8441 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8442 (modifier == EXPAND_INITIALIZER
8443 ? EXPAND_INITIALIZER : EXPAND_SUM));
8444 if (! CONSTANT_P (op0))
8446 op1 = expand_expr (treeop1, NULL_RTX,
8447 VOIDmode, modifier);
8448 /* Return a PLUS if modifier says it's OK. */
8449 if (modifier == EXPAND_SUM
8450 || modifier == EXPAND_INITIALIZER)
8451 return simplify_gen_binary (PLUS, mode, op0, op1);
8452 goto binop2;
8454 /* Use wi::shwi to ensure that the constant is
8455 truncated according to the mode of OP1, then sign extended
8456 to a HOST_WIDE_INT. Using the constant directly can result
8457 in non-canonical RTL in a 64x32 cross compile. */
8458 wc = TREE_INT_CST_LOW (treeop1);
8459 constant_part
8460 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8461 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8462 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8463 op0 = force_operand (op0, target);
8464 return REDUCE_BIT_FIELD (op0);
8468 /* Use TER to expand pointer addition of a negated value
8469 as pointer subtraction. */
8470 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8471 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8472 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8473 && TREE_CODE (treeop1) == SSA_NAME
8474 && TYPE_MODE (TREE_TYPE (treeop0))
8475 == TYPE_MODE (TREE_TYPE (treeop1)))
8477 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8478 if (def)
8480 treeop1 = gimple_assign_rhs1 (def);
8481 code = MINUS_EXPR;
8482 goto do_minus;
8486 /* No sense saving up arithmetic to be done
8487 if it's all in the wrong mode to form part of an address.
8488 And force_operand won't know whether to sign-extend or
8489 zero-extend. */
8490 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8491 || mode != ptr_mode)
8493 expand_operands (treeop0, treeop1,
8494 subtarget, &op0, &op1, EXPAND_NORMAL);
8495 if (op0 == const0_rtx)
8496 return op1;
8497 if (op1 == const0_rtx)
8498 return op0;
8499 goto binop2;
8502 expand_operands (treeop0, treeop1,
8503 subtarget, &op0, &op1, modifier);
8504 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8506 case MINUS_EXPR:
8507 do_minus:
8508 /* For initializers, we are allowed to return a MINUS of two
8509 symbolic constants. Here we handle all cases when both operands
8510 are constant. */
8511 /* Handle difference of two symbolic constants,
8512 for the sake of an initializer. */
8513 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8514 && really_constant_p (treeop0)
8515 && really_constant_p (treeop1))
8517 expand_operands (treeop0, treeop1,
8518 NULL_RTX, &op0, &op1, modifier);
8520 /* If the last operand is a CONST_INT, use plus_constant of
8521 the negated constant. Else make the MINUS. */
8522 if (CONST_INT_P (op1))
8523 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8524 -INTVAL (op1)));
8525 else
8526 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8529 /* No sense saving up arithmetic to be done
8530 if it's all in the wrong mode to form part of an address.
8531 And force_operand won't know whether to sign-extend or
8532 zero-extend. */
8533 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8534 || mode != ptr_mode)
8535 goto binop;
8537 expand_operands (treeop0, treeop1,
8538 subtarget, &op0, &op1, modifier);
8540 /* Convert A - const to A + (-const). */
8541 if (CONST_INT_P (op1))
8543 op1 = negate_rtx (mode, op1);
8544 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8547 goto binop2;
8549 case WIDEN_MULT_PLUS_EXPR:
8550 case WIDEN_MULT_MINUS_EXPR:
8551 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8552 op2 = expand_normal (treeop2);
8553 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8554 target, unsignedp);
8555 return target;
8557 case WIDEN_MULT_EXPR:
8558 /* If first operand is constant, swap them.
8559 Thus the following special case checks need only
8560 check the second operand. */
8561 if (TREE_CODE (treeop0) == INTEGER_CST)
8563 tree t1 = treeop0;
8564 treeop0 = treeop1;
8565 treeop1 = t1;
8568 /* First, check if we have a multiplication of one signed and one
8569 unsigned operand. */
8570 if (TREE_CODE (treeop1) != INTEGER_CST
8571 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8572 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8574 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8575 this_optab = usmul_widen_optab;
8576 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8577 != CODE_FOR_nothing)
8579 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8580 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8581 EXPAND_NORMAL);
8582 else
8583 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8584 EXPAND_NORMAL);
8585 /* op0 and op1 might still be constant, despite the above
8586 != INTEGER_CST check. Handle it. */
8587 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8589 op0 = convert_modes (innermode, mode, op0, true);
8590 op1 = convert_modes (innermode, mode, op1, false);
8591 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8592 target, unsignedp));
8594 goto binop3;
8597 /* Check for a multiplication with matching signedness. */
8598 else if ((TREE_CODE (treeop1) == INTEGER_CST
8599 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8600 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8601 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8603 tree op0type = TREE_TYPE (treeop0);
8604 machine_mode innermode = TYPE_MODE (op0type);
8605 bool zextend_p = TYPE_UNSIGNED (op0type);
8606 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8607 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8609 if (TREE_CODE (treeop0) != INTEGER_CST)
8611 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8612 != CODE_FOR_nothing)
8614 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8615 EXPAND_NORMAL);
8616 /* op0 and op1 might still be constant, despite the above
8617 != INTEGER_CST check. Handle it. */
8618 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8620 widen_mult_const:
8621 op0 = convert_modes (innermode, mode, op0, zextend_p);
8623 op1 = convert_modes (innermode, mode, op1,
8624 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8625 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8626 target,
8627 unsignedp));
8629 temp = expand_widening_mult (mode, op0, op1, target,
8630 unsignedp, this_optab);
8631 return REDUCE_BIT_FIELD (temp);
8633 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8634 != CODE_FOR_nothing
8635 && innermode == word_mode)
8637 rtx htem, hipart;
8638 op0 = expand_normal (treeop0);
8639 if (TREE_CODE (treeop1) == INTEGER_CST)
8640 op1 = convert_modes (innermode, mode,
8641 expand_normal (treeop1),
8642 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8643 else
8644 op1 = expand_normal (treeop1);
8645 /* op0 and op1 might still be constant, despite the above
8646 != INTEGER_CST check. Handle it. */
8647 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8648 goto widen_mult_const;
8649 temp = expand_binop (mode, other_optab, op0, op1, target,
8650 unsignedp, OPTAB_LIB_WIDEN);
8651 hipart = gen_highpart (innermode, temp);
8652 htem = expand_mult_highpart_adjust (innermode, hipart,
8653 op0, op1, hipart,
8654 zextend_p);
8655 if (htem != hipart)
8656 emit_move_insn (hipart, htem);
8657 return REDUCE_BIT_FIELD (temp);
8661 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8662 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8663 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8664 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8666 case FMA_EXPR:
8668 optab opt = fma_optab;
8669 gimple def0, def2;
8671 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8672 call. */
8673 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8675 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8676 tree call_expr;
8678 gcc_assert (fn != NULL_TREE);
8679 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8680 return expand_builtin (call_expr, target, subtarget, mode, false);
8683 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8684 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8686 op0 = op2 = NULL;
8688 if (def0 && def2
8689 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8691 opt = fnms_optab;
8692 op0 = expand_normal (gimple_assign_rhs1 (def0));
8693 op2 = expand_normal (gimple_assign_rhs1 (def2));
8695 else if (def0
8696 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8698 opt = fnma_optab;
8699 op0 = expand_normal (gimple_assign_rhs1 (def0));
8701 else if (def2
8702 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8704 opt = fms_optab;
8705 op2 = expand_normal (gimple_assign_rhs1 (def2));
8708 if (op0 == NULL)
8709 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8710 if (op2 == NULL)
8711 op2 = expand_normal (treeop2);
8712 op1 = expand_normal (treeop1);
8714 return expand_ternary_op (TYPE_MODE (type), opt,
8715 op0, op1, op2, target, 0);
8718 case MULT_EXPR:
8719 /* If this is a fixed-point operation, then we cannot use the code
8720 below because "expand_mult" doesn't support sat/no-sat fixed-point
8721 multiplications. */
8722 if (ALL_FIXED_POINT_MODE_P (mode))
8723 goto binop;
8725 /* If first operand is constant, swap them.
8726 Thus the following special case checks need only
8727 check the second operand. */
8728 if (TREE_CODE (treeop0) == INTEGER_CST)
8730 tree t1 = treeop0;
8731 treeop0 = treeop1;
8732 treeop1 = t1;
8735 /* Attempt to return something suitable for generating an
8736 indexed address, for machines that support that. */
8738 if (modifier == EXPAND_SUM && mode == ptr_mode
8739 && tree_fits_shwi_p (treeop1))
8741 tree exp1 = treeop1;
8743 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8744 EXPAND_SUM);
8746 if (!REG_P (op0))
8747 op0 = force_operand (op0, NULL_RTX);
8748 if (!REG_P (op0))
8749 op0 = copy_to_mode_reg (mode, op0);
8751 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8752 gen_int_mode (tree_to_shwi (exp1),
8753 TYPE_MODE (TREE_TYPE (exp1)))));
8756 if (modifier == EXPAND_STACK_PARM)
8757 target = 0;
8759 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8760 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8762 case TRUNC_DIV_EXPR:
8763 case FLOOR_DIV_EXPR:
8764 case CEIL_DIV_EXPR:
8765 case ROUND_DIV_EXPR:
8766 case EXACT_DIV_EXPR:
8767 /* If this is a fixed-point operation, then we cannot use the code
8768 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8769 divisions. */
8770 if (ALL_FIXED_POINT_MODE_P (mode))
8771 goto binop;
8773 if (modifier == EXPAND_STACK_PARM)
8774 target = 0;
8775 /* Possible optimization: compute the dividend with EXPAND_SUM;
8776 then, if the divisor is constant, we can optimize the case
8777 where some terms of the dividend have coeffs divisible by it. */
8778 expand_operands (treeop0, treeop1,
8779 subtarget, &op0, &op1, EXPAND_NORMAL);
8780 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8782 case RDIV_EXPR:
8783 goto binop;
8785 case MULT_HIGHPART_EXPR:
8786 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8787 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8788 gcc_assert (temp);
8789 return temp;
8791 case TRUNC_MOD_EXPR:
8792 case FLOOR_MOD_EXPR:
8793 case CEIL_MOD_EXPR:
8794 case ROUND_MOD_EXPR:
8795 if (modifier == EXPAND_STACK_PARM)
8796 target = 0;
8797 expand_operands (treeop0, treeop1,
8798 subtarget, &op0, &op1, EXPAND_NORMAL);
8799 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8801 case FIXED_CONVERT_EXPR:
8802 op0 = expand_normal (treeop0);
8803 if (target == 0 || modifier == EXPAND_STACK_PARM)
8804 target = gen_reg_rtx (mode);
8806 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8807 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8808 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8809 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8810 else
8811 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8812 return target;
8814 case FIX_TRUNC_EXPR:
8815 op0 = expand_normal (treeop0);
8816 if (target == 0 || modifier == EXPAND_STACK_PARM)
8817 target = gen_reg_rtx (mode);
8818 expand_fix (target, op0, unsignedp);
8819 return target;
8821 case FLOAT_EXPR:
8822 op0 = expand_normal (treeop0);
8823 if (target == 0 || modifier == EXPAND_STACK_PARM)
8824 target = gen_reg_rtx (mode);
8825 /* expand_float can't figure out what to do if FROM has VOIDmode.
8826 So give it the correct mode. With -O, cse will optimize this. */
8827 if (GET_MODE (op0) == VOIDmode)
8828 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8829 op0);
8830 expand_float (target, op0,
8831 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8832 return target;
8834 case NEGATE_EXPR:
8835 op0 = expand_expr (treeop0, subtarget,
8836 VOIDmode, EXPAND_NORMAL);
8837 if (modifier == EXPAND_STACK_PARM)
8838 target = 0;
8839 temp = expand_unop (mode,
8840 optab_for_tree_code (NEGATE_EXPR, type,
8841 optab_default),
8842 op0, target, 0);
8843 gcc_assert (temp);
8844 return REDUCE_BIT_FIELD (temp);
8846 case ABS_EXPR:
8847 op0 = expand_expr (treeop0, subtarget,
8848 VOIDmode, EXPAND_NORMAL);
8849 if (modifier == EXPAND_STACK_PARM)
8850 target = 0;
8852 /* ABS_EXPR is not valid for complex arguments. */
8853 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8854 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8856 /* Unsigned abs is simply the operand. Testing here means we don't
8857 risk generating incorrect code below. */
8858 if (TYPE_UNSIGNED (type))
8859 return op0;
8861 return expand_abs (mode, op0, target, unsignedp,
8862 safe_from_p (target, treeop0, 1));
8864 case MAX_EXPR:
8865 case MIN_EXPR:
8866 target = original_target;
8867 if (target == 0
8868 || modifier == EXPAND_STACK_PARM
8869 || (MEM_P (target) && MEM_VOLATILE_P (target))
8870 || GET_MODE (target) != mode
8871 || (REG_P (target)
8872 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8873 target = gen_reg_rtx (mode);
8874 expand_operands (treeop0, treeop1,
8875 target, &op0, &op1, EXPAND_NORMAL);
8877 /* First try to do it with a special MIN or MAX instruction.
8878 If that does not win, use a conditional jump to select the proper
8879 value. */
8880 this_optab = optab_for_tree_code (code, type, optab_default);
8881 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8882 OPTAB_WIDEN);
8883 if (temp != 0)
8884 return temp;
8886 /* At this point, a MEM target is no longer useful; we will get better
8887 code without it. */
8889 if (! REG_P (target))
8890 target = gen_reg_rtx (mode);
8892 /* If op1 was placed in target, swap op0 and op1. */
8893 if (target != op0 && target == op1)
8895 temp = op0;
8896 op0 = op1;
8897 op1 = temp;
8900 /* We generate better code and avoid problems with op1 mentioning
8901 target by forcing op1 into a pseudo if it isn't a constant. */
8902 if (! CONSTANT_P (op1))
8903 op1 = force_reg (mode, op1);
8906 enum rtx_code comparison_code;
8907 rtx cmpop1 = op1;
8909 if (code == MAX_EXPR)
8910 comparison_code = unsignedp ? GEU : GE;
8911 else
8912 comparison_code = unsignedp ? LEU : LE;
8914 /* Canonicalize to comparisons against 0. */
8915 if (op1 == const1_rtx)
8917 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8918 or (a != 0 ? a : 1) for unsigned.
8919 For MIN we are safe converting (a <= 1 ? a : 1)
8920 into (a <= 0 ? a : 1) */
8921 cmpop1 = const0_rtx;
8922 if (code == MAX_EXPR)
8923 comparison_code = unsignedp ? NE : GT;
8925 if (op1 == constm1_rtx && !unsignedp)
8927 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8928 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8929 cmpop1 = const0_rtx;
8930 if (code == MIN_EXPR)
8931 comparison_code = LT;
8933 #ifdef HAVE_conditional_move
8934 /* Use a conditional move if possible. */
8935 if (can_conditionally_move_p (mode))
8937 rtx insn;
8939 start_sequence ();
8941 /* Try to emit the conditional move. */
8942 insn = emit_conditional_move (target, comparison_code,
8943 op0, cmpop1, mode,
8944 op0, op1, mode,
8945 unsignedp);
8947 /* If we could do the conditional move, emit the sequence,
8948 and return. */
8949 if (insn)
8951 rtx_insn *seq = get_insns ();
8952 end_sequence ();
8953 emit_insn (seq);
8954 return target;
8957 /* Otherwise discard the sequence and fall back to code with
8958 branches. */
8959 end_sequence ();
8961 #endif
8962 if (target != op0)
8963 emit_move_insn (target, op0);
8965 temp = gen_label_rtx ();
8966 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8967 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8968 -1);
8970 emit_move_insn (target, op1);
8971 emit_label (temp);
8972 return target;
8974 case BIT_NOT_EXPR:
8975 op0 = expand_expr (treeop0, subtarget,
8976 VOIDmode, EXPAND_NORMAL);
8977 if (modifier == EXPAND_STACK_PARM)
8978 target = 0;
8979 /* In case we have to reduce the result to bitfield precision
8980 for unsigned bitfield expand this as XOR with a proper constant
8981 instead. */
8982 if (reduce_bit_field && TYPE_UNSIGNED (type))
8984 wide_int mask = wi::mask (TYPE_PRECISION (type),
8985 false, GET_MODE_PRECISION (mode));
8987 temp = expand_binop (mode, xor_optab, op0,
8988 immed_wide_int_const (mask, mode),
8989 target, 1, OPTAB_LIB_WIDEN);
8991 else
8992 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8993 gcc_assert (temp);
8994 return temp;
8996 /* ??? Can optimize bitwise operations with one arg constant.
8997 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8998 and (a bitwise1 b) bitwise2 b (etc)
8999 but that is probably not worthwhile. */
9001 case BIT_AND_EXPR:
9002 case BIT_IOR_EXPR:
9003 case BIT_XOR_EXPR:
9004 goto binop;
9006 case LROTATE_EXPR:
9007 case RROTATE_EXPR:
9008 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9009 || (GET_MODE_PRECISION (TYPE_MODE (type))
9010 == TYPE_PRECISION (type)));
9011 /* fall through */
9013 case LSHIFT_EXPR:
9014 case RSHIFT_EXPR:
9015 /* If this is a fixed-point operation, then we cannot use the code
9016 below because "expand_shift" doesn't support sat/no-sat fixed-point
9017 shifts. */
9018 if (ALL_FIXED_POINT_MODE_P (mode))
9019 goto binop;
9021 if (! safe_from_p (subtarget, treeop1, 1))
9022 subtarget = 0;
9023 if (modifier == EXPAND_STACK_PARM)
9024 target = 0;
9025 op0 = expand_expr (treeop0, subtarget,
9026 VOIDmode, EXPAND_NORMAL);
9027 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9028 unsignedp);
9029 if (code == LSHIFT_EXPR)
9030 temp = REDUCE_BIT_FIELD (temp);
9031 return temp;
9033 /* Could determine the answer when only additive constants differ. Also,
9034 the addition of one can be handled by changing the condition. */
9035 case LT_EXPR:
9036 case LE_EXPR:
9037 case GT_EXPR:
9038 case GE_EXPR:
9039 case EQ_EXPR:
9040 case NE_EXPR:
9041 case UNORDERED_EXPR:
9042 case ORDERED_EXPR:
9043 case UNLT_EXPR:
9044 case UNLE_EXPR:
9045 case UNGT_EXPR:
9046 case UNGE_EXPR:
9047 case UNEQ_EXPR:
9048 case LTGT_EXPR:
9049 temp = do_store_flag (ops,
9050 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9051 tmode != VOIDmode ? tmode : mode);
9052 if (temp)
9053 return temp;
9055 /* Use a compare and a jump for BLKmode comparisons, or for function
9056 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9058 if ((target == 0
9059 || modifier == EXPAND_STACK_PARM
9060 || ! safe_from_p (target, treeop0, 1)
9061 || ! safe_from_p (target, treeop1, 1)
9062 /* Make sure we don't have a hard reg (such as function's return
9063 value) live across basic blocks, if not optimizing. */
9064 || (!optimize && REG_P (target)
9065 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9066 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9068 emit_move_insn (target, const0_rtx);
9070 op1 = gen_label_rtx ();
9071 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9073 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9074 emit_move_insn (target, constm1_rtx);
9075 else
9076 emit_move_insn (target, const1_rtx);
9078 emit_label (op1);
9079 return target;
9081 case COMPLEX_EXPR:
9082 /* Get the rtx code of the operands. */
9083 op0 = expand_normal (treeop0);
9084 op1 = expand_normal (treeop1);
9086 if (!target)
9087 target = gen_reg_rtx (TYPE_MODE (type));
9088 else
9089 /* If target overlaps with op1, then either we need to force
9090 op1 into a pseudo (if target also overlaps with op0),
9091 or write the complex parts in reverse order. */
9092 switch (GET_CODE (target))
9094 case CONCAT:
9095 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9097 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9099 complex_expr_force_op1:
9100 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9101 emit_move_insn (temp, op1);
9102 op1 = temp;
9103 break;
9105 complex_expr_swap_order:
9106 /* Move the imaginary (op1) and real (op0) parts to their
9107 location. */
9108 write_complex_part (target, op1, true);
9109 write_complex_part (target, op0, false);
9111 return target;
9113 break;
9114 case MEM:
9115 temp = adjust_address_nv (target,
9116 GET_MODE_INNER (GET_MODE (target)), 0);
9117 if (reg_overlap_mentioned_p (temp, op1))
9119 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9120 temp = adjust_address_nv (target, imode,
9121 GET_MODE_SIZE (imode));
9122 if (reg_overlap_mentioned_p (temp, op0))
9123 goto complex_expr_force_op1;
9124 goto complex_expr_swap_order;
9126 break;
9127 default:
9128 if (reg_overlap_mentioned_p (target, op1))
9130 if (reg_overlap_mentioned_p (target, op0))
9131 goto complex_expr_force_op1;
9132 goto complex_expr_swap_order;
9134 break;
9137 /* Move the real (op0) and imaginary (op1) parts to their location. */
9138 write_complex_part (target, op0, false);
9139 write_complex_part (target, op1, true);
9141 return target;
9143 case WIDEN_SUM_EXPR:
9145 tree oprnd0 = treeop0;
9146 tree oprnd1 = treeop1;
9148 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9149 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9150 target, unsignedp);
9151 return target;
9154 case REDUC_MAX_EXPR:
9155 case REDUC_MIN_EXPR:
9156 case REDUC_PLUS_EXPR:
9158 op0 = expand_normal (treeop0);
9159 this_optab = optab_for_tree_code (code, type, optab_default);
9160 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9162 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9164 struct expand_operand ops[2];
9165 enum insn_code icode = optab_handler (this_optab, vec_mode);
9167 create_output_operand (&ops[0], target, mode);
9168 create_input_operand (&ops[1], op0, vec_mode);
9169 if (maybe_expand_insn (icode, 2, ops))
9171 target = ops[0].value;
9172 if (GET_MODE (target) != mode)
9173 return gen_lowpart (tmode, target);
9174 return target;
9177 /* Fall back to optab with vector result, and then extract scalar. */
9178 this_optab = scalar_reduc_to_vector (this_optab, type);
9179 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9180 gcc_assert (temp);
9181 /* The tree code produces a scalar result, but (somewhat by convention)
9182 the optab produces a vector with the result in element 0 if
9183 little-endian, or element N-1 if big-endian. So pull the scalar
9184 result out of that element. */
9185 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9186 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9187 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9188 target, mode, mode, false);
9189 gcc_assert (temp);
9190 return temp;
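/* Example of the layout convention above (illustrative): reducing a V4SI
   vector on a big-endian target extracts element 3, i.e. the bit field
   starting at 3 * 32 bits, whereas a little-endian target extracts
   element 0.  */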
9193 case VEC_RSHIFT_EXPR:
9195 target = expand_vec_shift_expr (ops, target);
9196 return target;
9199 case VEC_UNPACK_HI_EXPR:
9200 case VEC_UNPACK_LO_EXPR:
9202 op0 = expand_normal (treeop0);
9203 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9204 target, unsignedp);
9205 gcc_assert (temp);
9206 return temp;
9209 case VEC_UNPACK_FLOAT_HI_EXPR:
9210 case VEC_UNPACK_FLOAT_LO_EXPR:
9212 op0 = expand_normal (treeop0);
9213 /* The signedness is determined from input operand. */
9214 temp = expand_widen_pattern_expr
9215 (ops, op0, NULL_RTX, NULL_RTX,
9216 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9218 gcc_assert (temp);
9219 return temp;
9222 case VEC_WIDEN_MULT_HI_EXPR:
9223 case VEC_WIDEN_MULT_LO_EXPR:
9224 case VEC_WIDEN_MULT_EVEN_EXPR:
9225 case VEC_WIDEN_MULT_ODD_EXPR:
9226 case VEC_WIDEN_LSHIFT_HI_EXPR:
9227 case VEC_WIDEN_LSHIFT_LO_EXPR:
9228 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9229 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9230 target, unsignedp);
9231 gcc_assert (target);
9232 return target;
9234 case VEC_PACK_TRUNC_EXPR:
9235 case VEC_PACK_SAT_EXPR:
9236 case VEC_PACK_FIX_TRUNC_EXPR:
9237 mode = TYPE_MODE (TREE_TYPE (treeop0));
9238 goto binop;
9240 case VEC_PERM_EXPR:
9241 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9242 op2 = expand_normal (treeop2);
9244 /* Careful here: if the target doesn't support integral vector modes,
9245 a constant selection vector could wind up smooshed into a normal
9246 integral constant. */
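/* E.g. a selector like { 0, 1, 2, 3 } may have been folded into a
   plain integer constant; rebuild it as a CONST_VECTOR in an integer
   vector mode of the same shape before calling expand_vec_perm.  */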
9247 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9249 tree sel_type = TREE_TYPE (treeop2);
9250 machine_mode vmode
9251 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9252 TYPE_VECTOR_SUBPARTS (sel_type));
9253 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9254 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9255 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9257 else
9258 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9260 temp = expand_vec_perm (mode, op0, op1, op2, target);
9261 gcc_assert (temp);
9262 return temp;
9264 case DOT_PROD_EXPR:
9266 tree oprnd0 = treeop0;
9267 tree oprnd1 = treeop1;
9268 tree oprnd2 = treeop2;
9269 rtx op2;
9271 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9272 op2 = expand_normal (oprnd2);
9273 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9274 target, unsignedp);
9275 return target;
9278 case SAD_EXPR:
9280 tree oprnd0 = treeop0;
9281 tree oprnd1 = treeop1;
9282 tree oprnd2 = treeop2;
9283 rtx op2;
9285 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9286 op2 = expand_normal (oprnd2);
9287 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9288 target, unsignedp);
9289 return target;
9292 case REALIGN_LOAD_EXPR:
9294 tree oprnd0 = treeop0;
9295 tree oprnd1 = treeop1;
9296 tree oprnd2 = treeop2;
9297 rtx op2;
9299 this_optab = optab_for_tree_code (code, type, optab_default);
9300 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9301 op2 = expand_normal (oprnd2);
9302 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9303 target, unsignedp);
9304 gcc_assert (temp);
9305 return temp;
9308 case COND_EXPR:
9309 /* A COND_EXPR with its type being VOID_TYPE represents a
9310 conditional jump and is handled in
9311 expand_gimple_cond_expr. */
9312 gcc_assert (!VOID_TYPE_P (type));
9314 /* Note that COND_EXPRs whose type is a structure or union
9315 are required to be constructed to contain assignments of
9316 a temporary variable, so that we can evaluate them here
9317 for side effect only. If type is void, we must do likewise. */
9319 gcc_assert (!TREE_ADDRESSABLE (type)
9320 && !ignore
9321 && TREE_TYPE (treeop1) != void_type_node
9322 && TREE_TYPE (treeop2) != void_type_node);
9324 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9325 if (temp)
9326 return temp;
9328 /* If we are not to produce a result, we have no target. Otherwise,
9329 if a target was specified use it; it will not be used as an
9330 intermediate target unless it is safe. If no target, use a
9331 temporary. */
9333 if (modifier != EXPAND_STACK_PARM
9334 && original_target
9335 && safe_from_p (original_target, treeop0, 1)
9336 && GET_MODE (original_target) == mode
9337 && !MEM_P (original_target))
9338 temp = original_target;
9339 else
9340 temp = assign_temp (type, 0, 1);
9342 do_pending_stack_adjust ();
9343 NO_DEFER_POP;
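/* Schematically, the code emitted below is
     if (!treeop0) goto lab_false;
     temp = treeop1; goto lab_end;
     lab_false: temp = treeop2;
     lab_end:
   with op0 and op1 holding the two labels.  */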
9344 op0 = gen_label_rtx ();
9345 op1 = gen_label_rtx ();
9346 jumpifnot (treeop0, op0, -1);
9347 store_expr (treeop1, temp,
9348 modifier == EXPAND_STACK_PARM,
9349 false, false);
9351 emit_jump_insn (gen_jump (op1));
9352 emit_barrier ();
9353 emit_label (op0);
9354 store_expr (treeop2, temp,
9355 modifier == EXPAND_STACK_PARM,
9356 false, false);
9358 emit_label (op1);
9359 OK_DEFER_POP;
9360 return temp;
9362 case VEC_COND_EXPR:
9363 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9364 return target;
9366 default:
9367 gcc_unreachable ();
9370 /* Here to do an ordinary binary operator. */
9371 binop:
9372 expand_operands (treeop0, treeop1,
9373 subtarget, &op0, &op1, EXPAND_NORMAL);
9374 binop2:
9375 this_optab = optab_for_tree_code (code, type, optab_default);
9376 binop3:
9377 if (modifier == EXPAND_STACK_PARM)
9378 target = 0;
9379 temp = expand_binop (mode, this_optab, op0, op1, target,
9380 unsignedp, OPTAB_LIB_WIDEN);
9381 gcc_assert (temp);
9382 /* Bitwise operations do not need bitfield reduction as we expect their
9383 operands to be properly truncated. */
9384 if (code == BIT_XOR_EXPR
9385 || code == BIT_AND_EXPR
9386 || code == BIT_IOR_EXPR)
9387 return temp;
9388 return REDUCE_BIT_FIELD (temp);
9390 #undef REDUCE_BIT_FIELD
9393 /* Return TRUE if expression STMT is suitable for replacement.
9394 Never consider memory loads as replaceable, because those don't ever lead
9395 into constant expressions. */
9397 static bool
9398 stmt_is_replaceable_p (gimple stmt)
9400 if (ssa_is_replaceable_p (stmt))
9402 /* Don't move around loads. */
9403 if (!gimple_assign_single_p (stmt)
9404 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9405 return true;
9407 return false;
9411 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9412 enum expand_modifier modifier, rtx *alt_rtl,
9413 bool inner_reference_p)
9415 rtx op0, op1, temp, decl_rtl;
9416 tree type;
9417 int unsignedp;
9418 machine_mode mode;
9419 enum tree_code code = TREE_CODE (exp);
9420 rtx subtarget, original_target;
9421 int ignore;
9422 tree context;
9423 bool reduce_bit_field;
9424 location_t loc = EXPR_LOCATION (exp);
9425 struct separate_ops ops;
9426 tree treeop0, treeop1, treeop2;
9427 tree ssa_name = NULL_TREE;
9428 gimple g;
9430 type = TREE_TYPE (exp);
9431 mode = TYPE_MODE (type);
9432 unsignedp = TYPE_UNSIGNED (type);
9434 treeop0 = treeop1 = treeop2 = NULL_TREE;
9435 if (!VL_EXP_CLASS_P (exp))
9436 switch (TREE_CODE_LENGTH (code))
9438 default:
9439 case 3: treeop2 = TREE_OPERAND (exp, 2);
9440 case 2: treeop1 = TREE_OPERAND (exp, 1);
9441 case 1: treeop0 = TREE_OPERAND (exp, 0);
9442 case 0: break;
9444 ops.code = code;
9445 ops.type = type;
9446 ops.op0 = treeop0;
9447 ops.op1 = treeop1;
9448 ops.op2 = treeop2;
9449 ops.location = loc;
9451 ignore = (target == const0_rtx
9452 || ((CONVERT_EXPR_CODE_P (code)
9453 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9454 && TREE_CODE (type) == VOID_TYPE));
9456 /* An operation in what may be a bit-field type needs the
9457 result to be reduced to the precision of the bit-field type,
9458 which is narrower than that of the type's mode. */
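/* E.g. an addition in a 3-bit bit-field type is carried out in QImode;
   the result may have bits set above bit 2, and those must be masked
   off (or sign-extended away) to get back to the 3-bit precision.  */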
9459 reduce_bit_field = (!ignore
9460 && INTEGRAL_TYPE_P (type)
9461 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9463 /* If we are going to ignore this result, we need only do something
9464 if there is a side-effect somewhere in the expression. If there
9465 is, short-circuit the most common cases here. Note that we must
9466 not call expand_expr with anything but const0_rtx in case this
9467 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9469 if (ignore)
9471 if (! TREE_SIDE_EFFECTS (exp))
9472 return const0_rtx;
9474 /* Ensure we reference a volatile object even if value is ignored, but
9475 don't do this if all we are doing is taking its address. */
9476 if (TREE_THIS_VOLATILE (exp)
9477 && TREE_CODE (exp) != FUNCTION_DECL
9478 && mode != VOIDmode && mode != BLKmode
9479 && modifier != EXPAND_CONST_ADDRESS)
9481 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9482 if (MEM_P (temp))
9483 copy_to_reg (temp);
9484 return const0_rtx;
9487 if (TREE_CODE_CLASS (code) == tcc_unary
9488 || code == BIT_FIELD_REF
9489 || code == COMPONENT_REF
9490 || code == INDIRECT_REF)
9491 return expand_expr (treeop0, const0_rtx, VOIDmode,
9492 modifier);
9494 else if (TREE_CODE_CLASS (code) == tcc_binary
9495 || TREE_CODE_CLASS (code) == tcc_comparison
9496 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9498 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9499 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9500 return const0_rtx;
9503 target = 0;
9506 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9507 target = 0;
9509 /* Use subtarget as the target for operand 0 of a binary operation. */
9510 subtarget = get_subtarget (target);
9511 original_target = target;
9513 switch (code)
9515 case LABEL_DECL:
9517 tree function = decl_function_context (exp);
9519 temp = label_rtx (exp);
9520 temp = gen_rtx_LABEL_REF (Pmode, temp);
9522 if (function != current_function_decl
9523 && function != 0)
9524 LABEL_REF_NONLOCAL_P (temp) = 1;
9526 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9527 return temp;
9530 case SSA_NAME:
9531 /* ??? ivopts calls the expander without any preparation from
9532 out-of-ssa, so fake instructions as if this were an access to the
9533 base variable. This unnecessarily allocates a pseudo; see how we can
9534 reuse it if partition base vars have it set already.
9535 if (!currently_expanding_to_rtl)
9537 tree var = SSA_NAME_VAR (exp);
9538 if (var && DECL_RTL_SET_P (var))
9539 return DECL_RTL (var);
9540 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9541 LAST_VIRTUAL_REGISTER + 1);
9544 g = get_gimple_for_ssa_name (exp);
9545 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9546 if (g == NULL
9547 && modifier == EXPAND_INITIALIZER
9548 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9549 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9550 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9551 g = SSA_NAME_DEF_STMT (exp);
9552 if (g)
9554 rtx r;
9555 ops.code = gimple_assign_rhs_code (g);
9556 switch (get_gimple_rhs_class (ops.code))
9558 case GIMPLE_TERNARY_RHS:
9559 ops.op2 = gimple_assign_rhs3 (g);
9560 /* Fallthru */
9561 case GIMPLE_BINARY_RHS:
9562 ops.op1 = gimple_assign_rhs2 (g);
9563 /* Fallthru */
9564 case GIMPLE_UNARY_RHS:
9565 ops.op0 = gimple_assign_rhs1 (g);
9566 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9567 ops.location = gimple_location (g);
9568 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9569 break;
9570 case GIMPLE_SINGLE_RHS:
9572 location_t saved_loc = curr_insn_location ();
9573 set_curr_insn_location (gimple_location (g));
9574 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9575 tmode, modifier, NULL, inner_reference_p);
9576 set_curr_insn_location (saved_loc);
9577 break;
9579 default:
9580 gcc_unreachable ();
9582 if (REG_P (r) && !REG_EXPR (r))
9583 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9584 return r;
9587 ssa_name = exp;
9588 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9589 exp = SSA_NAME_VAR (ssa_name);
9590 goto expand_decl_rtl;
9592 case PARM_DECL:
9593 case VAR_DECL:
9594 /* If a static var's type was incomplete when the decl was written,
9595 but the type is complete now, lay out the decl now. */
9596 if (DECL_SIZE (exp) == 0
9597 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9598 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9599 layout_decl (exp, 0);
9601 /* ... fall through ... */
9603 case FUNCTION_DECL:
9604 case RESULT_DECL:
9605 decl_rtl = DECL_RTL (exp);
9606 expand_decl_rtl:
9607 gcc_assert (decl_rtl);
9608 decl_rtl = copy_rtx (decl_rtl);
9609 /* Record writes to register variables. */
9610 if (modifier == EXPAND_WRITE
9611 && REG_P (decl_rtl)
9612 && HARD_REGISTER_P (decl_rtl))
9613 add_to_hard_reg_set (&crtl->asm_clobbers,
9614 GET_MODE (decl_rtl), REGNO (decl_rtl));
9616 /* Ensure variable marked as used even if it doesn't go through
9617 a parser. If it hasn't been used yet, write out an external
9618 definition. */
9619 TREE_USED (exp) = 1;
9621 /* Show we haven't gotten RTL for this yet. */
9622 temp = 0;
9624 /* Variables inherited from containing functions should have
9625 been lowered by this point. */
9626 context = decl_function_context (exp);
9627 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9628 || context == current_function_decl
9629 || TREE_STATIC (exp)
9630 || DECL_EXTERNAL (exp)
9631 /* ??? C++ creates functions that are not TREE_STATIC. */
9632 || TREE_CODE (exp) == FUNCTION_DECL);
9634 /* This is the case of an array whose size is to be determined
9635 from its initializer, while the initializer is still being parsed.
9636 ??? We aren't parsing while expanding anymore. */
9638 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9639 temp = validize_mem (decl_rtl);
9641 /* If DECL_RTL is memory, we are in the normal case and the
9642 address is not valid, get the address into a register. */
9644 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9646 if (alt_rtl)
9647 *alt_rtl = decl_rtl;
9648 decl_rtl = use_anchored_address (decl_rtl);
9649 if (modifier != EXPAND_CONST_ADDRESS
9650 && modifier != EXPAND_SUM
9651 && !memory_address_addr_space_p (DECL_MODE (exp),
9652 XEXP (decl_rtl, 0),
9653 MEM_ADDR_SPACE (decl_rtl)))
9654 temp = replace_equiv_address (decl_rtl,
9655 copy_rtx (XEXP (decl_rtl, 0)));
9658 /* If we got something, return it. But first, set the alignment
9659 if the address is a register. */
9660 if (temp != 0)
9662 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9663 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9665 return temp;
9668 /* If the mode of DECL_RTL does not match that of the decl,
9669 there are two cases: we are dealing with a BLKmode value
9670 that is returned in a register, or we are dealing with
9671 a promoted value. In the latter case, return a SUBREG
9672 of the wanted mode, but mark it so that we know that it
9673 was already extended. */
9674 if (REG_P (decl_rtl)
9675 && DECL_MODE (exp) != BLKmode
9676 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9678 machine_mode pmode;
9680 /* Get the signedness to be used for this variable. Ensure we get
9681 the same mode we got when the variable was declared. */
9682 if (code == SSA_NAME
9683 && (g = SSA_NAME_DEF_STMT (ssa_name))
9684 && gimple_code (g) == GIMPLE_CALL
9685 && !gimple_call_internal_p (g))
9686 pmode = promote_function_mode (type, mode, &unsignedp,
9687 gimple_call_fntype (g),
9689 else
9690 pmode = promote_decl_mode (exp, &unsignedp);
9691 gcc_assert (GET_MODE (decl_rtl) == pmode);
9693 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9694 SUBREG_PROMOTED_VAR_P (temp) = 1;
9695 SUBREG_PROMOTED_SET (temp, unsignedp);
9696 return temp;
9699 return decl_rtl;
9701 case INTEGER_CST:
9702 /* Given that TYPE_PRECISION (type) is not always equal to
9703 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9704 the former to the latter according to the signedness of the
9705 type. */
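/* E.g. for a 1-bit type the bit pattern 1 expands to the constant 1
   if the type is unsigned, but to -1 (sign-extended) if it is signed.  */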
9706 temp = immed_wide_int_const (wide_int::from
9707 (exp,
9708 GET_MODE_PRECISION (TYPE_MODE (type)),
9709 TYPE_SIGN (type)),
9710 TYPE_MODE (type));
9711 return temp;
9713 case VECTOR_CST:
9715 tree tmp = NULL_TREE;
9716 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9717 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9718 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9719 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9720 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9721 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9722 return const_vector_from_tree (exp);
9723 if (GET_MODE_CLASS (mode) == MODE_INT)
9725 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9726 if (type_for_mode)
9727 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9729 if (!tmp)
9731 vec<constructor_elt, va_gc> *v;
9732 unsigned i;
9733 vec_alloc (v, VECTOR_CST_NELTS (exp));
9734 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9735 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9736 tmp = build_constructor (type, v);
9738 return expand_expr (tmp, ignore ? const0_rtx : target,
9739 tmode, modifier);
9742 case CONST_DECL:
9743 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9745 case REAL_CST:
9746 /* If optimized, generate immediate CONST_DOUBLE
9747 which will be turned into memory by reload if necessary.
9749 We used to force a register so that loop.c could see it. But
9750 this does not allow gen_* patterns to perform optimizations with
9751 the constants. It also produces two insns in cases like "x = 1.0;".
9752 On most machines, floating-point constants are not permitted in
9753 many insns, so we'd end up copying it to a register in any case.
9755 Now, we do the copying in expand_binop, if appropriate. */
9756 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9757 TYPE_MODE (TREE_TYPE (exp)));
9759 case FIXED_CST:
9760 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9761 TYPE_MODE (TREE_TYPE (exp)));
9763 case COMPLEX_CST:
9764 /* Handle evaluating a complex constant in a CONCAT target. */
9765 if (original_target && GET_CODE (original_target) == CONCAT)
9767 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9768 rtx rtarg, itarg;
9770 rtarg = XEXP (original_target, 0);
9771 itarg = XEXP (original_target, 1);
9773 /* Move the real and imaginary parts separately. */
9774 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9775 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9777 if (op0 != rtarg)
9778 emit_move_insn (rtarg, op0);
9779 if (op1 != itarg)
9780 emit_move_insn (itarg, op1);
9782 return original_target;
9785 /* ... fall through ... */
9787 case STRING_CST:
9788 temp = expand_expr_constant (exp, 1, modifier);
9790 /* temp contains a constant address.
9791 On RISC machines where a constant address isn't valid,
9792 make some insns to get that address into a register. */
9793 if (modifier != EXPAND_CONST_ADDRESS
9794 && modifier != EXPAND_INITIALIZER
9795 && modifier != EXPAND_SUM
9796 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9797 MEM_ADDR_SPACE (temp)))
9798 return replace_equiv_address (temp,
9799 copy_rtx (XEXP (temp, 0)));
9800 return temp;
9802 case SAVE_EXPR:
9804 tree val = treeop0;
9805 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9806 inner_reference_p);
9808 if (!SAVE_EXPR_RESOLVED_P (exp))
9810 /* We can indeed still hit this case, typically via builtin
9811 expanders calling save_expr immediately before expanding
9812 something. Assume this means that we only have to deal
9813 with non-BLKmode values. */
9814 gcc_assert (GET_MODE (ret) != BLKmode);
9816 val = build_decl (curr_insn_location (),
9817 VAR_DECL, NULL, TREE_TYPE (exp));
9818 DECL_ARTIFICIAL (val) = 1;
9819 DECL_IGNORED_P (val) = 1;
9820 treeop0 = val;
9821 TREE_OPERAND (exp, 0) = treeop0;
9822 SAVE_EXPR_RESOLVED_P (exp) = 1;
9824 if (!CONSTANT_P (ret))
9825 ret = copy_to_reg (ret);
9826 SET_DECL_RTL (val, ret);
9829 return ret;
9833 case CONSTRUCTOR:
9834 /* If we don't need the result, just ensure we evaluate any
9835 subexpressions. */
9836 if (ignore)
9838 unsigned HOST_WIDE_INT idx;
9839 tree value;
9841 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9842 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9844 return const0_rtx;
9847 return expand_constructor (exp, target, modifier, false);
9849 case TARGET_MEM_REF:
9851 addr_space_t as
9852 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9853 enum insn_code icode;
9854 unsigned int align;
9856 op0 = addr_for_mem_ref (exp, as, true);
9857 op0 = memory_address_addr_space (mode, op0, as);
9858 temp = gen_rtx_MEM (mode, op0);
9859 set_mem_attributes (temp, exp, 0);
9860 set_mem_addr_space (temp, as);
9861 align = get_object_alignment (exp);
9862 if (modifier != EXPAND_WRITE
9863 && modifier != EXPAND_MEMORY
9864 && mode != BLKmode
9865 && align < GET_MODE_ALIGNMENT (mode)
9866 /* If the target does not have special handling for unaligned
9867 loads of mode then it can use regular moves for them. */
9868 && ((icode = optab_handler (movmisalign_optab, mode))
9869 != CODE_FOR_nothing))
9871 struct expand_operand ops[2];
9873 /* We've already validated the memory, and we're creating a
9874 new pseudo destination. The predicates really can't fail,
9875 nor can the generator. */
9876 create_output_operand (&ops[0], NULL_RTX, mode);
9877 create_fixed_operand (&ops[1], temp);
9878 expand_insn (icode, 2, ops);
9879 temp = ops[0].value;
9881 return temp;
9884 case MEM_REF:
9886 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9887 addr_space_t as
9888 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9889 machine_mode address_mode;
9890 tree base = TREE_OPERAND (exp, 0);
9891 gimple def_stmt;
9892 enum insn_code icode;
9893 unsigned align;
9894 /* Handle expansion of non-aliased memory with non-BLKmode. That
9895 might end up in a register. */
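/* E.g. an access like *(int *) &x where x was allocated to a register:
   with a zero offset and matching size it is re-expanded as a
   VIEW_CONVERT_EXPR of the base; otherwise the base is either spilled
   to a stack temporary (for BLKmode accesses) or read through a
   BIT_FIELD_REF at the given byte offset.  */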
9896 if (mem_ref_refers_to_non_mem_p (exp))
9898 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9899 base = TREE_OPERAND (base, 0);
9900 if (offset == 0
9901 && !reverse
9902 && tree_fits_uhwi_p (TYPE_SIZE (type))
9903 && (GET_MODE_BITSIZE (DECL_MODE (base))
9904 == tree_to_uhwi (TYPE_SIZE (type))))
9905 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9906 target, tmode, modifier);
9907 if (TYPE_MODE (type) == BLKmode)
9909 temp = assign_stack_temp (DECL_MODE (base),
9910 GET_MODE_SIZE (DECL_MODE (base)));
9911 store_expr (base, temp, 0, false, false);
9912 temp = adjust_address (temp, BLKmode, offset);
9913 set_mem_size (temp, int_size_in_bytes (type));
9914 return temp;
9916 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9917 bitsize_int (offset * BITS_PER_UNIT));
9918 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9919 return expand_expr (exp, target, tmode, modifier);
9921 address_mode = targetm.addr_space.address_mode (as);
9922 base = TREE_OPERAND (exp, 0);
9923 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9925 tree mask = gimple_assign_rhs2 (def_stmt);
9926 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9927 gimple_assign_rhs1 (def_stmt), mask);
9928 TREE_OPERAND (exp, 0) = base;
9930 align = get_object_alignment (exp);
9931 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9932 op0 = memory_address_addr_space (mode, op0, as);
9933 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9935 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9936 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9937 op0 = memory_address_addr_space (mode, op0, as);
9939 temp = gen_rtx_MEM (mode, op0);
9940 set_mem_attributes (temp, exp, 0);
9941 set_mem_addr_space (temp, as);
9942 if (TREE_THIS_VOLATILE (exp))
9943 MEM_VOLATILE_P (temp) = 1;
9944 if (modifier != EXPAND_WRITE
9945 && modifier != EXPAND_MEMORY
9946 && !inner_reference_p
9947 && mode != BLKmode
9948 && align < GET_MODE_ALIGNMENT (mode))
9950 if ((icode = optab_handler (movmisalign_optab, mode))
9951 != CODE_FOR_nothing)
9953 struct expand_operand ops[2];
9955 /* We've already validated the memory, and we're creating a
9956 new pseudo destination. The predicates really can't fail,
9957 nor can the generator. */
9958 create_output_operand (&ops[0], NULL_RTX, mode);
9959 create_fixed_operand (&ops[1], temp);
9960 expand_insn (icode, 2, ops);
9961 temp = ops[0].value;
9963 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9964 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9965 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9966 (modifier == EXPAND_STACK_PARM
9967 ? NULL_RTX : target),
9968 mode, mode, false);
9970 if (reverse && modifier != EXPAND_WRITE)
9971 temp = flip_storage_order (mode, temp);
9972 return temp;
9975 case ARRAY_REF:
9978 tree array = treeop0;
9979 tree index = treeop1;
9980 tree init;
9982 /* Fold an expression like: "foo"[2].
9983 This is not done in fold so it won't happen inside &.
9984 Don't fold if this is for wide characters since it's too
9985 difficult to do correctly and this is a very rare case. */
9987 if (modifier != EXPAND_CONST_ADDRESS
9988 && modifier != EXPAND_INITIALIZER
9989 && modifier != EXPAND_MEMORY)
9991 tree t = fold_read_from_constant_string (exp);
9993 if (t)
9994 return expand_expr (t, target, tmode, modifier);
9997 /* If this is a constant index into a constant array,
9998 just get the value from the array. Handle both the cases when
9999 we have an explicit constructor and when our operand is a variable
10000 that was declared const. */
10002 if (modifier != EXPAND_CONST_ADDRESS
10003 && modifier != EXPAND_INITIALIZER
10004 && modifier != EXPAND_MEMORY
10005 && TREE_CODE (array) == CONSTRUCTOR
10006 && ! TREE_SIDE_EFFECTS (array)
10007 && TREE_CODE (index) == INTEGER_CST)
10009 unsigned HOST_WIDE_INT ix;
10010 tree field, value;
10012 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10013 field, value)
10014 if (tree_int_cst_equal (field, index))
10016 if (!TREE_SIDE_EFFECTS (value))
10017 return expand_expr (fold (value), target, tmode, modifier);
10018 break;
10022 else if (optimize >= 1
10023 && modifier != EXPAND_CONST_ADDRESS
10024 && modifier != EXPAND_INITIALIZER
10025 && modifier != EXPAND_MEMORY
10026 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10027 && TREE_CODE (index) == INTEGER_CST
10028 && (TREE_CODE (array) == VAR_DECL
10029 || TREE_CODE (array) == CONST_DECL)
10030 && (init = ctor_for_folding (array)) != error_mark_node)
10032 if (init == NULL_TREE)
10034 tree value = build_zero_cst (type);
10035 if (TREE_CODE (value) == CONSTRUCTOR)
10037 /* If VALUE is a CONSTRUCTOR, this optimization is only
10038 useful if this doesn't store the CONSTRUCTOR into
10039 memory. If it does, it is more efficient to just
10040 load the data from the array directly. */
10041 rtx ret = expand_constructor (value, target,
10042 modifier, true);
10043 if (ret == NULL_RTX)
10044 value = NULL_TREE;
10047 if (value)
10048 return expand_expr (value, target, tmode, modifier);
10050 else if (TREE_CODE (init) == CONSTRUCTOR)
10052 unsigned HOST_WIDE_INT ix;
10053 tree field, value;
10055 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10056 field, value)
10057 if (tree_int_cst_equal (field, index))
10059 if (TREE_SIDE_EFFECTS (value))
10060 break;
10062 if (TREE_CODE (value) == CONSTRUCTOR)
10064 /* If VALUE is a CONSTRUCTOR, this
10065 optimization is only useful if
10066 this doesn't store the CONSTRUCTOR
10067 into memory. If it does, it is more
10068 efficient to just load the data from
10069 the array directly. */
10070 rtx ret = expand_constructor (value, target,
10071 modifier, true);
10072 if (ret == NULL_RTX)
10073 break;
10076 return
10077 expand_expr (fold (value), target, tmode, modifier);
10080 else if (TREE_CODE (init) == STRING_CST)
10082 tree low_bound = array_ref_low_bound (exp);
10083 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10085 /* Optimize the special case of a zero lower bound.
10087 We convert the lower bound to sizetype to avoid problems
10088 with constant folding. E.g. suppose the lower bound is
10089 1 and its mode is QI. Without the conversion
10090 (ARRAY + (INDEX - (unsigned char)1))
10091 becomes
10092 (ARRAY + (-(unsigned char)1) + INDEX)
10093 which becomes
10094 (ARRAY + 255 + INDEX). Oops! */
10095 if (!integer_zerop (low_bound))
10096 index1 = size_diffop_loc (loc, index1,
10097 fold_convert_loc (loc, sizetype,
10098 low_bound));
10100 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10102 tree type = TREE_TYPE (TREE_TYPE (init));
10103 machine_mode mode = TYPE_MODE (type);
10105 if (GET_MODE_CLASS (mode) == MODE_INT
10106 && GET_MODE_SIZE (mode) == 1)
10107 return gen_int_mode (TREE_STRING_POINTER (init)
10108 [TREE_INT_CST_LOW (index1)],
10109 mode);
10114 goto normal_inner_ref;
10116 case COMPONENT_REF:
10117 /* If the operand is a CONSTRUCTOR, we can just extract the
10118 appropriate field if it is present. */
10119 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10121 unsigned HOST_WIDE_INT idx;
10122 tree field, value;
10124 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10125 idx, field, value)
10126 if (field == treeop1
10127 /* We can normally use the value of the field in the
10128 CONSTRUCTOR. However, if this is a bitfield in
10129 an integral mode that we can fit in a HOST_WIDE_INT,
10130 we must mask only the number of bits in the bitfield,
10131 since this is done implicitly by the constructor. If
10132 the bitfield does not meet either of those conditions,
10133 we can't do this optimization. */
10134 && (! DECL_BIT_FIELD (field)
10135 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10136 && (GET_MODE_PRECISION (DECL_MODE (field))
10137 <= HOST_BITS_PER_WIDE_INT))))
10139 if (DECL_BIT_FIELD (field)
10140 && modifier == EXPAND_STACK_PARM)
10141 target = 0;
10142 op0 = expand_expr (value, target, tmode, modifier);
10143 if (DECL_BIT_FIELD (field))
10145 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10146 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
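/* E.g. a 3-bit unsigned field is masked with (1 << 3) - 1 == 7, while
   a 3-bit signed field is sign-extended by shifting left and then
   arithmetically right by GET_MODE_PRECISION (imode) - 3 bits.  */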
10148 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10150 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10151 imode);
10152 op0 = expand_and (imode, op0, op1, target);
10154 else
10156 int count = GET_MODE_PRECISION (imode) - bitsize;
10158 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10159 target, 0);
10160 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10161 target, 0);
10165 return op0;
10168 goto normal_inner_ref;
10170 case BIT_FIELD_REF:
10171 case ARRAY_RANGE_REF:
10172 normal_inner_ref:
10174 machine_mode mode1, mode2;
10175 HOST_WIDE_INT bitsize, bitpos;
10176 tree offset;
10177 int reversep, volatilep = 0, must_force_mem;
10178 tree tem
10179 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10180 &unsignedp, &reversep, &volatilep, true);
10181 rtx orig_op0, memloc;
10182 bool mem_attrs_from_type = false;
10184 /* If we got back the original object, something is wrong. Perhaps
10185 we are evaluating an expression too early. In any event, don't
10186 infinitely recurse. */
10187 gcc_assert (tem != exp);
10189 /* If TEM's type is a union of variable size, pass TARGET to the inner
10190 computation, since it will need a temporary and TARGET is known
10191 to have to do. This occurs in unchecked conversion in Ada. */
10192 orig_op0 = op0
10193 = expand_expr_real (tem,
10194 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10195 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10196 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10197 != INTEGER_CST)
10198 && modifier != EXPAND_STACK_PARM
10199 ? target : NULL_RTX),
10200 VOIDmode,
10201 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10202 NULL, true);
10204 /* If the field has a mode, we want to access it in the
10205 field's mode, not the computed mode.
10206 If a MEM has VOIDmode (external with incomplete type),
10207 use BLKmode for it instead. */
10208 if (MEM_P (op0))
10210 if (mode1 != VOIDmode)
10211 op0 = adjust_address (op0, mode1, 0);
10212 else if (GET_MODE (op0) == VOIDmode)
10213 op0 = adjust_address (op0, BLKmode, 0);
10216 mode2
10217 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10219 /* If we have either an offset, a BLKmode result, or a reference
10220 outside the underlying object, we must force it to memory.
10221 Such a case can occur in Ada if we have unchecked conversion
10222 of an expression from a scalar type to an aggregate type or
10223 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10224 passed a partially uninitialized object or a view-conversion
10225 to a larger size. */
10226 must_force_mem = (offset
10227 || mode1 == BLKmode
10228 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10230 /* Handle CONCAT first. */
10231 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10233 if (bitpos == 0
10234 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10235 return op0;
10236 if (bitpos == 0
10237 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10238 && bitsize)
10240 op0 = XEXP (op0, 0);
10241 mode2 = GET_MODE (op0);
10243 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10244 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10245 && bitpos
10246 && bitsize)
10248 op0 = XEXP (op0, 1);
10249 bitpos = 0;
10250 mode2 = GET_MODE (op0);
10252 else
10253 /* Otherwise force into memory. */
10254 must_force_mem = 1;
10257 /* If this is a constant, put it in a register if it is a legitimate
10258 constant and we don't need a memory reference. */
10259 if (CONSTANT_P (op0)
10260 && mode2 != BLKmode
10261 && targetm.legitimate_constant_p (mode2, op0)
10262 && !must_force_mem)
10263 op0 = force_reg (mode2, op0);
10265 /* Otherwise, if this is a constant, try to force it to the constant
10266 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10267 is a legitimate constant. */
10268 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10269 op0 = validize_mem (memloc);
10271 /* Otherwise, if this is a constant or the object is not in memory
10272 and need be, put it there. */
10273 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10275 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10276 emit_move_insn (memloc, op0);
10277 op0 = memloc;
10278 mem_attrs_from_type = true;
10281 if (offset)
10283 machine_mode address_mode;
10284 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10285 EXPAND_SUM);
10287 gcc_assert (MEM_P (op0));
10289 address_mode = get_address_mode (op0);
10290 if (GET_MODE (offset_rtx) != address_mode)
10291 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10293 /* See the comment in expand_assignment for the rationale. */
10294 if (mode1 != VOIDmode
10295 && bitpos != 0
10296 && bitsize > 0
10297 && (bitpos % bitsize) == 0
10298 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10299 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10301 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10302 bitpos = 0;
10305 op0 = offset_address (op0, offset_rtx,
10306 highest_pow2_factor (offset));
10309 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10310 record its alignment as BIGGEST_ALIGNMENT. */
10311 if (MEM_P (op0) && bitpos == 0 && offset != 0
10312 && is_aligning_offset (offset, tem))
10313 set_mem_align (op0, BIGGEST_ALIGNMENT);
10315 /* Don't forget about volatility even if this is a bitfield. */
10316 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10318 if (op0 == orig_op0)
10319 op0 = copy_rtx (op0);
10321 MEM_VOLATILE_P (op0) = 1;
10324 /* In cases where an aligned union has an unaligned object
10325 as a field, we might be extracting a BLKmode value from
10326 an integer-mode (e.g., SImode) object. Handle this case
10327 by doing the extract into an object as wide as the field
10328 (which we know to be the width of a basic mode), then
10329 storing into memory, and changing the mode to BLKmode. */
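/* The bit-field path below is also taken for volatile bit-fields
   narrower than their access size, for fields that are not aligned
   enough to be fetched as an ordinary memref, and when the size of
   the type differs from the size of the bit-field; see the inline
   comments in the condition.  */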
10330 if (mode1 == VOIDmode
10331 || REG_P (op0) || GET_CODE (op0) == SUBREG
10332 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10333 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10334 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10335 && modifier != EXPAND_CONST_ADDRESS
10336 && modifier != EXPAND_INITIALIZER
10337 && modifier != EXPAND_MEMORY)
10338 /* If the bitfield is volatile and the bitsize
10339 is narrower than the access size of the bitfield,
10340 we need to extract bitfields from the access. */
10341 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10342 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10343 && mode1 != BLKmode
10344 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10345 /* If the field isn't aligned enough to fetch as a memref,
10346 fetch it as a bit field. */
10347 || (mode1 != BLKmode
10348 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10349 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10350 || (MEM_P (op0)
10351 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10352 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10353 && modifier != EXPAND_MEMORY
10354 && ((modifier == EXPAND_CONST_ADDRESS
10355 || modifier == EXPAND_INITIALIZER)
10356 ? STRICT_ALIGNMENT
10357 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10358 || (bitpos % BITS_PER_UNIT != 0)))
10359 /* If the type and the field are a constant size and the
10360 size of the type isn't the same size as the bitfield,
10361 we must use bitfield operations. */
10362 || (bitsize >= 0
10363 && TYPE_SIZE (TREE_TYPE (exp))
10364 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10365 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10366 bitsize)))
10368 machine_mode ext_mode = mode;
10370 if (ext_mode == BLKmode
10371 && ! (target != 0 && MEM_P (op0)
10372 && MEM_P (target)
10373 && bitpos % BITS_PER_UNIT == 0))
10374 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10376 if (ext_mode == BLKmode)
10378 if (target == 0)
10379 target = assign_temp (type, 1, 1);
10381 /* ??? Unlike the similar test a few lines below, this one is
10382 very likely obsolete. */
10383 if (bitsize == 0)
10384 return target;
10386 /* In this case, BITPOS must start at a byte boundary and
10387 TARGET, if specified, must be a MEM. */
10388 gcc_assert (MEM_P (op0)
10389 && (!target || MEM_P (target))
10390 && !(bitpos % BITS_PER_UNIT));
10392 emit_block_move (target,
10393 adjust_address (op0, VOIDmode,
10394 bitpos / BITS_PER_UNIT),
10395 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10396 / BITS_PER_UNIT),
10397 (modifier == EXPAND_STACK_PARM
10398 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10400 return target;
10403 /* If we have nothing to extract, the result will be 0 for targets
10404 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10405 return 0 for the sake of consistency, as reading a zero-sized
10406 bitfield is valid in Ada and the value is fully specified. */
10407 if (bitsize == 0)
10408 return const0_rtx;
10410 op0 = validize_mem (op0);
10412 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10413 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10415 /* If the result has a record type and the extraction is done in
10416 an integral mode, then the field may be not aligned on a byte
10417 boundary; in this case, if it has reverse storage order, it
10418 needs to be extracted as a scalar field with reverse storage
10419 order and put back into memory order afterwards. */
10420 if (TREE_CODE (type) == RECORD_TYPE
10421 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10422 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10424 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10425 (modifier == EXPAND_STACK_PARM
10426 ? NULL_RTX : target),
10427 ext_mode, ext_mode, reversep);
10429 /* If the result has a record type and the mode of OP0 is an
10430 integral mode then, if BITSIZE is narrower than this mode
10431 and this is a big-endian machine, we must put the field
10432 into the high-order bits. And we must also put it back
10433 into memory order if it has been previously reversed. */
10434 if (TREE_CODE (type) == RECORD_TYPE
10435 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10437 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10439 if (bitsize < size
10440 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10441 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10442 size - bitsize, op0, 1);
10444 if (reversep)
10445 op0 = flip_storage_order (GET_MODE (op0), op0);
10448 /* If the result type is BLKmode, store the data into a temporary
10449 of the appropriate type, but with the mode corresponding to the
10450 mode for the data we have (op0's mode). */
10451 if (mode == BLKmode)
10453 rtx new_rtx
10454 = assign_stack_temp_for_type (ext_mode,
10455 GET_MODE_BITSIZE (ext_mode),
10456 type);
10457 emit_move_insn (new_rtx, op0);
10458 op0 = copy_rtx (new_rtx);
10459 PUT_MODE (op0, BLKmode);
10462 return op0;
10465 /* If the result is BLKmode, use that to access the object
10466 now as well. */
10467 if (mode == BLKmode)
10468 mode1 = BLKmode;
10470 /* Get a reference to just this component. */
10471 if (modifier == EXPAND_CONST_ADDRESS
10472 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10473 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10474 else
10475 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10477 if (op0 == orig_op0)
10478 op0 = copy_rtx (op0);
10480 /* If op0 is a temporary because of forcing to memory, pass only the
10481 type to set_mem_attributes so that the original expression is never
10482 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10483 if (mem_attrs_from_type)
10484 set_mem_attributes (op0, type, 0);
10485 else
10486 set_mem_attributes (op0, exp, 0);
10488 if (REG_P (XEXP (op0, 0)))
10489 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10491 MEM_VOLATILE_P (op0) |= volatilep;
10493 if (reversep)
10494 op0 = flip_storage_order (mode1, op0);
10496 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10497 || modifier == EXPAND_CONST_ADDRESS
10498 || modifier == EXPAND_INITIALIZER)
10499 return op0;
10501 if (target == 0)
10502 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10504 convert_move (target, op0, unsignedp);
10505 return target;
10508 case OBJ_TYPE_REF:
10509 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10511 case CALL_EXPR:
10512 /* All valid uses of __builtin_va_arg_pack () are removed during
10513 inlining. */
10514 if (CALL_EXPR_VA_ARG_PACK (exp))
10515 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10517 tree fndecl = get_callee_fndecl (exp), attr;
10519 if (fndecl
10520 && (attr = lookup_attribute ("error",
10521 DECL_ATTRIBUTES (fndecl))) != NULL)
10522 error ("%Kcall to %qs declared with attribute error: %s",
10523 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10524 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10525 if (fndecl
10526 && (attr = lookup_attribute ("warning",
10527 DECL_ATTRIBUTES (fndecl))) != NULL)
10528 warning_at (tree_nonartificial_location (exp),
10529 0, "%Kcall to %qs declared with attribute warning: %s",
10530 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10531 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10533 /* Check for a built-in function. */
10534 if (fndecl && DECL_BUILT_IN (fndecl))
10536 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10537 return expand_builtin (exp, target, subtarget, tmode, ignore);
10540 return expand_call (exp, target, ignore);
10542 case VIEW_CONVERT_EXPR:
10543 op0 = NULL_RTX;
10545 /* If we are converting to BLKmode, try to avoid an intermediate
10546 temporary by fetching an inner memory reference. */
10547 if (mode == BLKmode
10548 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10549 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10550 && handled_component_p (treeop0))
10552 machine_mode mode1;
10553 HOST_WIDE_INT bitsize, bitpos;
10554 tree offset;
10555 int unsignedp, reversep, volatilep = 0;
10556 tree tem
10557 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10558 &unsignedp, &reversep, &volatilep, true);
10559 rtx orig_op0;
10561 /* ??? We should work harder and deal with non-zero offsets. */
10562 if (!offset
10563 && (bitpos % BITS_PER_UNIT) == 0
10564 && !reversep
10565 && bitsize >= 0
10566 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10568 /* See the normal_inner_ref case for the rationale. */
10569 orig_op0
10570 = expand_expr_real (tem,
10571 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10572 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10573 != INTEGER_CST)
10574 && modifier != EXPAND_STACK_PARM
10575 ? target : NULL_RTX),
10576 VOIDmode,
10577 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10578 NULL, true);
10580 if (MEM_P (orig_op0))
10582 op0 = orig_op0;
10584 /* Get a reference to just this component. */
10585 if (modifier == EXPAND_CONST_ADDRESS
10586 || modifier == EXPAND_SUM
10587 || modifier == EXPAND_INITIALIZER)
10588 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10589 else
10590 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10592 if (op0 == orig_op0)
10593 op0 = copy_rtx (op0);
10595 set_mem_attributes (op0, treeop0, 0);
10596 if (REG_P (XEXP (op0, 0)))
10597 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10599 MEM_VOLATILE_P (op0) |= volatilep;
10604 if (!op0)
10605 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10606 NULL, inner_reference_p);
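/* From here on, pick the cheapest way to reinterpret op0 in MODE:
   reuse it unchanged if the modes already match, use gen_lowpart for
   equal-precision non-BLKmode modes, convert_modes when both types
   are integral, a bit-field extraction for bit-field result types,
   and otherwise spill op0 to a stack temporary and reload it in the
   new mode.  */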
10608 /* If the input and output modes are both the same, we are done. */
10609 if (mode == GET_MODE (op0))
10611 /* If neither mode is BLKmode, and both modes are the same size
10612 then we can use gen_lowpart. */
10613 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10614 && (GET_MODE_PRECISION (mode)
10615 == GET_MODE_PRECISION (GET_MODE (op0)))
10616 && !COMPLEX_MODE_P (GET_MODE (op0)))
10618 if (GET_CODE (op0) == SUBREG)
10619 op0 = force_reg (GET_MODE (op0), op0);
10620 temp = gen_lowpart_common (mode, op0);
10621 if (temp)
10622 op0 = temp;
10623 else
10625 if (!REG_P (op0) && !MEM_P (op0))
10626 op0 = force_reg (GET_MODE (op0), op0);
10627 op0 = gen_lowpart (mode, op0);
10630 /* If both types are integral, convert from one mode to the other. */
10631 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10632 op0 = convert_modes (mode, GET_MODE (op0), op0,
10633 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10634 /* If the output type is a bit-field type, do an extraction. */
10635 else if (reduce_bit_field)
10636 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10637 TYPE_UNSIGNED (type), NULL_RTX,
10638 mode, mode, false);
10639 /* As a last resort, spill op0 to memory, and reload it in a
10640 different mode. */
10641 else if (!MEM_P (op0))
10643 /* If the operand is not a MEM, force it into memory. Since we
10644 are going to be changing the mode of the MEM, don't call
10645 force_const_mem for constants because we don't allow pool
10646 constants to change mode. */
10647 tree inner_type = TREE_TYPE (treeop0);
10649 gcc_assert (!TREE_ADDRESSABLE (exp));
10651 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10652 target
10653 = assign_stack_temp_for_type
10654 (TYPE_MODE (inner_type),
10655 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10657 emit_move_insn (target, op0);
10658 op0 = target;
10661 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10662 output type is such that the operand is known to be aligned, indicate
10663 that it is. Otherwise, we need only be concerned about alignment for
10664 non-BLKmode results. */
10665 if (MEM_P (op0))
10667 enum insn_code icode;
10669 if (TYPE_ALIGN_OK (type))
10671 /* ??? Copying the MEM without substantially changing it might
10672 run afoul of the code handling volatile memory references in
10673 store_expr, which assumes that TARGET is returned unmodified
10674 if it has been used. */
10675 op0 = copy_rtx (op0);
10676 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10678 else if (modifier != EXPAND_WRITE
10679 && modifier != EXPAND_MEMORY
10680 && !inner_reference_p
10681 && mode != BLKmode
10682 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10684 /* If the target does have special handling for unaligned
10685 loads of mode then use them. */
10686 if ((icode = optab_handler (movmisalign_optab, mode))
10687 != CODE_FOR_nothing)
10689 rtx reg, insn;
10691 op0 = adjust_address (op0, mode, 0);
10692 /* We've already validated the memory, and we're creating a
10693 new pseudo destination. The predicates really can't
10694 fail. */
10695 reg = gen_reg_rtx (mode);
10697 /* Nor can the insn generator. */
10698 insn = GEN_FCN (icode) (reg, op0);
10699 emit_insn (insn);
10700 return reg;
10702 else if (STRICT_ALIGNMENT)
10704 tree inner_type = TREE_TYPE (treeop0);
10705 HOST_WIDE_INT temp_size
10706 = MAX (int_size_in_bytes (inner_type),
10707 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10708 rtx new_rtx
10709 = assign_stack_temp_for_type (mode, temp_size, type);
10710 rtx new_with_op0_mode
10711 = adjust_address (new_rtx, GET_MODE (op0), 0);
10713 gcc_assert (!TREE_ADDRESSABLE (exp));
10715 if (GET_MODE (op0) == BLKmode)
10716 emit_block_move (new_with_op0_mode, op0,
10717 GEN_INT (GET_MODE_SIZE (mode)),
10718 (modifier == EXPAND_STACK_PARM
10719 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10720 else
10721 emit_move_insn (new_with_op0_mode, op0);
10723 op0 = new_rtx;
10727 op0 = adjust_address (op0, mode, 0);
10730 return op0;
10732 case MODIFY_EXPR:
10734 tree lhs = treeop0;
10735 tree rhs = treeop1;
10736 gcc_assert (ignore);
10738 /* Check for |= or &= of a bitfield of size one into another bitfield
10739 of size 1. In this case, (unless we need the result of the
10740 assignment) we can do this more efficiently with a
10741 test followed by an assignment, if necessary.
10743 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10744 things change so we do, this code should be enhanced to
10745 support it. */
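/* E.g. for one-bit fields a and b, "x.a |= x.b" becomes
   "if (x.b) x.a = 1;" and "x.a &= x.b" becomes "if (!x.b) x.a = 0;".  */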
10746 if (TREE_CODE (lhs) == COMPONENT_REF
10747 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10748 || TREE_CODE (rhs) == BIT_AND_EXPR)
10749 && TREE_OPERAND (rhs, 0) == lhs
10750 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10751 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10752 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10754 rtx_code_label *label = gen_label_rtx ();
10755 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10756 do_jump (TREE_OPERAND (rhs, 1),
10757 value ? label : 0,
10758 value ? 0 : label, -1);
10759 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10760 false);
10761 do_pending_stack_adjust ();
10762 emit_label (label);
10763 return const0_rtx;
10766 expand_assignment (lhs, rhs, false);
10767 return const0_rtx;
10770 case ADDR_EXPR:
10771 return expand_expr_addr_expr (exp, target, tmode, modifier);
10773 case REALPART_EXPR:
10774 op0 = expand_normal (treeop0);
10775 return read_complex_part (op0, false);
10777 case IMAGPART_EXPR:
10778 op0 = expand_normal (treeop0);
10779 return read_complex_part (op0, true);
10781 case RETURN_EXPR:
10782 case LABEL_EXPR:
10783 case GOTO_EXPR:
10784 case SWITCH_EXPR:
10785 case ASM_EXPR:
10786 /* Expanded in cfgexpand.c. */
10787 gcc_unreachable ();
10789 case TRY_CATCH_EXPR:
10790 case CATCH_EXPR:
10791 case EH_FILTER_EXPR:
10792 case TRY_FINALLY_EXPR:
10793 /* Lowered by tree-eh.c. */
10794 gcc_unreachable ();
10796 case WITH_CLEANUP_EXPR:
10797 case CLEANUP_POINT_EXPR:
10798 case TARGET_EXPR:
10799 case CASE_LABEL_EXPR:
10800 case VA_ARG_EXPR:
10801 case BIND_EXPR:
10802 case INIT_EXPR:
10803 case CONJ_EXPR:
10804 case COMPOUND_EXPR:
10805 case PREINCREMENT_EXPR:
10806 case PREDECREMENT_EXPR:
10807 case POSTINCREMENT_EXPR:
10808 case POSTDECREMENT_EXPR:
10809 case LOOP_EXPR:
10810 case EXIT_EXPR:
10811 case COMPOUND_LITERAL_EXPR:
10812 /* Lowered by gimplify.c. */
10813 gcc_unreachable ();
10815 case FDESC_EXPR:
10816 /* Function descriptors are not valid except for as
10817 initialization constants, and should not be expanded. */
10818 gcc_unreachable ();
10820 case WITH_SIZE_EXPR:
10821 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10822 have pulled out the size to use in whatever context it needed. */
10823 return expand_expr_real (treeop0, original_target, tmode,
10824 modifier, alt_rtl, inner_reference_p);
10826 default:
10827 return expand_expr_real_2 (&ops, target, tmode, modifier);
10831 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10832 signedness of TYPE), possibly returning the result in TARGET. */
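/* E.g. in QImode an unsigned 3-bit value is masked with 0x7, while a
   signed 3-bit value is shifted left by 5 bits and then arithmetically
   right by 5 bits, which sign-extends it from bit 2.  */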
10833 static rtx
10834 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10836 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10837 if (target && GET_MODE (target) != GET_MODE (exp))
10838 target = 0;
10839 /* For constant values, reduce using build_int_cst_type. */
10840 if (CONST_INT_P (exp))
10842 HOST_WIDE_INT value = INTVAL (exp);
10843 tree t = build_int_cst_type (type, value);
10844 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10846 else if (TYPE_UNSIGNED (type))
10848 machine_mode mode = GET_MODE (exp);
10849 rtx mask = immed_wide_int_const
10850 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10851 return expand_and (mode, exp, mask, target);
10853 else
10855 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10856 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10857 exp, count, target, 0);
10858 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10859 exp, count, target, 0);
10863 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10864 when applied to the address of EXP produces an address known to be
10865 aligned more than BIGGEST_ALIGNMENT. */
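/* The recognized form is essentially (-(T) &EXP) & MASK, where MASK is
   one less than a power of two and larger than BIGGEST_ALIGNMENT in
   bytes; adding such an offset to the address of EXP rounds it up to
   that power-of-two boundary.  */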
10867 static int
10868 is_aligning_offset (const_tree offset, const_tree exp)
10870 /* Strip off any conversions. */
10871 while (CONVERT_EXPR_P (offset))
10872 offset = TREE_OPERAND (offset, 0);
10874 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10875 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10876 if (TREE_CODE (offset) != BIT_AND_EXPR
10877 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10878 || compare_tree_int (TREE_OPERAND (offset, 1),
10879 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10880 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10881 return 0;
10883 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10884 It must be NEGATE_EXPR. Then strip any more conversions. */
10885 offset = TREE_OPERAND (offset, 0);
10886 while (CONVERT_EXPR_P (offset))
10887 offset = TREE_OPERAND (offset, 0);
10889 if (TREE_CODE (offset) != NEGATE_EXPR)
10890 return 0;
10892 offset = TREE_OPERAND (offset, 0);
10893 while (CONVERT_EXPR_P (offset))
10894 offset = TREE_OPERAND (offset, 0);
10896 /* This must now be the address of EXP. */
10897 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10900 /* Return the tree node if an ARG corresponds to a string constant or zero
10901 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10902 in bytes within the string that ARG is accessing. The type of the
10903 offset will be `sizetype'. */
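/* E.g. for an ARG of the form &"hello" + 2 (a POINTER_PLUS_EXPR whose
   first operand takes the address of the STRING_CST), the STRING_CST
   is returned and *PTR_OFFSET is set to 2.  Variables whose
   initializer is a string literal are handled as well.  */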
10905 tree
10906 string_constant (tree arg, tree *ptr_offset)
10908 tree array, offset, lower_bound;
10909 STRIP_NOPS (arg);
10911 if (TREE_CODE (arg) == ADDR_EXPR)
10913 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10915 *ptr_offset = size_zero_node;
10916 return TREE_OPERAND (arg, 0);
10918 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10920 array = TREE_OPERAND (arg, 0);
10921 offset = size_zero_node;
10923 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10925 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10926 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10927 if (TREE_CODE (array) != STRING_CST
10928 && TREE_CODE (array) != VAR_DECL)
10929 return 0;
10931 /* Check if the array has a nonzero lower bound. */
10932 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10933 if (!integer_zerop (lower_bound))
10935 /* If the offset and base aren't both constants, return 0. */
10936 if (TREE_CODE (lower_bound) != INTEGER_CST)
10937 return 0;
10938 if (TREE_CODE (offset) != INTEGER_CST)
10939 return 0;
10940 /* Adjust offset by the lower bound. */
10941 offset = size_diffop (fold_convert (sizetype, offset),
10942 fold_convert (sizetype, lower_bound));
10945 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10947 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10948 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10949 if (TREE_CODE (array) != ADDR_EXPR)
10950 return 0;
10951 array = TREE_OPERAND (array, 0);
10952 if (TREE_CODE (array) != STRING_CST
10953 && TREE_CODE (array) != VAR_DECL)
10954 return 0;
10956 else
10957 return 0;
10959 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10961 tree arg0 = TREE_OPERAND (arg, 0);
10962 tree arg1 = TREE_OPERAND (arg, 1);
10964 STRIP_NOPS (arg0);
10965 STRIP_NOPS (arg1);
10967 if (TREE_CODE (arg0) == ADDR_EXPR
10968 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10969 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10971 array = TREE_OPERAND (arg0, 0);
10972 offset = arg1;
10974 else if (TREE_CODE (arg1) == ADDR_EXPR
10975 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10976 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10978 array = TREE_OPERAND (arg1, 0);
10979 offset = arg0;
10981 else
10982 return 0;
10984 else
10985 return 0;
10987 if (TREE_CODE (array) == STRING_CST)
10989 *ptr_offset = fold_convert (sizetype, offset);
10990 return array;
10992 else if (TREE_CODE (array) == VAR_DECL
10993 || TREE_CODE (array) == CONST_DECL)
10995 int length;
10996 tree init = ctor_for_folding (array);
10998 /* Variables initialized to string literals can be handled too. */
10999 if (init == error_mark_node
11000 || !init
11001 || TREE_CODE (init) != STRING_CST)
11002 return 0;
11004 /* Avoid const char foo[4] = "abcde"; */
11005 if (DECL_SIZE_UNIT (array) == NULL_TREE
11006 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11007 || (length = TREE_STRING_LENGTH (init)) <= 0
11008 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11009 return 0;
11011 /* If the variable is bigger than the string literal, OFFSET must be constant
11012 and within the bounds of the string literal. */
11013 offset = fold_convert (sizetype, offset);
11014 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11015 && (! tree_fits_uhwi_p (offset)
11016 || compare_tree_int (offset, length) >= 0))
11017 return 0;
11019 *ptr_offset = offset;
11020 return init;
11023 return 0;
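/* Illustrative sketch (editor's addition, not part of the original file):
   C expressions of the shapes handled by string_constant above.  The code
   is hypothetical, guarded with #if 0 so it is never compiled, and only
   shows the kind of source constructs involved; the exact trees produced
   depend on the front end.  */
#if 0
static const char message[] = "hello, world";  /* VAR_DECL with a STRING_CST initializer.  */

static const char *
example_string_constant_args (void)
{
  const char *a = "hello, world";      /* ADDR_EXPR of a STRING_CST, offset 0.  */
  const char *b = &"hello, world"[7];  /* ARRAY_REF of a STRING_CST, offset 7.  */
  const char *c = message + 7;         /* POINTER_PLUS_EXPR of &message and 7.  */
  return a && b ? c : a;
}
#endif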
11026 /* Generate code to calculate the exploded expression OPS using a
11027 store-flag instruction, and return an rtx for the result.
11028 OPS reflects a comparison. (See the illustrative sketch after this function.)
11030 If TARGET is nonzero, store the result there if convenient.
11032 Return zero if there is no suitable set-flag instruction
11033 available on this machine.
11035 Once expand_expr has been called on the arguments of the comparison,
11036 we are committed to doing the store flag, since it is not safe to
11037 re-evaluate the expression. We emit the store-flag insn by calling
11038 emit_store_flag, but only expand the arguments if we have a reason
11039 to believe that emit_store_flag will be successful. If we think that
11040 it will, but it isn't, we have to simulate the store-flag with a
11041 set/jump/set sequence. */
11043 static rtx
11044 do_store_flag (sepops ops, rtx target, machine_mode mode)
11046 enum rtx_code code;
11047 tree arg0, arg1, type;
11048 tree tem;
11049 machine_mode operand_mode;
11050 int unsignedp;
11051 rtx op0, op1;
11052 rtx subtarget = target;
11053 location_t loc = ops->location;
11055 arg0 = ops->op0;
11056 arg1 = ops->op1;
11058 /* Don't crash if the comparison was erroneous. */
11059 if (arg0 == error_mark_node || arg1 == error_mark_node)
11060 return const0_rtx;
11062 type = TREE_TYPE (arg0);
11063 operand_mode = TYPE_MODE (type);
11064 unsignedp = TYPE_UNSIGNED (type);
11066 /* We won't bother with BLKmode store-flag operations because it would mean
11067 passing a lot of information to emit_store_flag. */
11068 if (operand_mode == BLKmode)
11069 return 0;
11071 /* We won't bother with store-flag operations involving function pointers
11072 when function pointers must be canonicalized before comparisons. */
11073 #ifdef HAVE_canonicalize_funcptr_for_compare
11074 if (HAVE_canonicalize_funcptr_for_compare
11075 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11076 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11077 == FUNCTION_TYPE))
11078 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11079 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11080 == FUNCTION_TYPE))))
11081 return 0;
11082 #endif
11084 STRIP_NOPS (arg0);
11085 STRIP_NOPS (arg1);
11087 /* For vector typed comparisons emit code to generate the desired
11088 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11089 expander for this. */
11090 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11092 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11093 tree if_true = constant_boolean_node (true, ops->type);
11094 tree if_false = constant_boolean_node (false, ops->type);
11095 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11098 /* Get the rtx comparison code to use. We know that OPS describes a comparison
11099 operation of some type. Some comparisons against 1 and -1 can be
11100 converted to comparisons with zero. Do so here so that the tests
11101 below will be aware that we have a comparison with zero. These
11102 tests will not catch constants in the first operand, but constants
11103 are rarely passed as the first operand. */
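/* For example (editor's illustration): with a signed ARG0,
   ARG0 < 1 becomes ARG0 <= 0, ARG0 <= -1 becomes ARG0 < 0,
   ARG0 > -1 becomes ARG0 >= 0, and ARG0 >= 1 becomes ARG0 > 0,
   so only comparisons against zero need to be recognized below.  */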
11105 switch (ops->code)
11107 case EQ_EXPR:
11108 code = EQ;
11109 break;
11110 case NE_EXPR:
11111 code = NE;
11112 break;
11113 case LT_EXPR:
11114 if (integer_onep (arg1))
11115 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11116 else
11117 code = unsignedp ? LTU : LT;
11118 break;
11119 case LE_EXPR:
11120 if (! unsignedp && integer_all_onesp (arg1))
11121 arg1 = integer_zero_node, code = LT;
11122 else
11123 code = unsignedp ? LEU : LE;
11124 break;
11125 case GT_EXPR:
11126 if (! unsignedp && integer_all_onesp (arg1))
11127 arg1 = integer_zero_node, code = GE;
11128 else
11129 code = unsignedp ? GTU : GT;
11130 break;
11131 case GE_EXPR:
11132 if (integer_onep (arg1))
11133 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11134 else
11135 code = unsignedp ? GEU : GE;
11136 break;
11138 case UNORDERED_EXPR:
11139 code = UNORDERED;
11140 break;
11141 case ORDERED_EXPR:
11142 code = ORDERED;
11143 break;
11144 case UNLT_EXPR:
11145 code = UNLT;
11146 break;
11147 case UNLE_EXPR:
11148 code = UNLE;
11149 break;
11150 case UNGT_EXPR:
11151 code = UNGT;
11152 break;
11153 case UNGE_EXPR:
11154 code = UNGE;
11155 break;
11156 case UNEQ_EXPR:
11157 code = UNEQ;
11158 break;
11159 case LTGT_EXPR:
11160 code = LTGT;
11161 break;
11163 default:
11164 gcc_unreachable ();
11167 /* Put a constant second. */
11168 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11169 || TREE_CODE (arg0) == FIXED_CST)
11171 tem = arg0; arg0 = arg1; arg1 = tem;
11172 code = swap_condition (code);
11175 /* If this is an equality or inequality test of a single bit, we can
11176 do this by shifting the bit being tested to the low-order bit and
11177 masking the result with the constant 1. If the condition was EQ,
11178 we xor it with 1. This does not require an scc insn and is faster
11179 than an scc insn even if we have it.
11181 The code to make this transformation was moved into fold_single_bit_test,
11182 so we just call into the folder and expand its result. */
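/* For example (editor's illustration): (x & 8) != 0 is expanded as
   (x >> 3) & 1, and (x & 8) == 0 as ((x >> 3) & 1) ^ 1, with no scc
   instruction required.  */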
11184 if ((code == NE || code == EQ)
11185 && integer_zerop (arg1)
11186 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11188 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11189 if (srcstmt
11190 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11192 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11193 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11194 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11195 gimple_assign_rhs1 (srcstmt),
11196 gimple_assign_rhs2 (srcstmt));
11197 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11198 if (temp)
11199 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11203 if (! get_subtarget (target)
11204 || GET_MODE (subtarget) != operand_mode)
11205 subtarget = 0;
11207 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11209 if (target == 0)
11210 target = gen_reg_rtx (mode);
11212 /* Try a cstore if possible. */
11213 return emit_store_flag_force (target, code, op0, op1,
11214 operand_mode, unsignedp,
11215 (TYPE_PRECISION (ops->type) == 1
11216 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
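/* Illustrative sketch (editor's addition, not part of the original file):
   the kind of source expression that reaches do_store_flag.  The function
   below is hypothetical and guarded with #if 0 so it is never compiled.
   On a target with a set-on-condition (cstore) pattern the 0/1 result of
   the comparison is materialized directly in a register rather than via
   a conditional branch.  */
#if 0
static int
example_store_flag (int a, int b)
{
  /* The comparison result itself is the value being computed, so it is
     a natural candidate for a store-flag instruction.  */
  return a < b;
}
#endif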
11220 /* Stubs in case we haven't got a casesi insn. */
11221 #ifndef HAVE_casesi
11222 # define HAVE_casesi 0
11223 # define gen_casesi(a, b, c, d, e) (0)
11224 # define CODE_FOR_casesi CODE_FOR_nothing
11225 #endif
11227 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11228 0 otherwise (i.e. if there is no casesi instruction).
11230 DEFAULT_PROBABILITY is the probability of jumping to the default
11231 label. */
11232 int
11233 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11234 rtx table_label, rtx default_label, rtx fallback_label,
11235 int default_probability)
11237 struct expand_operand ops[5];
11238 machine_mode index_mode = SImode;
11239 rtx op1, op2, index;
11241 if (! HAVE_casesi)
11242 return 0;
11244 /* Convert the index to SImode. */
11245 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11247 machine_mode omode = TYPE_MODE (index_type);
11248 rtx rangertx = expand_normal (range);
11250 /* We must handle the endpoints in the original mode. */
11251 index_expr = build2 (MINUS_EXPR, index_type,
11252 index_expr, minval);
11253 minval = integer_zero_node;
11254 index = expand_normal (index_expr);
11255 if (default_label)
11256 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11257 omode, 1, default_label,
11258 default_probability);
11259 /* Now we can safely truncate. */
11260 index = convert_to_mode (index_mode, index, 0);
11262 else
11264 if (TYPE_MODE (index_type) != index_mode)
11266 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11267 index_expr = fold_convert (index_type, index_expr);
11270 index = expand_normal (index_expr);
11273 do_pending_stack_adjust ();
11275 op1 = expand_normal (minval);
11276 op2 = expand_normal (range);
11278 create_input_operand (&ops[0], index, index_mode);
11279 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11280 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11281 create_fixed_operand (&ops[3], table_label);
11282 create_fixed_operand (&ops[4], (default_label
11283 ? default_label
11284 : fallback_label));
11285 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11286 return 1;
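/* Illustrative sketch (editor's addition, not part of the original file):
   the wide-index handling of try_casesi expressed in C.  The helper is
   hypothetical and guarded with #if 0 so it is never compiled; it assumes
   SImode corresponds to unsigned int on the host shown here.  */
#if 0
static unsigned int
example_narrow_index (long long x, long long minval,
                      unsigned long long range, int *out_of_range)
{
  /* Subtract the lower bound and do the bounds check in the wide type,
     just as try_casesi does before truncating the index.  */
  unsigned long long biased = (unsigned long long) x - (unsigned long long) minval;
  *out_of_range = biased > range;

  /* Once the value is known to be within RANGE, truncating it to the
     narrower index mode loses nothing.  */
  return (unsigned int) biased;
}
#endif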
11289 /* Attempt to generate a tablejump instruction; same concept. */
11290 #ifndef HAVE_tablejump
11291 #define HAVE_tablejump 0
11292 #define gen_tablejump(x, y) (0)
11293 #endif
11295 /* Subroutine of the next function.
11297 INDEX is the value being switched on, with the lowest value
11298 in the table already subtracted.
11299 MODE is its expected mode (needed if INDEX is constant).
11300 RANGE is the length of the jump table.
11301 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11303 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11304 index value is out of range.
11305 DEFAULT_PROBABILITY is the probability of jumping to
11306 the default label. */
11308 static void
11309 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11310 rtx default_label, int default_probability)
11312 rtx temp, vector;
11314 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11315 cfun->cfg->max_jumptable_ents = INTVAL (range);
11317 /* Do an unsigned comparison (in the proper mode) between the index
11318 expression and the value which represents the length of the range.
11319 Since we just finished subtracting the lower bound of the range
11320 from the index expression, this comparison allows us to simultaneously
11321 check that the original index expression value is both greater than
11322 or equal to the minimum value of the range and less than or equal to
11323 the maximum value of the range. */
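/* In C terms (editor's illustration): with the lower bound already
   subtracted, the single unsigned test
       if ((unsigned) index > (unsigned) (high - low))
         goto default_label;
   rejects both index < low (which wrapped around to a large unsigned
   value) and index > high.  */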
11325 if (default_label)
11326 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11327 default_label, default_probability);
11330 /* If index is in range, it must fit in Pmode.
11331 Convert to Pmode so we can index with it. */
11332 if (mode != Pmode)
11333 index = convert_to_mode (Pmode, index, 1);
11335 /* Don't let a MEM slip through, because then INDEX that comes
11336 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11337 and break_out_memory_refs will go to work on it and mess it up. */
11338 #ifdef PIC_CASE_VECTOR_ADDRESS
11339 if (flag_pic && !REG_P (index))
11340 index = copy_to_mode_reg (Pmode, index);
11341 #endif
11343 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11344 GET_MODE_SIZE, because this indicates how large insns are. The other
11345 uses should all be Pmode, because they are addresses. This code
11346 could fail if addresses and insns are not the same size. */
11347 index = simplify_gen_binary (MULT, Pmode, index,
11348 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11349 Pmode));
11350 index = simplify_gen_binary (PLUS, Pmode, index,
11351 gen_rtx_LABEL_REF (Pmode, table_label));
11353 #ifdef PIC_CASE_VECTOR_ADDRESS
11354 if (flag_pic)
11355 index = PIC_CASE_VECTOR_ADDRESS (index);
11356 else
11357 #endif
11358 index = memory_address (CASE_VECTOR_MODE, index);
11359 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11360 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11361 convert_move (temp, vector, 0);
11363 emit_jump_insn (gen_tablejump (temp, table_label));
11365 /* If we are generating PIC code or if the table is PC-relative, the
11366 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11367 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11368 emit_barrier ();
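/* Illustrative sketch (editor's addition, not part of the original file):
   what the emitted table jump corresponds to at the source level, written
   with the GNU C labels-as-values extension.  The dispatcher below is
   hypothetical and guarded with #if 0 so it is never compiled.  */
#if 0
static int
example_tablejump (unsigned int index)  /* Lower bound already subtracted.  */
{
  static void *table[] = { &&case0, &&case1, &&case2, &&dflt };

  if (index > 2)         /* Range check, as emitted above.  */
    goto dflt;
  goto *table[index];    /* One indirect jump through the table.  */

 case0: return 10;
 case1: return 20;
 case2: return 30;
 dflt:  return -1;
}
#endif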
11371 int
11372 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11373 rtx table_label, rtx default_label, int default_probability)
11375 rtx index;
11377 if (! HAVE_tablejump)
11378 return 0;
11380 index_expr = fold_build2 (MINUS_EXPR, index_type,
11381 fold_convert (index_type, index_expr),
11382 fold_convert (index_type, minval));
11383 index = expand_normal (index_expr);
11384 do_pending_stack_adjust ();
11386 do_tablejump (index, TYPE_MODE (index_type),
11387 convert_modes (TYPE_MODE (index_type),
11388 TYPE_MODE (TREE_TYPE (range)),
11389 expand_normal (range),
11390 TYPE_UNSIGNED (TREE_TYPE (range))),
11391 table_label, default_label, default_probability);
11392 return 1;
11395 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11396 static rtx
11397 const_vector_from_tree (tree exp)
11399 rtvec v;
11400 unsigned i;
11401 int units;
11402 tree elt;
11403 machine_mode inner, mode;
11405 mode = TYPE_MODE (TREE_TYPE (exp));
11407 if (initializer_zerop (exp))
11408 return CONST0_RTX (mode);
11410 units = GET_MODE_NUNITS (mode);
11411 inner = GET_MODE_INNER (mode);
11413 v = rtvec_alloc (units);
11415 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11417 elt = VECTOR_CST_ELT (exp, i);
11419 if (TREE_CODE (elt) == REAL_CST)
11420 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11421 inner);
11422 else if (TREE_CODE (elt) == FIXED_CST)
11423 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11424 inner);
11425 else
11426 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11429 return gen_rtx_CONST_VECTOR (mode, v);
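/* For example (editor's illustration): a V4SImode VECTOR_CST {1, 2, 3, 4}
   is returned as (const_vector:V4SI [(const_int 1) (const_int 2)
   (const_int 3) (const_int 4)]), while an all-zeros constant is returned
   directly as CONST0_RTX (V4SImode).  */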
11432 /* Build a decl for a personality function given a language prefix. */
11434 tree
11435 build_personality_function (const char *lang)
11437 const char *unwind_and_version;
11438 tree decl, type;
11439 char *name;
11441 switch (targetm_common.except_unwind_info (&global_options))
11443 case UI_NONE:
11444 return NULL;
11445 case UI_SJLJ:
11446 unwind_and_version = "_sj0";
11447 break;
11448 case UI_DWARF2:
11449 case UI_TARGET:
11450 unwind_and_version = "_v0";
11451 break;
11452 case UI_SEH:
11453 unwind_and_version = "_seh0";
11454 break;
11455 default:
11456 gcc_unreachable ();
11459 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11461 type = build_function_type_list (integer_type_node, integer_type_node,
11462 long_long_unsigned_type_node,
11463 ptr_type_node, ptr_type_node, NULL_TREE);
11464 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11465 get_identifier (name), type);
11466 DECL_ARTIFICIAL (decl) = 1;
11467 DECL_EXTERNAL (decl) = 1;
11468 TREE_PUBLIC (decl) = 1;
11470 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11471 are the flags assigned by targetm.encode_section_info. */
11472 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11474 return decl;
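/* For example (editor's illustration): with DWARF2 unwind info,
   build_personality_function ("gxx") yields a decl named
   "__gxx_personality_v0"; the setjmp/longjmp scheme yields
   "__gxx_personality_sj0" and SEH yields "__gxx_personality_seh0".  */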
11477 /* Extract the personality function of DECL and return the corresponding
11478 libfunc. */
11480 rtx
11481 get_personality_function (tree decl)
11483 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11484 enum eh_personality_kind pk;
11486 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11487 if (pk == eh_personality_none)
11488 return NULL;
11490 if (!personality
11491 && pk == eh_personality_any)
11492 personality = lang_hooks.eh_personality ();
11494 if (pk == eh_personality_lang)
11495 gcc_assert (personality != NULL_TREE);
11497 return XEXP (DECL_RTL (personality), 0);
11500 #include "gt-expr.h"