1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "cgraph.h"
37 #include "diagnostic.h"
38 #include "alias.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "attribs.h"
42 #include "varasm.h"
43 #include "except.h"
44 #include "insn-attr.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "stmt.h"
49 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
50 #include "expr.h"
51 #include "optabs-tree.h"
52 #include "libfuncs.h"
53 #include "reload.h"
54 #include "langhooks.h"
55 #include "common/common-target.h"
56 #include "tree-ssa-live.h"
57 #include "tree-outof-ssa.h"
58 #include "tree-ssa-address.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "rtl-chkp.h"
62 #include "ccmp.h"
65 /* If this is nonzero, we do not bother generating VOLATILE
66 around volatile memory references, and we are willing to
67 output indirect addresses. If cse is to follow, we reject
68 indirect addresses so a useful potential cse is generated;
69 if it is used only once, instruction combination will produce
70 the same indirect address eventually. */
71 int cse_not_expected;
73 /* This structure is used by move_by_pieces to describe the move to
74 be performed. */
75 struct move_by_pieces_d
77 rtx to;
78 rtx to_addr;
79 int autinc_to;
80 int explicit_inc_to;
81 rtx from;
82 rtx from_addr;
83 int autinc_from;
84 int explicit_inc_from;
85 unsigned HOST_WIDE_INT len;
86 HOST_WIDE_INT offset;
87 int reverse;
90 /* This structure is used by store_by_pieces to describe the clear to
91 be performed. */
93 struct store_by_pieces_d
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 unsigned HOST_WIDE_INT len;
100 HOST_WIDE_INT offset;
101 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
102 void *constfundata;
103 int reverse;
106 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
107 struct move_by_pieces_d *);
108 static bool block_move_libcall_safe_for_call_parm (void);
109 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
110 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
111 unsigned HOST_WIDE_INT);
112 static tree emit_block_move_libcall_fn (int);
113 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
114 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
115 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
116 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
117 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
118 struct store_by_pieces_d *);
119 static tree clear_storage_libcall_fn (int);
120 static rtx_insn *compress_float_constant (rtx, rtx);
121 static rtx get_subtarget (rtx);
122 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
123 HOST_WIDE_INT, machine_mode,
124 tree, int, alias_set_type);
125 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
126 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
127 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
128 machine_mode, tree, alias_set_type, bool);
130 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
132 static int is_aligning_offset (const_tree, const_tree);
133 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
134 static rtx do_store_flag (sepops, rtx, machine_mode);
135 #ifdef PUSH_ROUNDING
136 static void emit_single_push_insn (machine_mode, rtx, tree);
137 #endif
138 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
139 static rtx const_vector_from_tree (tree);
140 static tree tree_expr_size (const_tree);
141 static HOST_WIDE_INT int_expr_size (tree);
144 /* This is run to set up which modes can be used
145 directly in memory and to initialize the block move optab. It is run
146 at the beginning of compilation and when the target is reinitialized. */
148 void
149 init_expr_target (void)
151 rtx insn, pat;
152 machine_mode mode;
153 int num_clobbers;
154 rtx mem, mem1;
155 rtx reg;
157 /* Try indexing by frame ptr and try by stack ptr.
158 It is known that on the Convex the stack ptr isn't a valid index.
159 With luck, one or the other is valid on any machine. */
160 mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
161 mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
163 /* A scratch register we can modify in-place below to avoid
164 useless RTL allocations. */
165 reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
167 insn = rtx_alloc (INSN);
168 pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
169 PATTERN (insn) = pat;
171 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
172 mode = (machine_mode) ((int) mode + 1))
174 int regno;
176 direct_load[(int) mode] = direct_store[(int) mode] = 0;
177 PUT_MODE (mem, mode);
178 PUT_MODE (mem1, mode);
180 /* See if there is some register that can be used in this mode and
181 directly loaded or stored from memory. */
183 if (mode != VOIDmode && mode != BLKmode)
184 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
185 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
186 regno++)
188 if (! HARD_REGNO_MODE_OK (regno, mode))
189 continue;
191 set_mode_and_regno (reg, mode, regno);
193 SET_SRC (pat) = mem;
194 SET_DEST (pat) = reg;
195 if (recog (pat, insn, &num_clobbers) >= 0)
196 direct_load[(int) mode] = 1;
198 SET_SRC (pat) = mem1;
199 SET_DEST (pat) = reg;
200 if (recog (pat, insn, &num_clobbers) >= 0)
201 direct_load[(int) mode] = 1;
203 SET_SRC (pat) = reg;
204 SET_DEST (pat) = mem;
205 if (recog (pat, insn, &num_clobbers) >= 0)
206 direct_store[(int) mode] = 1;
208 SET_SRC (pat) = reg;
209 SET_DEST (pat) = mem1;
210 if (recog (pat, insn, &num_clobbers) >= 0)
211 direct_store[(int) mode] = 1;
215 mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
217 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
218 mode = GET_MODE_WIDER_MODE (mode))
220 machine_mode srcmode;
221 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
222 srcmode = GET_MODE_WIDER_MODE (srcmode))
224 enum insn_code ic;
226 ic = can_extend_p (mode, srcmode, 0);
227 if (ic == CODE_FOR_nothing)
228 continue;
230 PUT_MODE (mem, srcmode);
232 if (insn_operand_matches (ic, 1, mem))
233 float_extend_from_mem[mode][srcmode] = true;
238 /* This is run at the start of compiling a function. */
240 void
241 init_expr (void)
243 memset (&crtl->expr, 0, sizeof (crtl->expr));
246 /* Copy data from FROM to TO, where the machine modes are not the same.
247 Both modes may be integer, or both may be floating, or both may be
248 fixed-point.
249 UNSIGNEDP should be nonzero if FROM is an unsigned type.
250 This causes zero-extension instead of sign-extension. */
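/* Illustrative sketch (editorial, not part of expr.c): a caller that
   zero-extends an SImode value into a fresh DImode pseudo would write
   something like

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   where SRC is assumed to already carry mode SImode and the final
   argument 1 requests zero-extension rather than sign-extension.  */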
252 void
253 convert_move (rtx to, rtx from, int unsignedp)
255 machine_mode to_mode = GET_MODE (to);
256 machine_mode from_mode = GET_MODE (from);
257 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
258 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
259 enum insn_code code;
260 rtx libcall;
262 /* rtx code for making an equivalent value. */
263 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
264 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
267 gcc_assert (to_real == from_real);
268 gcc_assert (to_mode != BLKmode);
269 gcc_assert (from_mode != BLKmode);
271 /* If the source and destination are already the same, then there's
272 nothing to do. */
273 if (to == from)
274 return;
276 /* If FROM is a SUBREG that indicates that we have already done at least
277 the required extension, strip it. We don't handle such SUBREGs as
278 TO here. */
280 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
281 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
282 >= GET_MODE_PRECISION (to_mode))
283 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
284 from = gen_lowpart (to_mode, from), from_mode = to_mode;
286 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
288 if (to_mode == from_mode
289 || (from_mode == VOIDmode && CONSTANT_P (from)))
291 emit_move_insn (to, from);
292 return;
295 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
297 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
299 if (VECTOR_MODE_P (to_mode))
300 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
301 else
302 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
304 emit_move_insn (to, from);
305 return;
308 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
310 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
311 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
312 return;
315 if (to_real)
317 rtx value;
318 rtx_insn *insns;
319 convert_optab tab;
321 gcc_assert ((GET_MODE_PRECISION (from_mode)
322 != GET_MODE_PRECISION (to_mode))
323 || (DECIMAL_FLOAT_MODE_P (from_mode)
324 != DECIMAL_FLOAT_MODE_P (to_mode)));
326 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
327 /* Conversion between decimal float and binary float, same size. */
328 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
329 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
330 tab = sext_optab;
331 else
332 tab = trunc_optab;
334 /* Try converting directly if the insn is supported. */
336 code = convert_optab_handler (tab, to_mode, from_mode);
337 if (code != CODE_FOR_nothing)
339 emit_unop_insn (code, to, from,
340 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
341 return;
344 /* Otherwise use a libcall. */
345 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
347 /* Is this conversion implemented yet? */
348 gcc_assert (libcall);
350 start_sequence ();
351 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
352 1, from, from_mode);
353 insns = get_insns ();
354 end_sequence ();
355 emit_libcall_block (insns, to, value,
356 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
357 from)
358 : gen_rtx_FLOAT_EXTEND (to_mode, from));
359 return;
362 /* Handle pointer conversion. */ /* SPEE 900220. */
363 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
365 convert_optab ctab;
367 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
368 ctab = trunc_optab;
369 else if (unsignedp)
370 ctab = zext_optab;
371 else
372 ctab = sext_optab;
374 if (convert_optab_handler (ctab, to_mode, from_mode)
375 != CODE_FOR_nothing)
377 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
378 to, from, UNKNOWN);
379 return;
383 /* Targets are expected to provide conversion insns between PxImode and
384 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
385 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
387 machine_mode full_mode
388 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
390 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
391 != CODE_FOR_nothing);
393 if (full_mode != from_mode)
394 from = convert_to_mode (full_mode, from, unsignedp);
395 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
396 to, from, UNKNOWN);
397 return;
399 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
401 rtx new_from;
402 machine_mode full_mode
403 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
404 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
405 enum insn_code icode;
407 icode = convert_optab_handler (ctab, full_mode, from_mode);
408 gcc_assert (icode != CODE_FOR_nothing);
410 if (to_mode == full_mode)
412 emit_unop_insn (icode, to, from, UNKNOWN);
413 return;
416 new_from = gen_reg_rtx (full_mode);
417 emit_unop_insn (icode, new_from, from, UNKNOWN);
419 /* else proceed to integer conversions below. */
420 from_mode = full_mode;
421 from = new_from;
424 /* Make sure both are fixed-point modes or both are not. */
425 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
426 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
427 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
429 /* If we widen from_mode to to_mode and they are in the same class,
430 we won't saturate the result.
431 Otherwise, always saturate the result to play safe. */
432 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
433 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
434 expand_fixed_convert (to, from, 0, 0);
435 else
436 expand_fixed_convert (to, from, 0, 1);
437 return;
440 /* Now both modes are integers. */
442 /* Handle expanding beyond a word. */
443 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
444 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
446 rtx_insn *insns;
447 rtx lowpart;
448 rtx fill_value;
449 rtx lowfrom;
450 int i;
451 machine_mode lowpart_mode;
452 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
454 /* Try converting directly if the insn is supported. */
455 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
456 != CODE_FOR_nothing)
458 /* If FROM is a SUBREG, put it into a register. Do this
459 so that we always generate the same set of insns for
460 better cse'ing; if an intermediate assignment occurred,
461 we won't be doing the operation directly on the SUBREG. */
462 if (optimize > 0 && GET_CODE (from) == SUBREG)
463 from = force_reg (from_mode, from);
464 emit_unop_insn (code, to, from, equiv_code);
465 return;
467 /* Next, try converting via full word. */
468 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
469 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
470 != CODE_FOR_nothing))
472 rtx word_to = gen_reg_rtx (word_mode);
473 if (REG_P (to))
475 if (reg_overlap_mentioned_p (to, from))
476 from = force_reg (from_mode, from);
477 emit_clobber (to);
479 convert_move (word_to, from, unsignedp);
480 emit_unop_insn (code, to, word_to, equiv_code);
481 return;
484 /* No special multiword conversion insn; do it by hand. */
485 start_sequence ();
487 /* Since we will turn this into a no conflict block, we must ensure that
488 the source does not overlap the target, so force it into an isolated
489 register when it might. Likewise for any MEM input, since the
490 conversion sequence might require several references to it and we
491 must ensure we're getting the same value every time. */
493 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
496 /* Get a copy of FROM widened to a word, if necessary. */
497 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
498 lowpart_mode = word_mode;
499 else
500 lowpart_mode = from_mode;
502 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
504 lowpart = gen_lowpart (lowpart_mode, to);
505 emit_move_insn (lowpart, lowfrom);
507 /* Compute the value to put in each remaining word. */
508 if (unsignedp)
509 fill_value = const0_rtx;
510 else
511 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
512 LT, lowfrom, const0_rtx,
513 lowpart_mode, 0, -1);
515 /* Fill the remaining words. */
516 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
518 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
519 rtx subword = operand_subword (to, index, 1, to_mode);
521 gcc_assert (subword);
523 if (fill_value != subword)
524 emit_move_insn (subword, fill_value);
527 insns = get_insns ();
528 end_sequence ();
530 emit_insn (insns);
531 return;
534 /* Truncating multi-word to a word or less. */
535 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
536 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
538 if (!((MEM_P (from)
539 && ! MEM_VOLATILE_P (from)
540 && direct_load[(int) to_mode]
541 && ! mode_dependent_address_p (XEXP (from, 0),
542 MEM_ADDR_SPACE (from)))
543 || REG_P (from)
544 || GET_CODE (from) == SUBREG))
545 from = force_reg (from_mode, from);
546 convert_move (to, gen_lowpart (word_mode, from), 0);
547 return;
550 /* Now follow all the conversions between integers
551 no more than a word long. */
553 /* For truncation, usually we can just refer to FROM in a narrower mode. */
554 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
555 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
557 if (!((MEM_P (from)
558 && ! MEM_VOLATILE_P (from)
559 && direct_load[(int) to_mode]
560 && ! mode_dependent_address_p (XEXP (from, 0),
561 MEM_ADDR_SPACE (from)))
562 || REG_P (from)
563 || GET_CODE (from) == SUBREG))
564 from = force_reg (from_mode, from);
565 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
566 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
567 from = copy_to_reg (from);
568 emit_move_insn (to, gen_lowpart (to_mode, from));
569 return;
572 /* Handle extension. */
573 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
575 /* Convert directly if that works. */
576 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
577 != CODE_FOR_nothing)
579 emit_unop_insn (code, to, from, equiv_code);
580 return;
582 else
584 machine_mode intermediate;
585 rtx tmp;
586 int shift_amount;
588 /* Search for a mode to convert via. */
589 for (intermediate = from_mode; intermediate != VOIDmode;
590 intermediate = GET_MODE_WIDER_MODE (intermediate))
591 if (((can_extend_p (to_mode, intermediate, unsignedp)
592 != CODE_FOR_nothing)
593 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
594 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
595 && (can_extend_p (intermediate, from_mode, unsignedp)
596 != CODE_FOR_nothing))
598 convert_move (to, convert_to_mode (intermediate, from,
599 unsignedp), unsignedp);
600 return;
603 /* No suitable intermediate mode.
604 Generate what we need with shifts. */
605 shift_amount = (GET_MODE_PRECISION (to_mode)
606 - GET_MODE_PRECISION (from_mode));
607 from = gen_lowpart (to_mode, force_reg (from_mode, from));
608 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
609 to, unsignedp);
610 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
611 to, unsignedp);
612 if (tmp != to)
613 emit_move_insn (to, tmp);
614 return;
618 /* Support special truncate insns for certain modes. */
619 if (convert_optab_handler (trunc_optab, to_mode,
620 from_mode) != CODE_FOR_nothing)
622 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
623 to, from, UNKNOWN);
624 return;
627 /* Handle truncation of volatile memrefs, and so on;
628 the things that couldn't be truncated directly,
629 and for which there was no special instruction.
631 ??? Code above formerly short-circuited this, for most integer
632 mode pairs, with a force_reg in from_mode followed by a recursive
633 call to this routine. Appears always to have been wrong. */
634 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
636 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
637 emit_move_insn (to, temp);
638 return;
641 /* Mode combination is not recognized. */
642 gcc_unreachable ();
645 /* Return an rtx for a value that would result
646 from converting X to mode MODE.
647 Both X and MODE may be floating, or both integer.
648 UNSIGNEDP is nonzero if X is an unsigned value.
649 This can be done by referring to a part of X in place
650 or by copying to a new temporary with conversion. */
653 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
655 return convert_modes (mode, VOIDmode, x, unsignedp);
658 /* Return an rtx for a value that would result
659 from converting X from mode OLDMODE to mode MODE.
660 Both modes may be floating, or both integer.
661 UNSIGNEDP is nonzero if X is an unsigned value.
663 This can be done by referring to a part of X in place
664 or by copying to a new temporary with conversion.
666 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
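/* Illustrative note (editorial): convert_to_mode is simply convert_modes
   with OLDMODE == VOIDmode, so

       rtx r = convert_to_mode (SImode, x, 0);

   and

       rtx r = convert_modes (SImode, VOIDmode, x, 0);

   behave the same; the OLDMODE argument only matters when X is a
   constant whose own mode is VOIDmode.  */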
669 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
671 rtx temp;
673 /* If FROM is a SUBREG that indicates that we have already done at least
674 the required extension, strip it. */
676 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
677 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
678 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
679 x = gen_lowpart (mode, SUBREG_REG (x));
681 if (GET_MODE (x) != VOIDmode)
682 oldmode = GET_MODE (x);
684 if (mode == oldmode)
685 return x;
687 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
689 /* If the caller did not tell us the old mode, then there is not
690 much to do with respect to canonicalization. We have to
691 assume that all the bits are significant. */
692 if (GET_MODE_CLASS (oldmode) != MODE_INT)
693 oldmode = MAX_MODE_INT;
694 wide_int w = wide_int::from (std::make_pair (x, oldmode),
695 GET_MODE_PRECISION (mode),
696 unsignedp ? UNSIGNED : SIGNED);
697 return immed_wide_int_const (w, mode);
700 /* We can do this with a gen_lowpart if both desired and current modes
701 are integer, and this is either a constant integer, a register, or a
702 non-volatile MEM. */
703 if (GET_MODE_CLASS (mode) == MODE_INT
704 && GET_MODE_CLASS (oldmode) == MODE_INT
705 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
706 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
707 || (REG_P (x)
708 && (!HARD_REGISTER_P (x)
709 || HARD_REGNO_MODE_OK (REGNO (x), mode))
710 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
712 return gen_lowpart (mode, x);
714 /* Converting an integer constant into MODE is always equivalent to a
715 subreg operation. */
716 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
718 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
719 return simplify_gen_subreg (mode, x, oldmode, 0);
722 temp = gen_reg_rtx (mode);
723 convert_move (temp, x, unsignedp);
724 return temp;
727 /* Return the largest alignment we can use for doing a move (or store)
728 of MAX_PIECES. ALIGN is the largest alignment we could use. */
730 static unsigned int
731 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
733 machine_mode tmode;
735 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
736 if (align >= GET_MODE_ALIGNMENT (tmode))
737 align = GET_MODE_ALIGNMENT (tmode);
738 else
740 machine_mode tmode, xmode;
742 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
743 tmode != VOIDmode;
744 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
745 if (GET_MODE_SIZE (tmode) > max_pieces
746 || SLOW_UNALIGNED_ACCESS (tmode, align))
747 break;
749 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
752 return align;
755 /* Return the widest integer mode no wider than SIZE. If no such mode
756 can be found, return VOIDmode. */
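/* Note (editorial): the comparison in the loop below is strict, so this
   returns the widest integer mode strictly narrower than SIZE; callers
   such as move_by_pieces therefore pass an exclusive bound like
   MOVE_MAX_PIECES + 1.  */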
758 static machine_mode
759 widest_int_mode_for_size (unsigned int size)
761 machine_mode tmode, mode = VOIDmode;
763 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
764 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
765 if (GET_MODE_SIZE (tmode) < size)
766 mode = tmode;
768 return mode;
771 /* Determine whether the LEN bytes can be moved by using several move
772 instructions. Return nonzero if a call to move_by_pieces should
773 succeed. */
776 can_move_by_pieces (unsigned HOST_WIDE_INT len,
777 unsigned int align)
779 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
780 optimize_insn_for_speed_p ());
783 /* Generate several move instructions to copy LEN bytes from block FROM to
784 block TO. (These are MEM rtx's with BLKmode).
786 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
787 used to push FROM to the stack.
789 ALIGN is maximum stack alignment we can assume.
791 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
792 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
793 stpcpy. */
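/* Illustrative sketch (editorial; DEST_MEM, SRC_MEM and LEN_RTX are
   hypothetical names): a caller that wants the mempcpy-style end address
   for a constant-length copy could write

       rtx end = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
                                 MEM_ALIGN (src_mem), 1);

   with ENDP == 1 selecting the address one past the last byte stored.  */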
796 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
797 unsigned int align, int endp)
799 struct move_by_pieces_d data;
800 machine_mode to_addr_mode;
801 machine_mode from_addr_mode = get_address_mode (from);
802 rtx to_addr, from_addr = XEXP (from, 0);
803 unsigned int max_size = MOVE_MAX_PIECES + 1;
804 enum insn_code icode;
806 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
808 data.offset = 0;
809 data.from_addr = from_addr;
810 if (to)
812 to_addr_mode = get_address_mode (to);
813 to_addr = XEXP (to, 0);
814 data.to = to;
815 data.autinc_to
816 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
817 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
818 data.reverse
819 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
821 else
823 to_addr_mode = VOIDmode;
824 to_addr = NULL_RTX;
825 data.to = NULL_RTX;
826 data.autinc_to = 1;
827 if (STACK_GROWS_DOWNWARD)
828 data.reverse = 1;
829 else
830 data.reverse = 0;
832 data.to_addr = to_addr;
833 data.from = from;
834 data.autinc_from
835 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
836 || GET_CODE (from_addr) == POST_INC
837 || GET_CODE (from_addr) == POST_DEC);
839 data.explicit_inc_from = 0;
840 data.explicit_inc_to = 0;
841 if (data.reverse) data.offset = len;
842 data.len = len;
844 /* If copying requires more than two move insns,
845 copy addresses to registers (to make displacements shorter)
846 and use post-increment if available. */
847 if (!(data.autinc_from && data.autinc_to)
848 && move_by_pieces_ninsns (len, align, max_size) > 2)
850 /* Find the mode of the largest move...
851 MODE might not be used depending on the definitions of the
852 USE_* macros below. */
853 machine_mode mode ATTRIBUTE_UNUSED
854 = widest_int_mode_for_size (max_size);
856 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
858 data.from_addr = copy_to_mode_reg (from_addr_mode,
859 plus_constant (from_addr_mode,
860 from_addr, len));
861 data.autinc_from = 1;
862 data.explicit_inc_from = -1;
864 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
866 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
867 data.autinc_from = 1;
868 data.explicit_inc_from = 1;
870 if (!data.autinc_from && CONSTANT_P (from_addr))
871 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
872 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
874 data.to_addr = copy_to_mode_reg (to_addr_mode,
875 plus_constant (to_addr_mode,
876 to_addr, len));
877 data.autinc_to = 1;
878 data.explicit_inc_to = -1;
880 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
882 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
883 data.autinc_to = 1;
884 data.explicit_inc_to = 1;
886 if (!data.autinc_to && CONSTANT_P (to_addr))
887 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
890 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
892 /* First move what we can in the largest integer mode, then go to
893 successively smaller modes. */
895 while (max_size > 1 && data.len > 0)
897 machine_mode mode = widest_int_mode_for_size (max_size);
899 if (mode == VOIDmode)
900 break;
902 icode = optab_handler (mov_optab, mode);
903 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
904 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
906 max_size = GET_MODE_SIZE (mode);
909 /* The code above should have handled everything. */
910 gcc_assert (!data.len);
912 if (endp)
914 rtx to1;
916 gcc_assert (!data.reverse);
917 if (data.autinc_to)
919 if (endp == 2)
921 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
922 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
923 else
924 data.to_addr = copy_to_mode_reg (to_addr_mode,
925 plus_constant (to_addr_mode,
926 data.to_addr,
927 -1));
929 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
930 data.offset);
932 else
934 if (endp == 2)
935 --data.offset;
936 to1 = adjust_address (data.to, QImode, data.offset);
938 return to1;
940 else
941 return data.to;
944 /* Return number of insns required to move L bytes by pieces.
945 ALIGN (in bits) is maximum alignment we can assume. */
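/* Worked example (editorial, assuming a 32-bit target with
   MOVE_MAX_PIECES == 4 and sufficient alignment): L == 10 is counted as
   two SImode moves plus one HImode move, i.e. 3 insns.  */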
947 unsigned HOST_WIDE_INT
948 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
949 unsigned int max_size)
951 unsigned HOST_WIDE_INT n_insns = 0;
953 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
955 while (max_size > 1 && l > 0)
957 machine_mode mode;
958 enum insn_code icode;
960 mode = widest_int_mode_for_size (max_size);
962 if (mode == VOIDmode)
963 break;
965 icode = optab_handler (mov_optab, mode);
966 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
967 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
969 max_size = GET_MODE_SIZE (mode);
972 gcc_assert (!l);
973 return n_insns;
976 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
977 with move instructions for mode MODE. GENFUN is the gen_... function
978 to make a move insn for that mode. DATA has all the other info. */
980 static void
981 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
982 struct move_by_pieces_d *data)
984 unsigned int size = GET_MODE_SIZE (mode);
985 rtx to1 = NULL_RTX, from1;
987 while (data->len >= size)
989 if (data->reverse)
990 data->offset -= size;
992 if (data->to)
994 if (data->autinc_to)
995 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
996 data->offset);
997 else
998 to1 = adjust_address (data->to, mode, data->offset);
1001 if (data->autinc_from)
1002 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1003 data->offset);
1004 else
1005 from1 = adjust_address (data->from, mode, data->offset);
1007 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1008 emit_insn (gen_add2_insn (data->to_addr,
1009 gen_int_mode (-(HOST_WIDE_INT) size,
1010 GET_MODE (data->to_addr))));
1011 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1012 emit_insn (gen_add2_insn (data->from_addr,
1013 gen_int_mode (-(HOST_WIDE_INT) size,
1014 GET_MODE (data->from_addr))));
1016 if (data->to)
1017 emit_insn ((*genfun) (to1, from1));
1018 else
1020 #ifdef PUSH_ROUNDING
1021 emit_single_push_insn (mode, from1, NULL);
1022 #else
1023 gcc_unreachable ();
1024 #endif
1027 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1028 emit_insn (gen_add2_insn (data->to_addr,
1029 gen_int_mode (size,
1030 GET_MODE (data->to_addr))));
1031 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1032 emit_insn (gen_add2_insn (data->from_addr,
1033 gen_int_mode (size,
1034 GET_MODE (data->from_addr))));
1036 if (! data->reverse)
1037 data->offset += size;
1039 data->len -= size;
1043 /* Emit code to move a block Y to a block X. This may be done with
1044 string-move instructions, with multiple scalar move instructions,
1045 or with a library call.
1047 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1048 SIZE is an rtx that says how long they are.
1049 ALIGN is the maximum alignment we can assume they have.
1050 METHOD describes what kind of copy this is, and what mechanisms may be used.
1051 MIN_SIZE is the minimal size of block to move
1052 MAX_SIZE is the maximal size of the block to move; if it cannot be
1053 represented in unsigned HOST_WIDE_INT, it is the mask of all ones.
1055 Return the address of the new block, if memcpy is called and returns it,
1056 0 otherwise. */
1059 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1060 unsigned int expected_align, HOST_WIDE_INT expected_size,
1061 unsigned HOST_WIDE_INT min_size,
1062 unsigned HOST_WIDE_INT max_size,
1063 unsigned HOST_WIDE_INT probable_max_size)
1065 bool may_use_call;
1066 rtx retval = 0;
1067 unsigned int align;
1069 gcc_assert (size);
1070 if (CONST_INT_P (size)
1071 && INTVAL (size) == 0)
1072 return 0;
1074 switch (method)
1076 case BLOCK_OP_NORMAL:
1077 case BLOCK_OP_TAILCALL:
1078 may_use_call = true;
1079 break;
1081 case BLOCK_OP_CALL_PARM:
1082 may_use_call = block_move_libcall_safe_for_call_parm ();
1084 /* Make inhibit_defer_pop nonzero around the library call
1085 to force it to pop the arguments right away. */
1086 NO_DEFER_POP;
1087 break;
1089 case BLOCK_OP_NO_LIBCALL:
1090 may_use_call = false;
1091 break;
1093 default:
1094 gcc_unreachable ();
1097 gcc_assert (MEM_P (x) && MEM_P (y));
1098 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1099 gcc_assert (align >= BITS_PER_UNIT);
1101 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1102 block copy is more efficient for other large modes, e.g. DCmode. */
1103 x = adjust_address (x, BLKmode, 0);
1104 y = adjust_address (y, BLKmode, 0);
1106 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1107 can be incorrect is coming from __builtin_memcpy. */
1108 if (CONST_INT_P (size))
1110 x = shallow_copy_rtx (x);
1111 y = shallow_copy_rtx (y);
1112 set_mem_size (x, INTVAL (size));
1113 set_mem_size (y, INTVAL (size));
1116 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1117 move_by_pieces (x, y, INTVAL (size), align, 0);
1118 else if (emit_block_move_via_movmem (x, y, size, align,
1119 expected_align, expected_size,
1120 min_size, max_size, probable_max_size))
1122 else if (may_use_call
1123 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1124 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1126 /* Since x and y are passed to a libcall, mark the corresponding
1127 tree EXPR as addressable. */
1128 tree y_expr = MEM_EXPR (y);
1129 tree x_expr = MEM_EXPR (x);
1130 if (y_expr)
1131 mark_addressable (y_expr);
1132 if (x_expr)
1133 mark_addressable (x_expr);
1134 retval = emit_block_move_via_libcall (x, y, size,
1135 method == BLOCK_OP_TAILCALL);
1138 else
1139 emit_block_move_via_loop (x, y, size, align);
1141 if (method == BLOCK_OP_CALL_PARM)
1142 OK_DEFER_POP;
1144 return retval;
1148 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1150 unsigned HOST_WIDE_INT max, min = 0;
1151 if (GET_CODE (size) == CONST_INT)
1152 min = max = UINTVAL (size);
1153 else
1154 max = GET_MODE_MASK (GET_MODE (size));
1155 return emit_block_move_hints (x, y, size, method, 0, -1,
1156 min, max, max);
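/* Usage note (editorial): a typical caller copying a 16-byte BLKmode
   aggregate simply does

       emit_block_move (dst_mem, src_mem, GEN_INT (16), BLOCK_OP_NORMAL);

   (DST_MEM and SRC_MEM are hypothetical MEMs); the wrapper above then
   reaches emit_block_move_hints with MIN_SIZE == MAX_SIZE == 16.  */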
1159 /* A subroutine of emit_block_move. Returns true if calling the
1160 block move libcall will not clobber any parameters which may have
1161 already been placed on the stack. */
1163 static bool
1164 block_move_libcall_safe_for_call_parm (void)
1166 #if defined (REG_PARM_STACK_SPACE)
1167 tree fn;
1168 #endif
1170 /* If arguments are pushed on the stack, then they're safe. */
1171 if (PUSH_ARGS)
1172 return true;
1174 /* If registers go on the stack anyway, any argument is sure to clobber
1175 an outgoing argument. */
1176 #if defined (REG_PARM_STACK_SPACE)
1177 fn = emit_block_move_libcall_fn (false);
1178 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1179 depend on its argument. */
1180 (void) fn;
1181 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1182 && REG_PARM_STACK_SPACE (fn) != 0)
1183 return false;
1184 #endif
1186 /* If any argument goes in memory, then it might clobber an outgoing
1187 argument. */
1189 CUMULATIVE_ARGS args_so_far_v;
1190 cumulative_args_t args_so_far;
1191 tree fn, arg;
1193 fn = emit_block_move_libcall_fn (false);
1194 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1195 args_so_far = pack_cumulative_args (&args_so_far_v);
1197 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1198 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1200 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1201 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1202 NULL_TREE, true);
1203 if (!tmp || !REG_P (tmp))
1204 return false;
1205 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1206 return false;
1207 targetm.calls.function_arg_advance (args_so_far, mode,
1208 NULL_TREE, true);
1211 return true;
1214 /* A subroutine of emit_block_move. Expand a movmem pattern;
1215 return true if successful. */
1217 static bool
1218 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1219 unsigned int expected_align, HOST_WIDE_INT expected_size,
1220 unsigned HOST_WIDE_INT min_size,
1221 unsigned HOST_WIDE_INT max_size,
1222 unsigned HOST_WIDE_INT probable_max_size)
1224 int save_volatile_ok = volatile_ok;
1225 machine_mode mode;
1227 if (expected_align < align)
1228 expected_align = align;
1229 if (expected_size != -1)
1231 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1232 expected_size = probable_max_size;
1233 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1234 expected_size = min_size;
1237 /* Since this is a move insn, we don't care about volatility. */
1238 volatile_ok = 1;
1240 /* Try the most limited insn first, because there's no point
1241 including more than one in the machine description unless
1242 the more limited one has some advantage. */
1244 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1245 mode = GET_MODE_WIDER_MODE (mode))
1247 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1249 if (code != CODE_FOR_nothing
1250 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1251 here because if SIZE is less than the mode mask, as it is
1252 returned by the macro, it will definitely be less than the
1253 actual mode mask. Since SIZE is within the Pmode address
1254 space, we limit MODE to Pmode. */
1255 && ((CONST_INT_P (size)
1256 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1257 <= (GET_MODE_MASK (mode) >> 1)))
1258 || max_size <= (GET_MODE_MASK (mode) >> 1)
1259 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1261 struct expand_operand ops[9];
1262 unsigned int nops;
1264 /* ??? When called via emit_block_move_for_call, it'd be
1265 nice if there were some way to inform the backend, so
1266 that it doesn't fail the expansion because it thinks
1267 emitting the libcall would be more efficient. */
1268 nops = insn_data[(int) code].n_generator_args;
1269 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1271 create_fixed_operand (&ops[0], x);
1272 create_fixed_operand (&ops[1], y);
1273 /* The check above guarantees that this size conversion is valid. */
1274 create_convert_operand_to (&ops[2], size, mode, true);
1275 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1276 if (nops >= 6)
1278 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1279 create_integer_operand (&ops[5], expected_size);
1281 if (nops >= 8)
1283 create_integer_operand (&ops[6], min_size);
1284 /* If we cannot represent the maximal size,
1285 make the parameter NULL. */
1286 if ((HOST_WIDE_INT) max_size != -1)
1287 create_integer_operand (&ops[7], max_size);
1288 else
1289 create_fixed_operand (&ops[7], NULL);
1291 if (nops == 9)
1293 /* If we cannot represent the maximal size,
1294 make the parameter NULL. */
1295 if ((HOST_WIDE_INT) probable_max_size != -1)
1296 create_integer_operand (&ops[8], probable_max_size);
1297 else
1298 create_fixed_operand (&ops[8], NULL);
1300 if (maybe_expand_insn (code, nops, ops))
1302 volatile_ok = save_volatile_ok;
1303 return true;
1308 volatile_ok = save_volatile_ok;
1309 return false;
1312 /* A subroutine of emit_block_move. Expand a call to memcpy.
1313 Return the return value from memcpy, 0 otherwise. */
1316 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1318 rtx dst_addr, src_addr;
1319 tree call_expr, fn, src_tree, dst_tree, size_tree;
1320 machine_mode size_mode;
1321 rtx retval;
1323 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1324 pseudos. We can then place those new pseudos into a VAR_DECL and
1325 use them later. */
1327 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1328 src_addr = copy_addr_to_reg (XEXP (src, 0));
1330 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1331 src_addr = convert_memory_address (ptr_mode, src_addr);
1333 dst_tree = make_tree (ptr_type_node, dst_addr);
1334 src_tree = make_tree (ptr_type_node, src_addr);
1336 size_mode = TYPE_MODE (sizetype);
1338 size = convert_to_mode (size_mode, size, 1);
1339 size = copy_to_mode_reg (size_mode, size);
1341 /* It is incorrect to use the libcall calling conventions to call
1342 memcpy in this context. This could be a user call to memcpy and
1343 the user may wish to examine the return value from memcpy. For
1344 targets where libcalls and normal calls have different conventions
1345 for returning pointers, we could end up generating incorrect code. */
1347 size_tree = make_tree (sizetype, size);
1349 fn = emit_block_move_libcall_fn (true);
1350 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1351 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1353 retval = expand_normal (call_expr);
1355 return retval;
1358 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1359 for the function we use for block copies. */
1361 static GTY(()) tree block_move_fn;
1363 void
1364 init_block_move_fn (const char *asmspec)
1366 if (!block_move_fn)
1368 tree args, fn, attrs, attr_args;
1370 fn = get_identifier ("memcpy");
1371 args = build_function_type_list (ptr_type_node, ptr_type_node,
1372 const_ptr_type_node, sizetype,
1373 NULL_TREE);
1375 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1376 DECL_EXTERNAL (fn) = 1;
1377 TREE_PUBLIC (fn) = 1;
1378 DECL_ARTIFICIAL (fn) = 1;
1379 TREE_NOTHROW (fn) = 1;
1380 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1381 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1383 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1384 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1386 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1388 block_move_fn = fn;
1391 if (asmspec)
1392 set_user_assembler_name (block_move_fn, asmspec);
1395 static tree
1396 emit_block_move_libcall_fn (int for_call)
1398 static bool emitted_extern;
1400 if (!block_move_fn)
1401 init_block_move_fn (NULL);
1403 if (for_call && !emitted_extern)
1405 emitted_extern = true;
1406 make_decl_rtl (block_move_fn);
1409 return block_move_fn;
1412 /* A subroutine of emit_block_move. Copy the data via an explicit
1413 loop. This is used only when libcalls are forbidden. */
1414 /* ??? It'd be nice to copy in hunks larger than QImode. */
1416 static void
1417 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1418 unsigned int align ATTRIBUTE_UNUSED)
1420 rtx_code_label *cmp_label, *top_label;
1421 rtx iter, x_addr, y_addr, tmp;
1422 machine_mode x_addr_mode = get_address_mode (x);
1423 machine_mode y_addr_mode = get_address_mode (y);
1424 machine_mode iter_mode;
1426 iter_mode = GET_MODE (size);
1427 if (iter_mode == VOIDmode)
1428 iter_mode = word_mode;
1430 top_label = gen_label_rtx ();
1431 cmp_label = gen_label_rtx ();
1432 iter = gen_reg_rtx (iter_mode);
1434 emit_move_insn (iter, const0_rtx);
1436 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1437 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1438 do_pending_stack_adjust ();
1440 emit_jump (cmp_label);
1441 emit_label (top_label);
1443 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1444 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1446 if (x_addr_mode != y_addr_mode)
1447 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1448 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1450 x = change_address (x, QImode, x_addr);
1451 y = change_address (y, QImode, y_addr);
1453 emit_move_insn (x, y);
1455 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1456 true, OPTAB_LIB_WIDEN);
1457 if (tmp != iter)
1458 emit_move_insn (iter, tmp);
1460 emit_label (cmp_label);
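/* The backward branch below, back to TOP_LABEL while bytes remain, is
   annotated as taken with roughly 90% probability
   (REG_BR_PROB_BASE * 90 / 100), marking the copy loop as hot relative
   to the fall-through exit.  */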
1462 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1463 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1466 /* Copy all or part of a value X into registers starting at REGNO.
1467 The number of registers to be filled is NREGS. */
1469 void
1470 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1472 if (nregs == 0)
1473 return;
1475 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1476 x = validize_mem (force_const_mem (mode, x));
1478 /* See if the machine can do this with a load multiple insn. */
1479 if (targetm.have_load_multiple ())
1481 rtx_insn *last = get_last_insn ();
1482 rtx first = gen_rtx_REG (word_mode, regno);
1483 if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1484 GEN_INT (nregs)))
1486 emit_insn (pat);
1487 return;
1489 else
1490 delete_insns_since (last);
1493 for (int i = 0; i < nregs; i++)
1494 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1495 operand_subword_force (x, i, mode));
1498 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1499 The number of registers to be filled is NREGS. */
1501 void
1502 move_block_from_reg (int regno, rtx x, int nregs)
1504 if (nregs == 0)
1505 return;
1507 /* See if the machine can do this with a store multiple insn. */
1508 if (targetm.have_store_multiple ())
1510 rtx_insn *last = get_last_insn ();
1511 rtx first = gen_rtx_REG (word_mode, regno);
1512 if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1513 GEN_INT (nregs)))
1515 emit_insn (pat);
1516 return;
1518 else
1519 delete_insns_since (last);
1522 for (int i = 0; i < nregs; i++)
1524 rtx tem = operand_subword (x, i, 1, BLKmode);
1526 gcc_assert (tem);
1528 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1532 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1533 ORIG, where ORIG is a non-consecutive group of registers represented by
1534 a PARALLEL. The clone is identical to the original except in that the
1535 original set of registers is replaced by a new set of pseudo registers.
1536 The new set has the same modes as the original set. */
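/* Editorial note on the PARALLEL format used here and below (assuming a
   64-bit example target): a group such as

       (parallel [(expr_list (reg:DI 3) (const_int 0))
                  (expr_list (reg:DI 4) (const_int 8))])

   describes a value whose bytes 0-7 live in register 3 and whose bytes
   8-15 live in register 4; the second operand of each EXPR_LIST is the
   byte offset within the containing object.  */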
1539 gen_group_rtx (rtx orig)
1541 int i, length;
1542 rtx *tmps;
1544 gcc_assert (GET_CODE (orig) == PARALLEL);
1546 length = XVECLEN (orig, 0);
1547 tmps = XALLOCAVEC (rtx, length);
1549 /* Skip a NULL entry in first slot. */
1550 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1552 if (i)
1553 tmps[0] = 0;
1555 for (; i < length; i++)
1557 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1558 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1560 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1563 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1566 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1567 except that values are placed in TMPS[i], and must later be moved
1568 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1570 static void
1571 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1573 rtx src;
1574 int start, i;
1575 machine_mode m = GET_MODE (orig_src);
1577 gcc_assert (GET_CODE (dst) == PARALLEL);
1579 if (m != VOIDmode
1580 && !SCALAR_INT_MODE_P (m)
1581 && !MEM_P (orig_src)
1582 && GET_CODE (orig_src) != CONCAT)
1584 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1585 if (imode == BLKmode)
1586 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1587 else
1588 src = gen_reg_rtx (imode);
1589 if (imode != BLKmode)
1590 src = gen_lowpart (GET_MODE (orig_src), src);
1591 emit_move_insn (src, orig_src);
1592 /* ...and back again. */
1593 if (imode != BLKmode)
1594 src = gen_lowpart (imode, src);
1595 emit_group_load_1 (tmps, dst, src, type, ssize);
1596 return;
1599 /* Check for a NULL entry, used to indicate that the parameter goes
1600 both on the stack and in registers. */
1601 if (XEXP (XVECEXP (dst, 0, 0), 0))
1602 start = 0;
1603 else
1604 start = 1;
1606 /* Process the pieces. */
1607 for (i = start; i < XVECLEN (dst, 0); i++)
1609 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1610 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1611 unsigned int bytelen = GET_MODE_SIZE (mode);
1612 int shift = 0;
1614 /* Handle trailing fragments that run over the size of the struct. */
1615 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1617 /* Arrange to shift the fragment to where it belongs.
1618 extract_bit_field loads to the lsb of the reg. */
1619 if (
1620 #ifdef BLOCK_REG_PADDING
1621 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1622 == (BYTES_BIG_ENDIAN ? upward : downward)
1623 #else
1624 BYTES_BIG_ENDIAN
1625 #endif
1627 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1628 bytelen = ssize - bytepos;
1629 gcc_assert (bytelen > 0);
1632 /* If we won't be loading directly from memory, protect the real source
1633 from strange tricks we might play; but make sure that the source can
1634 be loaded directly into the destination. */
1635 src = orig_src;
1636 if (!MEM_P (orig_src)
1637 && (!CONSTANT_P (orig_src)
1638 || (GET_MODE (orig_src) != mode
1639 && GET_MODE (orig_src) != VOIDmode)))
1641 if (GET_MODE (orig_src) == VOIDmode)
1642 src = gen_reg_rtx (mode);
1643 else
1644 src = gen_reg_rtx (GET_MODE (orig_src));
1646 emit_move_insn (src, orig_src);
1649 /* Optimize the access just a bit. */
1650 if (MEM_P (src)
1651 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1652 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1653 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1654 && bytelen == GET_MODE_SIZE (mode))
1656 tmps[i] = gen_reg_rtx (mode);
1657 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1659 else if (COMPLEX_MODE_P (mode)
1660 && GET_MODE (src) == mode
1661 && bytelen == GET_MODE_SIZE (mode))
1662 /* Let emit_move_complex do the bulk of the work. */
1663 tmps[i] = src;
1664 else if (GET_CODE (src) == CONCAT)
1666 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1667 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1669 if ((bytepos == 0 && bytelen == slen0)
1670 || (bytepos != 0 && bytepos + bytelen <= slen))
1672 /* The following assumes that the concatenated objects all
1673 have the same size. In this case, a simple calculation
1674 can be used to determine the object and the bit field
1675 to be extracted. */
1676 tmps[i] = XEXP (src, bytepos / slen0);
1677 if (! CONSTANT_P (tmps[i])
1678 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1679 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1680 (bytepos % slen0) * BITS_PER_UNIT,
1681 1, NULL_RTX, mode, mode);
1683 else
1685 rtx mem;
1687 gcc_assert (!bytepos);
1688 mem = assign_stack_temp (GET_MODE (src), slen);
1689 emit_move_insn (mem, src);
1690 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1691 0, 1, NULL_RTX, mode, mode);
1694 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1695 SIMD register, which is currently broken. While we get GCC
1696 to emit proper RTL for these cases, let's dump to memory. */
1697 else if (VECTOR_MODE_P (GET_MODE (dst))
1698 && REG_P (src))
1700 int slen = GET_MODE_SIZE (GET_MODE (src));
1701 rtx mem;
1703 mem = assign_stack_temp (GET_MODE (src), slen);
1704 emit_move_insn (mem, src);
1705 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1707 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1708 && XVECLEN (dst, 0) > 1)
1709 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1710 else if (CONSTANT_P (src))
1712 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1714 if (len == ssize)
1715 tmps[i] = src;
1716 else
1718 rtx first, second;
1720 /* TODO: const_wide_int can have sizes other than this... */
1721 gcc_assert (2 * len == ssize);
1722 split_double (src, &first, &second);
1723 if (i)
1724 tmps[i] = second;
1725 else
1726 tmps[i] = first;
1729 else if (REG_P (src) && GET_MODE (src) == mode)
1730 tmps[i] = src;
1731 else
1732 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1733 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1734 mode, mode);
1736 if (shift)
1737 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1738 shift, tmps[i], 0);
1742 /* Emit code to move a block SRC of type TYPE to a block DST,
1743 where DST is non-consecutive registers represented by a PARALLEL.
1744 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1745 if not known. */
1747 void
1748 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1750 rtx *tmps;
1751 int i;
1753 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1754 emit_group_load_1 (tmps, dst, src, type, ssize);
1756 /* Copy the extracted pieces into the proper (probable) hard regs. */
1757 for (i = 0; i < XVECLEN (dst, 0); i++)
1759 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1760 if (d == NULL)
1761 continue;
1762 emit_move_insn (d, tmps[i]);
1766 /* Similar, but load SRC into new pseudos in a format that looks like
1767 PARALLEL. This can later be fed to emit_group_move to get things
1768 in the right place. */
1771 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1773 rtvec vec;
1774 int i;
1776 vec = rtvec_alloc (XVECLEN (parallel, 0));
1777 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1779 /* Convert the vector to look just like the original PARALLEL, except
1780 with the computed values. */
1781 for (i = 0; i < XVECLEN (parallel, 0); i++)
1783 rtx e = XVECEXP (parallel, 0, i);
1784 rtx d = XEXP (e, 0);
1786 if (d)
1788 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1789 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1791 RTVEC_ELT (vec, i) = e;
1794 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1797 /* Emit code to move a block SRC to block DST, where SRC and DST are
1798 non-consecutive groups of registers, each represented by a PARALLEL. */
1800 void
1801 emit_group_move (rtx dst, rtx src)
1803 int i;
1805 gcc_assert (GET_CODE (src) == PARALLEL
1806 && GET_CODE (dst) == PARALLEL
1807 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1809 /* Skip first entry if NULL. */
1810 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1811 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1812 XEXP (XVECEXP (src, 0, i), 0));
1815 /* Move a group of registers represented by a PARALLEL into pseudos. */
1818 emit_group_move_into_temps (rtx src)
1820 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1821 int i;
1823 for (i = 0; i < XVECLEN (src, 0); i++)
1825 rtx e = XVECEXP (src, 0, i);
1826 rtx d = XEXP (e, 0);
1828 if (d)
1829 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1830 RTVEC_ELT (vec, i) = e;
1833 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1836 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1837 where SRC is non-consecutive registers represented by a PARALLEL.
1838 SSIZE represents the total size of block ORIG_DST, or -1 if not
1839 known. */
1841 void
1842 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1844 rtx *tmps, dst;
1845 int start, finish, i;
1846 machine_mode m = GET_MODE (orig_dst);
1848 gcc_assert (GET_CODE (src) == PARALLEL);
1850 if (!SCALAR_INT_MODE_P (m)
1851 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1853 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1854 if (imode == BLKmode)
1855 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1856 else
1857 dst = gen_reg_rtx (imode);
1858 emit_group_store (dst, src, type, ssize);
1859 if (imode != BLKmode)
1860 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1861 emit_move_insn (orig_dst, dst);
1862 return;
1865 /* Check for a NULL entry, used to indicate that the parameter goes
1866 both on the stack and in registers. */
1867 if (XEXP (XVECEXP (src, 0, 0), 0))
1868 start = 0;
1869 else
1870 start = 1;
1871 finish = XVECLEN (src, 0);
1873 tmps = XALLOCAVEC (rtx, finish);
1875 /* Copy the (probable) hard regs into pseudos. */
1876 for (i = start; i < finish; i++)
1878 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1879 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1881 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1882 emit_move_insn (tmps[i], reg);
1884 else
1885 tmps[i] = reg;
1888 /* If we won't be storing directly into memory, protect the real destination
1889 from strange tricks we might play. */
1890 dst = orig_dst;
1891 if (GET_CODE (dst) == PARALLEL)
1893 rtx temp;
1895 /* We can get a PARALLEL dst if there is a conditional expression in
1896 a return statement. In that case, the dst and src are the same,
1897 so no action is necessary. */
1898 if (rtx_equal_p (dst, src))
1899 return;
1901 /* It is unclear if we can ever reach here, but we may as well handle
1902 it. Allocate a temporary, and split this into a store/load to/from
1903 the temporary. */
1904 temp = assign_stack_temp (GET_MODE (dst), ssize);
1905 emit_group_store (temp, src, type, ssize);
1906 emit_group_load (dst, temp, type, ssize);
1907 return;
1909 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1911 machine_mode outer = GET_MODE (dst);
1912 machine_mode inner;
1913 HOST_WIDE_INT bytepos;
1914 bool done = false;
1915 rtx temp;
1917 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1918 dst = gen_reg_rtx (outer);
1920 /* Make life a bit easier for combine. */
1921 /* If the first element of the vector is the low part
1922 of the destination mode, use a paradoxical subreg to
1923 initialize the destination. */
1924 if (start < finish)
1926 inner = GET_MODE (tmps[start]);
1927 bytepos = subreg_lowpart_offset (inner, outer);
1928 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1930 temp = simplify_gen_subreg (outer, tmps[start],
1931 inner, 0);
1932 if (temp)
1934 emit_move_insn (dst, temp);
1935 done = true;
1936 start++;
1941 /* If the first element wasn't the low part, try the last. */
1942 if (!done
1943 && start < finish - 1)
1945 inner = GET_MODE (tmps[finish - 1]);
1946 bytepos = subreg_lowpart_offset (inner, outer);
1947 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1949 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1950 inner, 0);
1951 if (temp)
1953 emit_move_insn (dst, temp);
1954 done = true;
1955 finish--;
1960 /* Otherwise, simply initialize the result to zero. */
1961 if (!done)
1962 emit_move_insn (dst, CONST0_RTX (outer));
1965 /* Process the pieces. */
1966 for (i = start; i < finish; i++)
1968 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1969 machine_mode mode = GET_MODE (tmps[i]);
1970 unsigned int bytelen = GET_MODE_SIZE (mode);
1971 unsigned int adj_bytelen;
1972 rtx dest = dst;
1974 /* Handle trailing fragments that run over the size of the struct. */
1975 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1976 adj_bytelen = ssize - bytepos;
1977 else
1978 adj_bytelen = bytelen;
1980 if (GET_CODE (dst) == CONCAT)
1982 if (bytepos + adj_bytelen
1983 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1984 dest = XEXP (dst, 0);
1985 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1987 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1988 dest = XEXP (dst, 1);
1990 else
1992 machine_mode dest_mode = GET_MODE (dest);
1993 machine_mode tmp_mode = GET_MODE (tmps[i]);
1995 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1997 if (GET_MODE_ALIGNMENT (dest_mode)
1998 >= GET_MODE_ALIGNMENT (tmp_mode))
2000 dest = assign_stack_temp (dest_mode,
2001 GET_MODE_SIZE (dest_mode));
2002 emit_move_insn (adjust_address (dest,
2003 tmp_mode,
2004 bytepos),
2005 tmps[i]);
2006 dst = dest;
2008 else
2010 dest = assign_stack_temp (tmp_mode,
2011 GET_MODE_SIZE (tmp_mode));
2012 emit_move_insn (dest, tmps[i]);
2013 dst = adjust_address (dest, dest_mode, bytepos);
2015 break;
2019 /* Handle trailing fragments that run over the size of the struct. */
2020 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2022 /* store_bit_field always takes its value from the lsb.
2023 Move the fragment to the lsb if it's not already there. */
2024 if (
2025 #ifdef BLOCK_REG_PADDING
2026 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2027 == (BYTES_BIG_ENDIAN ? upward : downward)
2028 #else
2029 BYTES_BIG_ENDIAN
2030 #endif
2033 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2034 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2035 shift, tmps[i], 0);
2038 /* Make sure not to write past the end of the struct. */
2039 store_bit_field (dest,
2040 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2042 VOIDmode, tmps[i]);
2045 /* Optimize the access just a bit. */
2046 else if (MEM_P (dest)
2047 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2048 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2049 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2050 && bytelen == GET_MODE_SIZE (mode))
2051 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2053 else
2054 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2055 0, 0, mode, tmps[i]);
2058 /* Copy from the pseudo into the (probable) hard reg. */
2059 if (orig_dst != dst)
2060 emit_move_insn (orig_dst, dst);
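/* Illustrative sketch, not part of expr.c: spill a register group SRC (a
   PARALLEL such as the one built in the sketch after emit_group_move
   above) into a fresh BLKmode stack slot of SIZE bytes.  TYPE is assumed
   to be the tree type of the value.  */
static rtx
sketch_spill_group (rtx src, tree type, int size)
{
  rtx slot = assign_stack_temp (BLKmode, size);
  emit_group_store (slot, src, type, size);
  return slot;
}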
2063 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2064 of the value stored in X. */
2067 maybe_emit_group_store (rtx x, tree type)
2069 machine_mode mode = TYPE_MODE (type);
2070 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2071 if (GET_CODE (x) == PARALLEL)
2073 rtx result = gen_reg_rtx (mode);
2074 emit_group_store (result, x, type, int_size_in_bytes (type));
2075 return result;
2077 return x;
2080 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2082 This is used on targets that return BLKmode values in registers. */
2084 void
2085 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2087 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2088 rtx src = NULL, dst = NULL;
2089 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2090 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2091 machine_mode mode = GET_MODE (srcreg);
2092 machine_mode tmode = GET_MODE (target);
2093 machine_mode copy_mode;
2095 /* BLKmode registers created in the back-end shouldn't have survived. */
2096 gcc_assert (mode != BLKmode);
2098 /* If the structure doesn't take up a whole number of words, see whether
2099 SRCREG is padded on the left or on the right. If it's on the left,
2100 set PADDING_CORRECTION to the number of bits to skip.
2102 In most ABIs, the structure will be returned at the least significant end of
2103 the register, which translates to right padding on little-endian
2104 targets and left padding on big-endian targets. The opposite
2105 holds if the structure is returned at the most significant
2106 end of the register. */
2107 if (bytes % UNITS_PER_WORD != 0
2108 && (targetm.calls.return_in_msb (type)
2109 ? !BYTES_BIG_ENDIAN
2110 : BYTES_BIG_ENDIAN))
2111 padding_correction
2112 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2114 /* We can use a single move if we have an exact mode for the size. */
2115 else if (MEM_P (target)
2116 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2117 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2118 && bytes == GET_MODE_SIZE (mode))
2120 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2121 return;
2124 /* Likewise if the target is a register with the same mode. */
2125 else if (REG_P (target)
2126 && GET_MODE (target) == mode
2127 && bytes == GET_MODE_SIZE (mode))
2129 emit_move_insn (target, srcreg);
2130 return;
2133 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2134 into a new pseudo which is a full word. */
2135 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2137 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 mode = word_mode;
2141 /* Copy the structure BITSIZE bits at a time. If the target lives in
2142 memory, take care of not reading/writing past its end by selecting
2143 a copy mode suited to BITSIZE. This should always be possible given
2144 how it is computed.
2146 If the target lives in register, make sure not to select a copy mode
2147 larger than the mode of the register.
2149 We could probably emit more efficient code for machines which do not use
2150 strict alignment, but it doesn't seem worth the effort at the current
2151 time. */
2153 copy_mode = word_mode;
2154 if (MEM_P (target))
2156 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2157 if (mem_mode != BLKmode)
2158 copy_mode = mem_mode;
2160 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2161 copy_mode = tmode;
2163 for (bitpos = 0, xbitpos = padding_correction;
2164 bitpos < bytes * BITS_PER_UNIT;
2165 bitpos += bitsize, xbitpos += bitsize)
2167 /* We need a new source operand each time xbitpos is on a
2168 word boundary and when xbitpos == padding_correction
2169 (the first time through). */
2170 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2171 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2173 /* We need a new destination operand each time bitpos is on
2174 a word boundary. */
2175 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2176 dst = target;
2177 else if (bitpos % BITS_PER_WORD == 0)
2178 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2180 /* Use xbitpos for the source extraction (right justified) and
2181 bitpos for the destination store (left justified). */
2182 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2183 extract_bit_field (src, bitsize,
2184 xbitpos % BITS_PER_WORD, 1,
2185 NULL_RTX, copy_mode, copy_mode));
2189 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2190 register if it contains any data, otherwise return null.
2192 This is used on targets that return BLKmode values in registers. */
2195 copy_blkmode_to_reg (machine_mode mode, tree src)
2197 int i, n_regs;
2198 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2199 unsigned int bitsize;
2200 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2201 machine_mode dst_mode;
2203 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2205 x = expand_normal (src);
2207 bytes = int_size_in_bytes (TREE_TYPE (src));
2208 if (bytes == 0)
2209 return NULL_RTX;
2211 /* If the structure doesn't take up a whole number of words, see
2212 whether the register value should be padded on the left or on
2213 the right. Set PADDING_CORRECTION to the number of padding
2214 bits needed on the left side.
2216 In most ABIs, the structure will be returned at the least significant end of
2217 the register, which translates to right padding on little-endian
2218 targets and left padding on big-endian targets. The opposite
2219 holds if the structure is returned at the most significant
2220 end of the register. */
2221 if (bytes % UNITS_PER_WORD != 0
2222 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2223 ? !BYTES_BIG_ENDIAN
2224 : BYTES_BIG_ENDIAN))
2225 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2226 * BITS_PER_UNIT));
2228 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2229 dst_words = XALLOCAVEC (rtx, n_regs);
2230 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2232 /* Copy the structure BITSIZE bits at a time. */
2233 for (bitpos = 0, xbitpos = padding_correction;
2234 bitpos < bytes * BITS_PER_UNIT;
2235 bitpos += bitsize, xbitpos += bitsize)
2237 /* We need a new destination pseudo each time xbitpos is
2238 on a word boundary and when xbitpos == padding_correction
2239 (the first time through). */
2240 if (xbitpos % BITS_PER_WORD == 0
2241 || xbitpos == padding_correction)
2243 /* Generate an appropriate register. */
2244 dst_word = gen_reg_rtx (word_mode);
2245 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2247 /* Clear the destination before we move anything into it. */
2248 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2251 /* We need a new source operand each time bitpos is on a word
2252 boundary. */
2253 if (bitpos % BITS_PER_WORD == 0)
2254 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2256 /* Use bitpos for the source extraction (left justified) and
2257 xbitpos for the destination store (right justified). */
2258 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2259 0, 0, word_mode,
2260 extract_bit_field (src_word, bitsize,
2261 bitpos % BITS_PER_WORD, 1,
2262 NULL_RTX, word_mode, word_mode));
2265 if (mode == BLKmode)
2267 /* Find the smallest integer mode large enough to hold the
2268 entire structure. */
2269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2270 mode != VOIDmode;
2271 mode = GET_MODE_WIDER_MODE (mode))
2272 /* Have we found a large enough mode? */
2273 if (GET_MODE_SIZE (mode) >= bytes)
2274 break;
2276 /* A suitable mode should have been found. */
2277 gcc_assert (mode != VOIDmode);
2280 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2281 dst_mode = word_mode;
2282 else
2283 dst_mode = mode;
2284 dst = gen_reg_rtx (dst_mode);
2286 for (i = 0; i < n_regs; i++)
2287 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2289 if (mode != dst_mode)
2290 dst = gen_lowpart (mode, dst);
2292 return dst;
2295 /* Add a USE expression for REG to the (possibly empty) list pointed
2296 to by CALL_FUSAGE. REG must denote a hard register. */
2298 void
2299 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2301 gcc_assert (REG_P (reg));
2303 if (!HARD_REGISTER_P (reg))
2304 return;
2306 *call_fusage
2307 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2310 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2311 to by CALL_FUSAGE. REG must denote a hard register. */
2313 void
2314 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2316 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2318 *call_fusage
2319 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2322 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2323 starting at REGNO. All of these registers must be hard registers. */
2325 void
2326 use_regs (rtx *call_fusage, int regno, int nregs)
2328 int i;
2330 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2332 for (i = 0; i < nregs; i++)
2333 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2336 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2337 PARALLEL REGS. This is for calls that pass values in multiple
2338 non-contiguous locations. The Irix 6 ABI has examples of this. */
2340 void
2341 use_group_regs (rtx *call_fusage, rtx regs)
2343 int i;
2345 for (i = 0; i < XVECLEN (regs, 0); i++)
2347 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2349 /* A NULL entry means the parameter goes both on the stack and in
2350 registers. This can also be a MEM for targets that pass values
2351 partially on the stack and partially in registers. */
2352 if (reg != 0 && REG_P (reg))
2353 use_reg (call_fusage, reg);
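/* Illustrative sketch, not part of expr.c: record that a call uses two
   consecutive argument registers starting at hard register REGNO (a
   placeholder number), then attach the resulting list to CALL_INSN.  */
static void
sketch_record_call_usage (rtx call_insn, int regno)
{
  rtx call_fusage = NULL_RTX;
  use_regs (&call_fusage, regno, 2);
  add_function_usage_to (call_insn, call_fusage);
}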
2357 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2358 assignment and the code of the expression on the RHS is CODE. Return
2359 NULL otherwise. */
2361 static gimple *
2362 get_def_for_expr (tree name, enum tree_code code)
2364 gimple *def_stmt;
2366 if (TREE_CODE (name) != SSA_NAME)
2367 return NULL;
2369 def_stmt = get_gimple_for_ssa_name (name);
2370 if (!def_stmt
2371 || gimple_assign_rhs_code (def_stmt) != code)
2372 return NULL;
2374 return def_stmt;
2377 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2378 assignment and the class of the expression on the RHS is CLASS. Return
2379 NULL otherwise. */
2381 static gimple *
2382 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2384 gimple *def_stmt;
2386 if (TREE_CODE (name) != SSA_NAME)
2387 return NULL;
2389 def_stmt = get_gimple_for_ssa_name (name);
2390 if (!def_stmt
2391 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2392 return NULL;
2394 return def_stmt;
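/* Illustrative sketch, not part of expr.c: the typical way the expanders
   below use get_def_for_expr, here testing whether SSA name OP is defined
   by a multiplication and, if so, fetching its two operands.  OP, OP0 and
   OP1 are placeholders.  */
static bool
sketch_fed_by_mult_p (tree op, tree *op0, tree *op1)
{
  gimple *def = get_def_for_expr (op, MULT_EXPR);
  if (!def)
    return false;
  *op0 = gimple_assign_rhs1 (def);
  *op1 = gimple_assign_rhs2 (def);
  return true;
}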
2398 /* Determine whether the LEN bytes generated by CONSTFUN can be
2399 stored to memory using several move instructions. CONSTFUNDATA is
2400 a pointer which will be passed as argument in every CONSTFUN call.
2401 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2402 a memset operation and false if it's a copy of a constant string.
2403 Return nonzero if a call to store_by_pieces should succeed. */
2406 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2407 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2408 void *constfundata, unsigned int align, bool memsetp)
2410 unsigned HOST_WIDE_INT l;
2411 unsigned int max_size;
2412 HOST_WIDE_INT offset = 0;
2413 machine_mode mode;
2414 enum insn_code icode;
2415 int reverse;
2416 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2417 rtx cst ATTRIBUTE_UNUSED;
2419 if (len == 0)
2420 return 1;
2422 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2423 memsetp
2424 ? SET_BY_PIECES
2425 : STORE_BY_PIECES,
2426 optimize_insn_for_speed_p ()))
2427 return 0;
2429 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2431 /* We would first store what we can in the largest integer mode, then go to
2432 successively smaller modes. */
2434 for (reverse = 0;
2435 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2436 reverse++)
2438 l = len;
2439 max_size = STORE_MAX_PIECES + 1;
2440 while (max_size > 1 && l > 0)
2442 mode = widest_int_mode_for_size (max_size);
2444 if (mode == VOIDmode)
2445 break;
2447 icode = optab_handler (mov_optab, mode);
2448 if (icode != CODE_FOR_nothing
2449 && align >= GET_MODE_ALIGNMENT (mode))
2451 unsigned int size = GET_MODE_SIZE (mode);
2453 while (l >= size)
2455 if (reverse)
2456 offset -= size;
2458 cst = (*constfun) (constfundata, offset, mode);
2459 if (!targetm.legitimate_constant_p (mode, cst))
2460 return 0;
2462 if (!reverse)
2463 offset += size;
2465 l -= size;
2469 max_size = GET_MODE_SIZE (mode);
2472 /* The code above should have handled everything. */
2473 gcc_assert (!l);
2476 return 1;
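/* Illustrative sketch, not part of expr.c: a CONSTFUN callback in the
   style of clear_by_pieces_1 below, yielding a repeated filler byte
   (0x2a is arbitrary), and the query one would make before committing
   to store_by_pieces.  */
static rtx
sketch_const_byte (void *data ATTRIBUTE_UNUSED,
                   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                   machine_mode mode)
{
  /* gen_int_mode truncates the repeated pattern to MODE.  */
  return gen_int_mode ((HOST_WIDE_INT) 0x2a2a2a2a2a2a2a2a, mode);
}

static bool
sketch_can_fill_p (unsigned HOST_WIDE_INT len, unsigned int align)
{
  return can_store_by_pieces (len, sketch_const_byte, NULL, align, true);
}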
2479 /* Generate several move instructions to store LEN bytes generated by
2480 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2481 pointer which will be passed as argument in every CONSTFUN call.
2482 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2483 a memset operation and false if it's a copy of a constant string.
2484 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
2485 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
2486 stpcpy. */
2489 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2490 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2491 void *constfundata, unsigned int align, bool memsetp, int endp)
2493 machine_mode to_addr_mode = get_address_mode (to);
2494 struct store_by_pieces_d data;
2496 if (len == 0)
2498 gcc_assert (endp != 2);
2499 return to;
2502 gcc_assert (targetm.use_by_pieces_infrastructure_p
2503 (len, align,
2504 memsetp
2505 ? SET_BY_PIECES
2506 : STORE_BY_PIECES,
2507 optimize_insn_for_speed_p ()));
2509 data.constfun = constfun;
2510 data.constfundata = constfundata;
2511 data.len = len;
2512 data.to = to;
2513 store_by_pieces_1 (&data, align);
2514 if (endp)
2516 rtx to1;
2518 gcc_assert (!data.reverse);
2519 if (data.autinc_to)
2521 if (endp == 2)
2523 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2524 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2525 else
2526 data.to_addr = copy_to_mode_reg (to_addr_mode,
2527 plus_constant (to_addr_mode,
2528 data.to_addr,
2529 -1));
2531 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2532 data.offset);
2534 else
2536 if (endp == 2)
2537 --data.offset;
2538 to1 = adjust_address (data.to, QImode, data.offset);
2540 return to1;
2542 else
2543 return data.to;
2546 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2547 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2549 static void
2550 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2552 struct store_by_pieces_d data;
2554 if (len == 0)
2555 return;
2557 data.constfun = clear_by_pieces_1;
2558 data.constfundata = NULL;
2559 data.len = len;
2560 data.to = to;
2561 store_by_pieces_1 (&data, align);
2564 /* Callback routine for clear_by_pieces.
2565 Return const0_rtx unconditionally. */
2567 static rtx
2568 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2569 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2570 machine_mode mode ATTRIBUTE_UNUSED)
2572 return const0_rtx;
2575 /* Subroutine of clear_by_pieces and store_by_pieces.
2576 Generate several move instructions to store LEN bytes of block TO. (A MEM
2577 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2579 static void
2580 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2581 unsigned int align ATTRIBUTE_UNUSED)
2583 machine_mode to_addr_mode = get_address_mode (data->to);
2584 rtx to_addr = XEXP (data->to, 0);
2585 unsigned int max_size = STORE_MAX_PIECES + 1;
2586 enum insn_code icode;
2588 data->offset = 0;
2589 data->to_addr = to_addr;
2590 data->autinc_to
2591 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2592 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2594 data->explicit_inc_to = 0;
2595 data->reverse
2596 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2597 if (data->reverse)
2598 data->offset = data->len;
2600 /* If storing requires more than two move insns,
2601 copy addresses to registers (to make displacements shorter)
2602 and use post-increment if available. */
2603 if (!data->autinc_to
2604 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2606 /* Determine the main mode we'll be using.
2607 MODE might not be used depending on the definitions of the
2608 USE_* macros below. */
2609 machine_mode mode ATTRIBUTE_UNUSED
2610 = widest_int_mode_for_size (max_size);
2612 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2614 data->to_addr = copy_to_mode_reg (to_addr_mode,
2615 plus_constant (to_addr_mode,
2616 to_addr,
2617 data->len));
2618 data->autinc_to = 1;
2619 data->explicit_inc_to = -1;
2622 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2623 && ! data->autinc_to)
2625 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2626 data->autinc_to = 1;
2627 data->explicit_inc_to = 1;
2630 if ( !data->autinc_to && CONSTANT_P (to_addr))
2631 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2634 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2636 /* First store what we can in the largest integer mode, then go to
2637 successively smaller modes. */
2639 while (max_size > 1 && data->len > 0)
2641 machine_mode mode = widest_int_mode_for_size (max_size);
2643 if (mode == VOIDmode)
2644 break;
2646 icode = optab_handler (mov_optab, mode);
2647 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2648 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2650 max_size = GET_MODE_SIZE (mode);
2653 /* The code above should have handled everything. */
2654 gcc_assert (!data->len);
2657 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2658 with move instructions for mode MODE. GENFUN is the gen_... function
2659 to make a move insn for that mode. DATA has all the other info. */
2661 static void
2662 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2663 struct store_by_pieces_d *data)
2665 unsigned int size = GET_MODE_SIZE (mode);
2666 rtx to1, cst;
2668 while (data->len >= size)
2670 if (data->reverse)
2671 data->offset -= size;
2673 if (data->autinc_to)
2674 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2675 data->offset);
2676 else
2677 to1 = adjust_address (data->to, mode, data->offset);
2679 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2680 emit_insn (gen_add2_insn (data->to_addr,
2681 gen_int_mode (-(HOST_WIDE_INT) size,
2682 GET_MODE (data->to_addr))));
2684 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2685 emit_insn ((*genfun) (to1, cst));
2687 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2688 emit_insn (gen_add2_insn (data->to_addr,
2689 gen_int_mode (size,
2690 GET_MODE (data->to_addr))));
2692 if (! data->reverse)
2693 data->offset += size;
2695 data->len -= size;
2699 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2700 its length in bytes. */
2703 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2704 unsigned int expected_align, HOST_WIDE_INT expected_size,
2705 unsigned HOST_WIDE_INT min_size,
2706 unsigned HOST_WIDE_INT max_size,
2707 unsigned HOST_WIDE_INT probable_max_size)
2709 machine_mode mode = GET_MODE (object);
2710 unsigned int align;
2712 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2714 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2715 just move a zero. Otherwise, do this a piece at a time. */
2716 if (mode != BLKmode
2717 && CONST_INT_P (size)
2718 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2720 rtx zero = CONST0_RTX (mode);
2721 if (zero != NULL)
2723 emit_move_insn (object, zero);
2724 return NULL;
2727 if (COMPLEX_MODE_P (mode))
2729 zero = CONST0_RTX (GET_MODE_INNER (mode));
2730 if (zero != NULL)
2732 write_complex_part (object, zero, 0);
2733 write_complex_part (object, zero, 1);
2734 return NULL;
2739 if (size == const0_rtx)
2740 return NULL;
2742 align = MEM_ALIGN (object);
2744 if (CONST_INT_P (size)
2745 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2746 CLEAR_BY_PIECES,
2747 optimize_insn_for_speed_p ()))
2748 clear_by_pieces (object, INTVAL (size), align);
2749 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2750 expected_align, expected_size,
2751 min_size, max_size, probable_max_size))
2753 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2754 return set_storage_via_libcall (object, size, const0_rtx,
2755 method == BLOCK_OP_TAILCALL);
2756 else
2757 gcc_unreachable ();
2759 return NULL;
2763 clear_storage (rtx object, rtx size, enum block_op_methods method)
2765 unsigned HOST_WIDE_INT max, min = 0;
2766 if (GET_CODE (size) == CONST_INT)
2767 min = max = UINTVAL (size);
2768 else
2769 max = GET_MODE_MASK (GET_MODE (size));
2770 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
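/* Illustrative usage sketch, not part of expr.c: zero a freshly allocated
   32-byte BLKmode stack slot; the size is arbitrary.  */
static void
sketch_clear_slot (void)
{
  rtx slot = assign_stack_temp (BLKmode, 32);
  clear_storage (slot, GEN_INT (32), BLOCK_OP_NORMAL);
}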
2774 /* A subroutine of clear_storage. Expand a call to memset.
2775 Return the return value of memset, 0 otherwise. */
2778 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2780 tree call_expr, fn, object_tree, size_tree, val_tree;
2781 machine_mode size_mode;
2782 rtx retval;
2784 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2785 place those new pseudos into a VAR_DECL and use them later. */
2787 object = copy_addr_to_reg (XEXP (object, 0));
2789 size_mode = TYPE_MODE (sizetype);
2790 size = convert_to_mode (size_mode, size, 1);
2791 size = copy_to_mode_reg (size_mode, size);
2793 /* It is incorrect to use the libcall calling conventions to call
2794 memset in this context. This could be a user call to memset and
2795 the user may wish to examine the return value from memset. For
2796 targets where libcalls and normal calls have different conventions
2797 for returning pointers, we could end up generating incorrect code. */
2799 object_tree = make_tree (ptr_type_node, object);
2800 if (!CONST_INT_P (val))
2801 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2802 size_tree = make_tree (sizetype, size);
2803 val_tree = make_tree (integer_type_node, val);
2805 fn = clear_storage_libcall_fn (true);
2806 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2807 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2809 retval = expand_normal (call_expr);
2811 return retval;
2814 /* A subroutine of set_storage_via_libcall. Create the tree node
2815 for the function we use for block clears. */
2817 tree block_clear_fn;
2819 void
2820 init_block_clear_fn (const char *asmspec)
2822 if (!block_clear_fn)
2824 tree fn, args;
2826 fn = get_identifier ("memset");
2827 args = build_function_type_list (ptr_type_node, ptr_type_node,
2828 integer_type_node, sizetype,
2829 NULL_TREE);
2831 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2832 DECL_EXTERNAL (fn) = 1;
2833 TREE_PUBLIC (fn) = 1;
2834 DECL_ARTIFICIAL (fn) = 1;
2835 TREE_NOTHROW (fn) = 1;
2836 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2837 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2839 block_clear_fn = fn;
2842 if (asmspec)
2843 set_user_assembler_name (block_clear_fn, asmspec);
2846 static tree
2847 clear_storage_libcall_fn (int for_call)
2849 static bool emitted_extern;
2851 if (!block_clear_fn)
2852 init_block_clear_fn (NULL);
2854 if (for_call && !emitted_extern)
2856 emitted_extern = true;
2857 make_decl_rtl (block_clear_fn);
2860 return block_clear_fn;
2863 /* Expand a setmem pattern; return true if successful. */
2865 bool
2866 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2867 unsigned int expected_align, HOST_WIDE_INT expected_size,
2868 unsigned HOST_WIDE_INT min_size,
2869 unsigned HOST_WIDE_INT max_size,
2870 unsigned HOST_WIDE_INT probable_max_size)
2872 /* Try the most limited insn first, because there's no point
2873 including more than one in the machine description unless
2874 the more limited one has some advantage. */
2876 machine_mode mode;
2878 if (expected_align < align)
2879 expected_align = align;
2880 if (expected_size != -1)
2882 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2883 expected_size = max_size;
2884 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2885 expected_size = min_size;
2888 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2889 mode = GET_MODE_WIDER_MODE (mode))
2891 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2893 if (code != CODE_FOR_nothing
2894 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2895 here because if SIZE is less than the mode mask, as it is
2896 returned by the macro, it will definitely be less than the
2897 actual mode mask. Since SIZE is within the Pmode address
2898 space, we limit MODE to Pmode. */
2899 && ((CONST_INT_P (size)
2900 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2901 <= (GET_MODE_MASK (mode) >> 1)))
2902 || max_size <= (GET_MODE_MASK (mode) >> 1)
2903 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2905 struct expand_operand ops[9];
2906 unsigned int nops;
2908 nops = insn_data[(int) code].n_generator_args;
2909 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2911 create_fixed_operand (&ops[0], object);
2912 /* The check above guarantees that this size conversion is valid. */
2913 create_convert_operand_to (&ops[1], size, mode, true);
2914 create_convert_operand_from (&ops[2], val, byte_mode, true);
2915 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2916 if (nops >= 6)
2918 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2919 create_integer_operand (&ops[5], expected_size);
2921 if (nops >= 8)
2923 create_integer_operand (&ops[6], min_size);
2924 /* If we cannot represent the maximal size,
2925 make the parameter NULL. */
2926 if ((HOST_WIDE_INT) max_size != -1)
2927 create_integer_operand (&ops[7], max_size);
2928 else
2929 create_fixed_operand (&ops[7], NULL);
2931 if (nops == 9)
2933 /* If we cannot represent the maximal size,
2934 make the parameter NULL. */
2935 if ((HOST_WIDE_INT) probable_max_size != -1)
2936 create_integer_operand (&ops[8], probable_max_size);
2937 else
2938 create_fixed_operand (&ops[8], NULL);
2940 if (maybe_expand_insn (code, nops, ops))
2941 return true;
2945 return false;
2949 /* Write to one of the components of the complex value CPLX. Write VAL to
2950 the real part if IMAG_P is false, and the imaginary part if it's true. */
2952 void
2953 write_complex_part (rtx cplx, rtx val, bool imag_p)
2955 machine_mode cmode;
2956 machine_mode imode;
2957 unsigned ibitsize;
2959 if (GET_CODE (cplx) == CONCAT)
2961 emit_move_insn (XEXP (cplx, imag_p), val);
2962 return;
2965 cmode = GET_MODE (cplx);
2966 imode = GET_MODE_INNER (cmode);
2967 ibitsize = GET_MODE_BITSIZE (imode);
2969 /* For MEMs simplify_gen_subreg may generate an invalid new address
2970 because, e.g., the original address is considered mode-dependent
2971 by the target, which restricts simplify_subreg from invoking
2972 adjust_address_nv. Instead of preparing fallback support for an
2973 invalid address, we call adjust_address_nv directly. */
2974 if (MEM_P (cplx))
2976 emit_move_insn (adjust_address_nv (cplx, imode,
2977 imag_p ? GET_MODE_SIZE (imode) : 0),
2978 val);
2979 return;
2982 /* If the sub-object is at least word sized, then we know that subregging
2983 will work. This special case is important, since store_bit_field
2984 wants to operate on integer modes, and there's rarely an OImode to
2985 correspond to TCmode. */
2986 if (ibitsize >= BITS_PER_WORD
2987 /* For hard regs we have exact predicates. Assume we can split
2988 the original object if it spans an even number of hard regs.
2989 This special case is important for SCmode on 64-bit platforms
2990 where the natural size of floating-point regs is 32-bit. */
2991 || (REG_P (cplx)
2992 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2993 && REG_NREGS (cplx) % 2 == 0))
2995 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2996 imag_p ? GET_MODE_SIZE (imode) : 0);
2997 if (part)
2999 emit_move_insn (part, val);
3000 return;
3002 else
3003 /* simplify_gen_subreg may fail for sub-word MEMs. */
3004 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3007 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3010 /* Extract one of the components of the complex value CPLX. Extract the
3011 real part if IMAG_P is false, and the imaginary part if it's true. */
3014 read_complex_part (rtx cplx, bool imag_p)
3016 machine_mode cmode, imode;
3017 unsigned ibitsize;
3019 if (GET_CODE (cplx) == CONCAT)
3020 return XEXP (cplx, imag_p);
3022 cmode = GET_MODE (cplx);
3023 imode = GET_MODE_INNER (cmode);
3024 ibitsize = GET_MODE_BITSIZE (imode);
3026 /* Special case reads from complex constants that got spilled to memory. */
3027 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3029 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3030 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3032 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3033 if (CONSTANT_CLASS_P (part))
3034 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3038 /* For MEMs simplify_gen_subreg may generate an invalid new address
3039 because, e.g., the original address is considered mode-dependent
3040 by the target, which restricts simplify_subreg from invoking
3041 adjust_address_nv. Instead of preparing fallback support for an
3042 invalid address, we call adjust_address_nv directly. */
3043 if (MEM_P (cplx))
3044 return adjust_address_nv (cplx, imode,
3045 imag_p ? GET_MODE_SIZE (imode) : 0);
3047 /* If the sub-object is at least word sized, then we know that subregging
3048 will work. This special case is important, since extract_bit_field
3049 wants to operate on integer modes, and there's rarely an OImode to
3050 correspond to TCmode. */
3051 if (ibitsize >= BITS_PER_WORD
3052 /* For hard regs we have exact predicates. Assume we can split
3053 the original object if it spans an even number of hard regs.
3054 This special case is important for SCmode on 64-bit platforms
3055 where the natural size of floating-point regs is 32-bit. */
3056 || (REG_P (cplx)
3057 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3058 && REG_NREGS (cplx) % 2 == 0))
3060 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3061 imag_p ? GET_MODE_SIZE (imode) : 0);
3062 if (ret)
3063 return ret;
3064 else
3065 /* simplify_gen_subreg may fail for sub-word MEMs. */
3066 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3069 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3070 true, NULL_RTX, imode, imode);
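/* Illustrative sketch, not part of expr.c: exchange the parts of complex
   value Y while copying it into X, where X and Y stand for any rtxes of
   the same complex mode.  */
static void
sketch_swap_complex_parts (rtx x, rtx y)
{
  write_complex_part (x, read_complex_part (y, true), false);
  write_complex_part (x, read_complex_part (y, false), true);
}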
3073 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3074 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3075 represented in NEW_MODE. If FORCE is true, this will never happen, as
3076 we'll force-create a SUBREG if needed. */
3078 static rtx
3079 emit_move_change_mode (machine_mode new_mode,
3080 machine_mode old_mode, rtx x, bool force)
3082 rtx ret;
3084 if (push_operand (x, GET_MODE (x)))
3086 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3087 MEM_COPY_ATTRIBUTES (ret, x);
3089 else if (MEM_P (x))
3091 /* We don't have to worry about changing the address since the
3092 size in bytes is supposed to be the same. */
3093 if (reload_in_progress)
3095 /* Copy the MEM to change the mode and move any
3096 substitutions from the old MEM to the new one. */
3097 ret = adjust_address_nv (x, new_mode, 0);
3098 copy_replacements (x, ret);
3100 else
3101 ret = adjust_address (x, new_mode, 0);
3103 else
3105 /* Note that we do want simplify_subreg's behavior of validating
3106 that the new mode is ok for a hard register. If we were to use
3107 simplify_gen_subreg, we would create the subreg, but would
3108 probably run into the target not being able to implement it. */
3109 /* Except, of course, when FORCE is true, when this is exactly what
3110 we want. Which is needed for CCmodes on some targets. */
3111 if (force)
3112 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3113 else
3114 ret = simplify_subreg (new_mode, x, old_mode, 0);
3117 return ret;
3120 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3121 an integer mode of the same size as MODE. Returns the instruction
3122 emitted, or NULL if such a move could not be generated. */
3124 static rtx_insn *
3125 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3127 machine_mode imode;
3128 enum insn_code code;
3130 /* There must exist a mode of the exact size we require. */
3131 imode = int_mode_for_mode (mode);
3132 if (imode == BLKmode)
3133 return NULL;
3135 /* The target must support moves in this mode. */
3136 code = optab_handler (mov_optab, imode);
3137 if (code == CODE_FOR_nothing)
3138 return NULL;
3140 x = emit_move_change_mode (imode, mode, x, force);
3141 if (x == NULL_RTX)
3142 return NULL;
3143 y = emit_move_change_mode (imode, mode, y, force);
3144 if (y == NULL_RTX)
3145 return NULL;
3146 return emit_insn (GEN_FCN (code) (x, y));
3149 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3150 Return an equivalent MEM that does not use an auto-increment. */
3153 emit_move_resolve_push (machine_mode mode, rtx x)
3155 enum rtx_code code = GET_CODE (XEXP (x, 0));
3156 HOST_WIDE_INT adjust;
3157 rtx temp;
3159 adjust = GET_MODE_SIZE (mode);
3160 #ifdef PUSH_ROUNDING
3161 adjust = PUSH_ROUNDING (adjust);
3162 #endif
3163 if (code == PRE_DEC || code == POST_DEC)
3164 adjust = -adjust;
3165 else if (code == PRE_MODIFY || code == POST_MODIFY)
3167 rtx expr = XEXP (XEXP (x, 0), 1);
3168 HOST_WIDE_INT val;
3170 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3171 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3172 val = INTVAL (XEXP (expr, 1));
3173 if (GET_CODE (expr) == MINUS)
3174 val = -val;
3175 gcc_assert (adjust == val || adjust == -val);
3176 adjust = val;
3179 /* Do not use anti_adjust_stack, since we don't want to update
3180 stack_pointer_delta. */
3181 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3182 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3183 0, OPTAB_LIB_WIDEN);
3184 if (temp != stack_pointer_rtx)
3185 emit_move_insn (stack_pointer_rtx, temp);
3187 switch (code)
3189 case PRE_INC:
3190 case PRE_DEC:
3191 case PRE_MODIFY:
3192 temp = stack_pointer_rtx;
3193 break;
3194 case POST_INC:
3195 case POST_DEC:
3196 case POST_MODIFY:
3197 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3198 break;
3199 default:
3200 gcc_unreachable ();
3203 return replace_equiv_address (x, temp);
3206 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3207 X is known to satisfy push_operand, and MODE is known to be complex.
3208 Returns the last instruction emitted. */
3210 rtx_insn *
3211 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3213 machine_mode submode = GET_MODE_INNER (mode);
3214 bool imag_first;
3216 #ifdef PUSH_ROUNDING
3217 unsigned int submodesize = GET_MODE_SIZE (submode);
3219 /* In case we output to the stack, but the size is smaller than the
3220 machine can push exactly, we need to use move instructions. */
3221 if (PUSH_ROUNDING (submodesize) != submodesize)
3223 x = emit_move_resolve_push (mode, x);
3224 return emit_move_insn (x, y);
3226 #endif
3228 /* Note that the real part always precedes the imag part in memory
3229 regardless of the machine's endianness. */
3230 switch (GET_CODE (XEXP (x, 0)))
3232 case PRE_DEC:
3233 case POST_DEC:
3234 imag_first = true;
3235 break;
3236 case PRE_INC:
3237 case POST_INC:
3238 imag_first = false;
3239 break;
3240 default:
3241 gcc_unreachable ();
3244 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3245 read_complex_part (y, imag_first));
3246 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3247 read_complex_part (y, !imag_first));
3250 /* A subroutine of emit_move_complex. Perform the move from Y to X
3251 via two moves of the parts. Returns the last instruction emitted. */
3253 rtx_insn *
3254 emit_move_complex_parts (rtx x, rtx y)
3256 /* Show the output dies here. This is necessary for SUBREGs
3257 of pseudos since we cannot track their lifetimes correctly;
3258 hard regs shouldn't appear here except as return values. */
3259 if (!reload_completed && !reload_in_progress
3260 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3261 emit_clobber (x);
3263 write_complex_part (x, read_complex_part (y, false), false);
3264 write_complex_part (x, read_complex_part (y, true), true);
3266 return get_last_insn ();
3269 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3270 MODE is known to be complex. Returns the last instruction emitted. */
3272 static rtx_insn *
3273 emit_move_complex (machine_mode mode, rtx x, rtx y)
3275 bool try_int;
3277 /* Need to take special care for pushes, to maintain proper ordering
3278 of the data, and possibly extra padding. */
3279 if (push_operand (x, mode))
3280 return emit_move_complex_push (mode, x, y);
3282 /* See if we can coerce the target into moving both values at once, except
3283 for floating point where we favor moving as parts if this is easy. */
3284 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3285 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3286 && !(REG_P (x)
3287 && HARD_REGISTER_P (x)
3288 && REG_NREGS (x) == 1)
3289 && !(REG_P (y)
3290 && HARD_REGISTER_P (y)
3291 && REG_NREGS (y) == 1))
3292 try_int = false;
3293 /* Not possible if the values are inherently not adjacent. */
3294 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3295 try_int = false;
3296 /* Is possible if both are registers (or subregs of registers). */
3297 else if (register_operand (x, mode) && register_operand (y, mode))
3298 try_int = true;
3299 /* If one of the operands is a memory, and alignment constraints
3300 are friendly enough, we may be able to do combined memory operations.
3301 We do not attempt this if Y is a constant because that combination is
3302 usually better with the by-parts thing below. */
3303 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3304 && (!STRICT_ALIGNMENT
3305 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3306 try_int = true;
3307 else
3308 try_int = false;
3310 if (try_int)
3312 rtx_insn *ret;
3314 /* For memory to memory moves, optimal behavior can be had with the
3315 existing block move logic. */
3316 if (MEM_P (x) && MEM_P (y))
3318 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3319 BLOCK_OP_NO_LIBCALL);
3320 return get_last_insn ();
3323 ret = emit_move_via_integer (mode, x, y, true);
3324 if (ret)
3325 return ret;
3328 return emit_move_complex_parts (x, y);
3331 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3332 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3334 static rtx_insn *
3335 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3337 rtx_insn *ret;
3339 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3340 if (mode != CCmode)
3342 enum insn_code code = optab_handler (mov_optab, CCmode);
3343 if (code != CODE_FOR_nothing)
3345 x = emit_move_change_mode (CCmode, mode, x, true);
3346 y = emit_move_change_mode (CCmode, mode, y, true);
3347 return emit_insn (GEN_FCN (code) (x, y));
3351 /* Otherwise, find the MODE_INT mode of the same width. */
3352 ret = emit_move_via_integer (mode, x, y, false);
3353 gcc_assert (ret != NULL);
3354 return ret;
3357 /* Return true if word I of OP lies entirely in the
3358 undefined bits of a paradoxical subreg. */
3360 static bool
3361 undefined_operand_subword_p (const_rtx op, int i)
3363 machine_mode innermode, innermostmode;
3364 int offset;
3365 if (GET_CODE (op) != SUBREG)
3366 return false;
3367 innermode = GET_MODE (op);
3368 innermostmode = GET_MODE (SUBREG_REG (op));
3369 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3370 /* The SUBREG_BYTE represents offset, as if the value were stored in
3371 memory, except for a paradoxical subreg where we define
3372 SUBREG_BYTE to be 0; undo this exception as in
3373 simplify_subreg. */
3374 if (SUBREG_BYTE (op) == 0
3375 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3377 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3378 if (WORDS_BIG_ENDIAN)
3379 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3380 if (BYTES_BIG_ENDIAN)
3381 offset += difference % UNITS_PER_WORD;
3383 if (offset >= GET_MODE_SIZE (innermostmode)
3384 || offset <= -GET_MODE_SIZE (word_mode))
3385 return true;
3386 return false;
3389 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3390 MODE is any multi-word or full-word mode that lacks a move_insn
3391 pattern. Note that you will get better code if you define such
3392 patterns, even if they must turn into multiple assembler instructions. */
3394 static rtx_insn *
3395 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3397 rtx_insn *last_insn = 0;
3398 rtx_insn *seq;
3399 rtx inner;
3400 bool need_clobber;
3401 int i;
3403 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3405 /* If X is a push on the stack, do the push now and replace
3406 X with a reference to the stack pointer. */
3407 if (push_operand (x, mode))
3408 x = emit_move_resolve_push (mode, x);
3410 /* If we are in reload, see if either operand is a MEM whose address
3411 is scheduled for replacement. */
3412 if (reload_in_progress && MEM_P (x)
3413 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3414 x = replace_equiv_address_nv (x, inner);
3415 if (reload_in_progress && MEM_P (y)
3416 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3417 y = replace_equiv_address_nv (y, inner);
3419 start_sequence ();
3421 need_clobber = false;
3422 for (i = 0;
3423 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3424 i++)
3426 rtx xpart = operand_subword (x, i, 1, mode);
3427 rtx ypart;
3429 /* Do not generate code for a move if it would come entirely
3430 from the undefined bits of a paradoxical subreg. */
3431 if (undefined_operand_subword_p (y, i))
3432 continue;
3434 ypart = operand_subword (y, i, 1, mode);
3436 /* If we can't get a part of Y, put Y into memory if it is a
3437 constant. Otherwise, force it into a register. Then we must
3438 be able to get a part of Y. */
3439 if (ypart == 0 && CONSTANT_P (y))
3441 y = use_anchored_address (force_const_mem (mode, y));
3442 ypart = operand_subword (y, i, 1, mode);
3444 else if (ypart == 0)
3445 ypart = operand_subword_force (y, i, mode);
3447 gcc_assert (xpart && ypart);
3449 need_clobber |= (GET_CODE (xpart) == SUBREG);
3451 last_insn = emit_move_insn (xpart, ypart);
3454 seq = get_insns ();
3455 end_sequence ();
3457 /* Show the output dies here. This is necessary for SUBREGs
3458 of pseudos since we cannot track their lifetimes correctly;
3459 hard regs shouldn't appear here except as return values.
3460 We never want to emit such a clobber after reload. */
3461 if (x != y
3462 && ! (reload_in_progress || reload_completed)
3463 && need_clobber != 0)
3464 emit_clobber (x);
3466 emit_insn (seq);
3468 return last_insn;
3471 /* Low level part of emit_move_insn.
3472 Called just like emit_move_insn, but assumes X and Y
3473 are basically valid. */
3475 rtx_insn *
3476 emit_move_insn_1 (rtx x, rtx y)
3478 machine_mode mode = GET_MODE (x);
3479 enum insn_code code;
3481 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3483 code = optab_handler (mov_optab, mode);
3484 if (code != CODE_FOR_nothing)
3485 return emit_insn (GEN_FCN (code) (x, y));
3487 /* Expand complex moves by moving real part and imag part. */
3488 if (COMPLEX_MODE_P (mode))
3489 return emit_move_complex (mode, x, y);
3491 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3492 || ALL_FIXED_POINT_MODE_P (mode))
3494 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3496 /* If we can't find an integer mode, use multi words. */
3497 if (result)
3498 return result;
3499 else
3500 return emit_move_multi_word (mode, x, y);
3503 if (GET_MODE_CLASS (mode) == MODE_CC)
3504 return emit_move_ccmode (mode, x, y);
3506 /* Try using a move pattern for the corresponding integer mode. This is
3507 only safe when simplify_subreg can convert MODE constants into integer
3508 constants. At present, it can only do this reliably if the value
3509 fits within a HOST_WIDE_INT. */
3510 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3512 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3514 if (ret)
3516 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3517 return ret;
3521 return emit_move_multi_word (mode, x, y);
3524 /* Generate code to copy Y into X.
3525 Both Y and X must have the same mode, except that
3526 Y can be a constant with VOIDmode.
3527 This mode cannot be BLKmode; use emit_block_move for that.
3529 Return the last instruction emitted. */
3531 rtx_insn *
3532 emit_move_insn (rtx x, rtx y)
3534 machine_mode mode = GET_MODE (x);
3535 rtx y_cst = NULL_RTX;
3536 rtx_insn *last_insn;
3537 rtx set;
3539 gcc_assert (mode != BLKmode
3540 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3542 if (CONSTANT_P (y))
3544 if (optimize
3545 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3546 && (last_insn = compress_float_constant (x, y)))
3547 return last_insn;
3549 y_cst = y;
3551 if (!targetm.legitimate_constant_p (mode, y))
3553 y = force_const_mem (mode, y);
3555 /* If the target's cannot_force_const_mem prevented the spill,
3556 assume that the target's move expanders will also take care
3557 of the non-legitimate constant. */
3558 if (!y)
3559 y = y_cst;
3560 else
3561 y = use_anchored_address (y);
3565 /* If X or Y are memory references, verify that their addresses are valid
3566 for the machine. */
3567 if (MEM_P (x)
3568 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3569 MEM_ADDR_SPACE (x))
3570 && ! push_operand (x, GET_MODE (x))))
3571 x = validize_mem (x);
3573 if (MEM_P (y)
3574 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3575 MEM_ADDR_SPACE (y)))
3576 y = validize_mem (y);
3578 gcc_assert (mode != BLKmode);
3580 last_insn = emit_move_insn_1 (x, y);
3582 if (y_cst && REG_P (x)
3583 && (set = single_set (last_insn)) != NULL_RTX
3584 && SET_DEST (set) == x
3585 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3586 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3588 return last_insn;
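/* Illustrative usage sketch, not part of expr.c: load the constant 42
   into a fresh SImode pseudo; a VOIDmode CONST_INT is an acceptable
   source operand.  */
static rtx
sketch_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}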
3591 /* Generate the body of an instruction to copy Y into X.
3592 It may be a list of insns, if one insn isn't enough. */
3594 rtx_insn *
3595 gen_move_insn (rtx x, rtx y)
3597 rtx_insn *seq;
3599 start_sequence ();
3600 emit_move_insn_1 (x, y);
3601 seq = get_insns ();
3602 end_sequence ();
3603 return seq;
3606 /* If Y is representable exactly in a narrower mode, and the target can
3607 perform the extension directly from constant or memory, then emit the
3608 move as an extension. */
3610 static rtx_insn *
3611 compress_float_constant (rtx x, rtx y)
3613 machine_mode dstmode = GET_MODE (x);
3614 machine_mode orig_srcmode = GET_MODE (y);
3615 machine_mode srcmode;
3616 const REAL_VALUE_TYPE *r;
3617 int oldcost, newcost;
3618 bool speed = optimize_insn_for_speed_p ();
3620 r = CONST_DOUBLE_REAL_VALUE (y);
3622 if (targetm.legitimate_constant_p (dstmode, y))
3623 oldcost = set_src_cost (y, orig_srcmode, speed);
3624 else
3625 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3627 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3628 srcmode != orig_srcmode;
3629 srcmode = GET_MODE_WIDER_MODE (srcmode))
3631 enum insn_code ic;
3632 rtx trunc_y;
3633 rtx_insn *last_insn;
3635 /* Skip if the target can't extend this way. */
3636 ic = can_extend_p (dstmode, srcmode, 0);
3637 if (ic == CODE_FOR_nothing)
3638 continue;
3640 /* Skip if the narrowed value isn't exact. */
3641 if (! exact_real_truncate (srcmode, r))
3642 continue;
3644 trunc_y = const_double_from_real_value (*r, srcmode);
3646 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3648 /* Skip if the target needs extra instructions to perform
3649 the extension. */
3650 if (!insn_operand_matches (ic, 1, trunc_y))
3651 continue;
3652 /* This is valid, but may not be cheaper than the original. */
3653 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3654 dstmode, speed);
3655 if (oldcost < newcost)
3656 continue;
3658 else if (float_extend_from_mem[dstmode][srcmode])
3660 trunc_y = force_const_mem (srcmode, trunc_y);
3661 /* This is valid, but may not be cheaper than the original. */
3662 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3663 dstmode, speed);
3664 if (oldcost < newcost)
3665 continue;
3666 trunc_y = validize_mem (trunc_y);
3668 else
3669 continue;
3671 /* For CSE's benefit, force the compressed constant pool entry
3672 into a new pseudo. This constant may be used in different modes,
3673 and if not, combine will put things back together for us. */
3674 trunc_y = force_reg (srcmode, trunc_y);
3676 /* If x is a hard register, perform the extension into a pseudo,
3677 so that e.g. stack realignment code is aware of it. */
3678 rtx target = x;
3679 if (REG_P (x) && HARD_REGISTER_P (x))
3680 target = gen_reg_rtx (dstmode);
3682 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3683 last_insn = get_last_insn ();
3685 if (REG_P (target))
3686 set_unique_reg_note (last_insn, REG_EQUAL, y);
3688 if (target != x)
3689 return emit_move_insn (x, target);
3690 return last_insn;
3693 return NULL;
3696 /* Pushing data onto the stack. */
3698 /* Push a block of length SIZE (perhaps variable)
3699 and return an rtx to address the beginning of the block.
3700 The value may be virtual_outgoing_args_rtx.
3702 EXTRA is the number of bytes of padding to push in addition to SIZE.
3703 BELOW nonzero means this padding comes at low addresses;
3704 otherwise, the padding comes at high addresses. */
3707 push_block (rtx size, int extra, int below)
3709 rtx temp;
3711 size = convert_modes (Pmode, ptr_mode, size, 1);
3712 if (CONSTANT_P (size))
3713 anti_adjust_stack (plus_constant (Pmode, size, extra));
3714 else if (REG_P (size) && extra == 0)
3715 anti_adjust_stack (size);
3716 else
3718 temp = copy_to_mode_reg (Pmode, size);
3719 if (extra != 0)
3720 temp = expand_binop (Pmode, add_optab, temp,
3721 gen_int_mode (extra, Pmode),
3722 temp, 0, OPTAB_LIB_WIDEN);
3723 anti_adjust_stack (temp);
3726 if (STACK_GROWS_DOWNWARD)
3728 temp = virtual_outgoing_args_rtx;
3729 if (extra != 0 && below)
3730 temp = plus_constant (Pmode, temp, extra);
3732 else
3734 if (CONST_INT_P (size))
3735 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3736 -INTVAL (size) - (below ? 0 : extra));
3737 else if (extra != 0 && !below)
3738 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3739 negate_rtx (Pmode, plus_constant (Pmode, size,
3740 extra)));
3741 else
3742 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3743 negate_rtx (Pmode, size));
3746 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
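/* Illustrative usage sketch, not part of expr.c: reserve 32 bytes of
   outgoing-argument space and obtain the address of its start; the
   amount is arbitrary.  */
static rtx
sketch_reserve_outgoing (void)
{
  return push_block (GEN_INT (32), 0, 0);
}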
3749 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3751 static rtx
3752 mem_autoinc_base (rtx mem)
3754 if (MEM_P (mem))
3756 rtx addr = XEXP (mem, 0);
3757 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3758 return XEXP (addr, 0);
3760 return NULL;
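/* Illustrative sketch, not part of expr.c: mem_autoinc_base recognizes a
   push-style MEM such as (mem (pre_dec (reg sp))) and returns the stack
   pointer, while a plain (mem (reg)) yields NULL, which is how the
   function below tells pushes from ordinary stores.  */
static bool
sketch_push_mem_p (rtx mem)
{
  return mem_autoinc_base (mem) == stack_pointer_rtx;
}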
3763 /* A utility routine used here, in reload, and in try_split. The insns
3764 after PREV up to and including LAST are known to adjust the stack,
3765 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3766 placing notes as appropriate. PREV may be NULL, indicating the
3767 entire insn sequence prior to LAST should be scanned.
3769 The set of allowed stack pointer modifications is small:
3770 (1) One or more auto-inc style memory references (aka pushes),
3771 (2) One or more addition/subtraction with the SP as destination,
3772 (3) A single move insn with the SP as destination,
3773 (4) A call_pop insn,
3774 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3776 Insns in the sequence that do not modify the SP are ignored,
3777 except for noreturn calls.
3779 The return value is the amount of adjustment that can be trivially
3780 verified, via immediate operand or auto-inc. If the adjustment
3781 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3783 HOST_WIDE_INT
3784 find_args_size_adjust (rtx_insn *insn)
3786 rtx dest, set, pat;
3787 int i;
3789 pat = PATTERN (insn);
3790 set = NULL;
3792 /* Look for a call_pop pattern. */
3793 if (CALL_P (insn))
3795 /* We have to allow non-call_pop patterns for the case
3796 of emit_single_push_insn of a TLS address. */
3797 if (GET_CODE (pat) != PARALLEL)
3798 return 0;
3800 /* All call_pop have a stack pointer adjust in the parallel.
3801 The call itself is always first, and the stack adjust is
3802 usually last, so search from the end. */
3803 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3805 set = XVECEXP (pat, 0, i);
3806 if (GET_CODE (set) != SET)
3807 continue;
3808 dest = SET_DEST (set);
3809 if (dest == stack_pointer_rtx)
3810 break;
3812 /* We'd better have found the stack pointer adjust. */
3813 if (i == 0)
3814 return 0;
3815 /* Fall through to process the extracted SET and DEST
3816 as if it was a standalone insn. */
3818 else if (GET_CODE (pat) == SET)
3819 set = pat;
3820 else if ((set = single_set (insn)) != NULL)
3822 else if (GET_CODE (pat) == PARALLEL)
3824 /* ??? Some older ports use a parallel with a stack adjust
3825 and a store for a PUSH_ROUNDING pattern, rather than a
3826 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3827 /* ??? See h8300 and m68k, pushqi1. */
3828 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3830 set = XVECEXP (pat, 0, i);
3831 if (GET_CODE (set) != SET)
3832 continue;
3833 dest = SET_DEST (set);
3834 if (dest == stack_pointer_rtx)
3835 break;
3837 /* We do not expect an auto-inc of the sp in the parallel. */
3838 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3839 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3840 != stack_pointer_rtx);
3842 if (i < 0)
3843 return 0;
3845 else
3846 return 0;
3848 dest = SET_DEST (set);
3850 /* Look for direct modifications of the stack pointer. */
3851 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3853 /* Look for a trivial adjustment, otherwise assume nothing. */
3854 /* Note that the SPU restore_stack_block pattern refers to
3855 the stack pointer in V4SImode. Consider that non-trivial. */
3856 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3857 && GET_CODE (SET_SRC (set)) == PLUS
3858 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3859 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3860 return INTVAL (XEXP (SET_SRC (set), 1));
3861 /* ??? Reload can generate no-op moves, which will be cleaned
3862 up later. Recognize them and continue searching. */
3863 else if (rtx_equal_p (dest, SET_SRC (set)))
3864 return 0;
3865 else
3866 return HOST_WIDE_INT_MIN;
3868 else
3870 rtx mem, addr;
3872 /* Otherwise only think about autoinc patterns. */
3873 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3875 mem = dest;
3876 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3877 != stack_pointer_rtx);
3879 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3880 mem = SET_SRC (set);
3881 else
3882 return 0;
3884 addr = XEXP (mem, 0);
3885 switch (GET_CODE (addr))
3887 case PRE_INC:
3888 case POST_INC:
3889 return GET_MODE_SIZE (GET_MODE (mem));
3890 case PRE_DEC:
3891 case POST_DEC:
3892 return -GET_MODE_SIZE (GET_MODE (mem));
3893 case PRE_MODIFY:
3894 case POST_MODIFY:
3895 addr = XEXP (addr, 1);
3896 gcc_assert (GET_CODE (addr) == PLUS);
3897 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3898 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3899 return INTVAL (XEXP (addr, 1));
3900 default:
3901 gcc_unreachable ();
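/* Illustrative note (editorial, not part of the original source),
   assuming a 64-bit downward-growing stack:
     (set (mem:DI (pre_dec:DI (reg sp))) (reg:DI x))  -> -8
     (set (reg sp) (plus (reg sp) (const_int -16)))   -> -16
     (set (reg sp) (reg sp))                          -> 0 (no-op move)
     (set (reg sp) (reg:DI r10))                      -> HOST_WIDE_INT_MIN
   Pushes and constant adjustments are trivially verified, no-op moves
   are ignored, and anything else defeats verification.  */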
3907 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3909 int args_size = end_args_size;
3910 bool saw_unknown = false;
3911 rtx_insn *insn;
3913 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3915 HOST_WIDE_INT this_delta;
3917 if (!NONDEBUG_INSN_P (insn))
3918 continue;
3920 this_delta = find_args_size_adjust (insn);
3921 if (this_delta == 0)
3923 if (!CALL_P (insn)
3924 || ACCUMULATE_OUTGOING_ARGS
3925 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3926 continue;
3929 gcc_assert (!saw_unknown);
3930 if (this_delta == HOST_WIDE_INT_MIN)
3931 saw_unknown = true;
3933 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3934 if (STACK_GROWS_DOWNWARD)
3935 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3937 args_size -= this_delta;
3940 return saw_unknown ? INT_MIN : args_size;
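/* Illustrative walk-through (editorial, not part of the original
   source).  After three DImode pushes on a downward-growing stack with
   END_ARGS_SIZE == 24, the loop scans from the last push backwards;
   each push yields a delta of -8, so the REG_ARGS_SIZE notes attached
   are 24 on the last push, 16 on the middle one and 8 on the first,
   and the function returns 0, the args size in effect before the
   first push.  */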
3943 #ifdef PUSH_ROUNDING
3944 /* Emit single push insn. */
3946 static void
3947 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3949 rtx dest_addr;
3950 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3951 rtx dest;
3952 enum insn_code icode;
3954 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3955 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3956 a MEM representing the push operation to the move expander. */
3957 icode = optab_handler (push_optab, mode);
3958 if (icode != CODE_FOR_nothing)
3960 struct expand_operand ops[1];
3962 create_input_operand (&ops[0], x, mode);
3963 if (maybe_expand_insn (icode, 1, ops))
3964 return;
3966 if (GET_MODE_SIZE (mode) == rounded_size)
3967 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3968 /* If we are to pad downward, adjust the stack pointer first and
3969 then store X into the stack location using an offset. This is
3970 because emit_move_insn does not know how to pad; it does not have
3971 access to type. */
3972 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3974 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3975 HOST_WIDE_INT offset;
3977 emit_move_insn (stack_pointer_rtx,
3978 expand_binop (Pmode,
3979 STACK_GROWS_DOWNWARD ? sub_optab
3980 : add_optab,
3981 stack_pointer_rtx,
3982 gen_int_mode (rounded_size, Pmode),
3983 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3985 offset = (HOST_WIDE_INT) padding_size;
3986 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3987 /* We have already decremented the stack pointer, so get the
3988 previous value. */
3989 offset += (HOST_WIDE_INT) rounded_size;
3991 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3992 /* We have already incremented the stack pointer, so get the
3993 previous value. */
3994 offset -= (HOST_WIDE_INT) rounded_size;
3996 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3997 gen_int_mode (offset, Pmode));
3999 else
4001 if (STACK_GROWS_DOWNWARD)
4002 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4003 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4004 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4005 Pmode));
4006 else
4007 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4008 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4009 gen_int_mode (rounded_size, Pmode));
4011 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4014 dest = gen_rtx_MEM (mode, dest_addr);
4016 if (type != 0)
4018 set_mem_attributes (dest, type, 1);
4020 if (cfun->tail_call_marked)
4021 /* Function incoming arguments may overlap with sibling call
4022 outgoing arguments and we cannot allow reordering of reads
4023 from function arguments with stores to outgoing arguments
4024 of sibling calls. */
4025 set_mem_alias_set (dest, 0);
4027 emit_move_insn (dest, x);
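/* Illustrative note (editorial, not part of the original source).
   Assume a 4-byte mode that PUSH_ROUNDING rounds up to an 8-byte slot,
   a downward-growing stack, STACK_PUSH_CODE == PRE_DEC, downward
   padding and no push_optab pattern: the code above first drops the
   stack pointer by 8 and then stores the value at sp + 4, leaving the
   4 bytes of padding at sp + 0.  */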
4030 /* Emit and annotate a single push insn. */
4032 static void
4033 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4035 int delta, old_delta = stack_pointer_delta;
4036 rtx_insn *prev = get_last_insn ();
4037 rtx_insn *last;
4039 emit_single_push_insn_1 (mode, x, type);
4041 last = get_last_insn ();
4043 /* Notice the common case where we emitted exactly one insn. */
4044 if (PREV_INSN (last) == prev)
4046 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4047 return;
4050 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4051 gcc_assert (delta == INT_MIN || delta == old_delta);
4053 #endif
4055 /* If reading SIZE bytes from X will end up reading from
4056 Y, return the number of bytes that overlap. Return -1
4057 if there is no overlap or -2 if we cannot determine this
4058 (for example when X and Y have different base registers). */
4060 static int
4061 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4063 rtx tmp = plus_constant (Pmode, x, size);
4064 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4066 if (!CONST_INT_P (sub))
4067 return -2;
4069 HOST_WIDE_INT val = INTVAL (sub);
4071 return IN_RANGE (val, 1, size) ? val : -1;
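/* Illustrative note (editorial, not part of the original source).
   With X == sp and Y == sp + 8, reading SIZE == 16 bytes from X gives
   (X + 16) - Y == 8, which lies in [1, 16], so 8 overlapping bytes are
   reported.  With Y == sp + 16 the difference is 0 and -1 (no overlap)
   is returned; if X and Y use different base registers the difference
   does not fold to a CONST_INT and -2 is returned.  */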
4074 /* Generate code to push X onto the stack, assuming it has mode MODE and
4075 type TYPE.
4076 MODE is redundant except when X is a CONST_INT (since they don't
4077 carry mode info).
4078 SIZE is an rtx for the size of data to be copied (in bytes),
4079 needed only if X is BLKmode.
4080 Return true if successful. May return false if asked to push a
4081 partial argument during a sibcall optimization (as specified by
4082 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4083 to not overlap.
4085 ALIGN (in bits) is maximum alignment we can assume.
4087 If PARTIAL and REG are both nonzero, then copy that many of the first
4088 bytes of X into registers starting with REG, and push the rest of X.
4089 The amount of space pushed is decreased by PARTIAL bytes.
4090 REG must be a hard register in this case.
4091 If REG is zero but PARTIAL is not, take all other actions for an
4092 argument partially in registers, but do not actually load any
4093 registers.
4095 EXTRA is the amount in bytes of extra space to leave next to this arg.
4096 This is ignored if an argument block has already been allocated.
4098 On a machine that lacks real push insns, ARGS_ADDR is the address of
4099 the bottom of the argument block for this call. We use indexing off there
4100 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4101 argument block has not been preallocated.
4103 ARGS_SO_FAR is the size of args previously pushed for this call.
4105 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4106 for arguments passed in registers. If nonzero, it will be the number
4107 of bytes required. */
4109 bool
4110 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4111 unsigned int align, int partial, rtx reg, int extra,
4112 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4113 rtx alignment_pad, bool sibcall_p)
4115 rtx xinner;
4116 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4118 /* Decide where to pad the argument: `downward' for below,
4119 `upward' for above, or `none' for no padding.
4120 Default is below for small data on big-endian machines; else above. */
4121 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4123 /* Invert direction if stack is post-decrement.
4124 FIXME: why? */
4125 if (STACK_PUSH_CODE == POST_DEC)
4126 if (where_pad != none)
4127 where_pad = (where_pad == downward ? upward : downward);
4129 xinner = x;
4131 int nregs = partial / UNITS_PER_WORD;
4132 rtx *tmp_regs = NULL;
4133 int overlapping = 0;
4135 if (mode == BLKmode
4136 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4138 /* Copy a block into the stack, entirely or partially. */
4140 rtx temp;
4141 int used;
4142 int offset;
4143 int skip;
4145 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4146 used = partial - offset;
4148 if (mode != BLKmode)
4150 /* A value is to be stored in an insufficiently aligned
4151 stack slot; copy via a suitably aligned slot if
4152 necessary. */
4153 size = GEN_INT (GET_MODE_SIZE (mode));
4154 if (!MEM_P (xinner))
4156 temp = assign_temp (type, 1, 1);
4157 emit_move_insn (temp, xinner);
4158 xinner = temp;
4162 gcc_assert (size);
4164 /* USED is now the # of bytes we need not copy to the stack
4165 because registers will take care of them. */
4167 if (partial != 0)
4168 xinner = adjust_address (xinner, BLKmode, used);
4170 /* If the partial register-part of the arg counts in its stack size,
4171 skip the part of stack space corresponding to the registers.
4172 Otherwise, start copying to the beginning of the stack space,
4173 by setting SKIP to 0. */
4174 skip = (reg_parm_stack_space == 0) ? 0 : used;
4176 #ifdef PUSH_ROUNDING
4177 /* Do it with several push insns if that doesn't take lots of insns
4178 and if there is no difficulty with push insns that skip bytes
4179 on the stack for alignment purposes. */
4180 if (args_addr == 0
4181 && PUSH_ARGS
4182 && CONST_INT_P (size)
4183 && skip == 0
4184 && MEM_ALIGN (xinner) >= align
4185 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4186 /* Here we avoid the case of a structure whose weak alignment
4187 forces many pushes of a small amount of data,
4188 and such small pushes do rounding that causes trouble. */
4189 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4190 || align >= BIGGEST_ALIGNMENT
4191 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4192 == (align / BITS_PER_UNIT)))
4193 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4195 /* Push padding now if padding above and stack grows down,
4196 or if padding below and stack grows up.
4197 But if space already allocated, this has already been done. */
4198 if (extra && args_addr == 0
4199 && where_pad != none && where_pad != stack_direction)
4200 anti_adjust_stack (GEN_INT (extra));
4202 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4204 else
4205 #endif /* PUSH_ROUNDING */
4207 rtx target;
4209 /* Otherwise make space on the stack and copy the data
4210 to the address of that space. */
4212 /* Deduct words put into registers from the size we must copy. */
4213 if (partial != 0)
4215 if (CONST_INT_P (size))
4216 size = GEN_INT (INTVAL (size) - used);
4217 else
4218 size = expand_binop (GET_MODE (size), sub_optab, size,
4219 gen_int_mode (used, GET_MODE (size)),
4220 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4223 /* Get the address of the stack space.
4224 In this case, we do not deal with EXTRA separately.
4225 A single stack adjust will do. */
4226 if (! args_addr)
4228 temp = push_block (size, extra, where_pad == downward);
4229 extra = 0;
4231 else if (CONST_INT_P (args_so_far))
4232 temp = memory_address (BLKmode,
4233 plus_constant (Pmode, args_addr,
4234 skip + INTVAL (args_so_far)));
4235 else
4236 temp = memory_address (BLKmode,
4237 plus_constant (Pmode,
4238 gen_rtx_PLUS (Pmode,
4239 args_addr,
4240 args_so_far),
4241 skip));
4243 if (!ACCUMULATE_OUTGOING_ARGS)
4245 /* If the source is referenced relative to the stack pointer,
4246 copy it to another register to stabilize it. We do not need
4247 to do this if we know that we won't be changing sp. */
4249 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4250 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4251 temp = copy_to_reg (temp);
4254 target = gen_rtx_MEM (BLKmode, temp);
4256 /* We do *not* set_mem_attributes here, because incoming arguments
4257 may overlap with sibling call outgoing arguments and we cannot
4258 allow reordering of reads from function arguments with stores
4259 to outgoing arguments of sibling calls. We do, however, want
4260 to record the alignment of the stack slot. */
4261 /* ALIGN may well be better aligned than TYPE, e.g. due to
4262 PARM_BOUNDARY. Assume the caller isn't lying. */
4263 set_mem_align (target, align);
4265 /* If part should go in registers and pushing to that part would
4266 overwrite some of the values that need to go into regs, load the
4267 overlapping values into temporary pseudos to be moved into the hard
4268 regs at the end after the stack pushing has completed.
4269 We cannot load them directly into the hard regs here because
4270 they can be clobbered by the block move expansions.
4271 See PR 65358. */
4273 if (partial > 0 && reg != 0 && mode == BLKmode
4274 && GET_CODE (reg) != PARALLEL)
4276 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4277 if (overlapping > 0)
4279 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4280 overlapping /= UNITS_PER_WORD;
4282 tmp_regs = XALLOCAVEC (rtx, overlapping);
4284 for (int i = 0; i < overlapping; i++)
4285 tmp_regs[i] = gen_reg_rtx (word_mode);
4287 for (int i = 0; i < overlapping; i++)
4288 emit_move_insn (tmp_regs[i],
4289 operand_subword_force (target, i, mode));
4291 else if (overlapping == -1)
4292 overlapping = 0;
4293 /* Could not determine whether there is overlap.
4294 Fail the sibcall. */
4295 else
4297 overlapping = 0;
4298 if (sibcall_p)
4299 return false;
4302 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4305 else if (partial > 0)
4307 /* Scalar partly in registers. */
4309 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4310 int i;
4311 int not_stack;
4312 /* # bytes of start of argument
4313 that we must make space for but need not store. */
4314 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4315 int args_offset = INTVAL (args_so_far);
4316 int skip;
4318 /* Push padding now if padding above and stack grows down,
4319 or if padding below and stack grows up.
4320 But if space already allocated, this has already been done. */
4321 if (extra && args_addr == 0
4322 && where_pad != none && where_pad != stack_direction)
4323 anti_adjust_stack (GEN_INT (extra));
4325 /* If we make space by pushing it, we might as well push
4326 the real data. Otherwise, we can leave OFFSET nonzero
4327 and leave the space uninitialized. */
4328 if (args_addr == 0)
4329 offset = 0;
4331 /* Now NOT_STACK gets the number of words that we don't need to
4332 allocate on the stack. Convert OFFSET to words too. */
4333 not_stack = (partial - offset) / UNITS_PER_WORD;
4334 offset /= UNITS_PER_WORD;
4336 /* If the partial register-part of the arg counts in its stack size,
4337 skip the part of stack space corresponding to the registers.
4338 Otherwise, start copying to the beginning of the stack space,
4339 by setting SKIP to 0. */
4340 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4342 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4343 x = validize_mem (force_const_mem (mode, x));
4345 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4346 SUBREGs of such registers are not allowed. */
4347 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4348 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4349 x = copy_to_reg (x);
4351 /* Loop over all the words allocated on the stack for this arg. */
4352 /* We can do it by words, because any scalar bigger than a word
4353 has a size that is a multiple of a word. */
4354 for (i = size - 1; i >= not_stack; i--)
4355 if (i >= not_stack + offset)
4356 if (!emit_push_insn (operand_subword_force (x, i, mode),
4357 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4358 0, args_addr,
4359 GEN_INT (args_offset + ((i - not_stack + skip)
4360 * UNITS_PER_WORD)),
4361 reg_parm_stack_space, alignment_pad, sibcall_p))
4362 return false;
4364 else
4366 rtx addr;
4367 rtx dest;
4369 /* Push padding now if padding above and stack grows down,
4370 or if padding below and stack grows up.
4371 But if space already allocated, this has already been done. */
4372 if (extra && args_addr == 0
4373 && where_pad != none && where_pad != stack_direction)
4374 anti_adjust_stack (GEN_INT (extra));
4376 #ifdef PUSH_ROUNDING
4377 if (args_addr == 0 && PUSH_ARGS)
4378 emit_single_push_insn (mode, x, type);
4379 else
4380 #endif
4382 if (CONST_INT_P (args_so_far))
4383 addr
4384 = memory_address (mode,
4385 plus_constant (Pmode, args_addr,
4386 INTVAL (args_so_far)));
4387 else
4388 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4389 args_so_far));
4390 dest = gen_rtx_MEM (mode, addr);
4392 /* We do *not* set_mem_attributes here, because incoming arguments
4393 may overlap with sibling call outgoing arguments and we cannot
4394 allow reordering of reads from function arguments with stores
4395 to outgoing arguments of sibling calls. We do, however, want
4396 to record the alignment of the stack slot. */
4397 /* ALIGN may well be better aligned than TYPE, e.g. due to
4398 PARM_BOUNDARY. Assume the caller isn't lying. */
4399 set_mem_align (dest, align);
4401 emit_move_insn (dest, x);
4405 /* Move the partial arguments into the registers and any overlapping
4406 values that we moved into the pseudos in tmp_regs. */
4407 if (partial > 0 && reg != 0)
4409 /* Handle calls that pass values in multiple non-contiguous locations.
4410 The Irix 6 ABI has examples of this. */
4411 if (GET_CODE (reg) == PARALLEL)
4412 emit_group_load (reg, x, type, -1);
4413 else
4415 gcc_assert (partial % UNITS_PER_WORD == 0);
4416 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4418 for (int i = 0; i < overlapping; i++)
4419 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4420 + nregs - overlapping + i),
4421 tmp_regs[i]);
4426 if (extra && args_addr == 0 && where_pad == stack_direction)
4427 anti_adjust_stack (GEN_INT (extra));
4429 if (alignment_pad && args_addr == 0)
4430 anti_adjust_stack (alignment_pad);
4432 return true;
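/* Illustrative note (editorial, not part of the original source).
   With UNITS_PER_WORD == 8, passing a 24-byte BLKmode argument with
   PARTIAL == 16 and REG set puts the first two words into REG and the
   following register and pushes only the remaining 8 bytes.  If the
   block move to the outgoing slot would overwrite the part of the
   argument that still has to be loaded into those registers (see
   PR 65358), the affected words are loaded into temporary pseudos
   first and moved into the hard registers after the pushing is done.  */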
4435 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4436 operations. */
4438 static rtx
4439 get_subtarget (rtx x)
4441 return (optimize
4442 || x == 0
4443 /* Only registers can be subtargets. */
4444 || !REG_P (x)
4445 /* Don't use hard regs to avoid extending their life. */
4446 || REGNO (x) < FIRST_PSEUDO_REGISTER
4447 ? 0 : x);
4450 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4451 FIELD is a bitfield. Returns true if the optimization was successful,
4452 and there's nothing else to do. */
4454 static bool
4455 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4456 unsigned HOST_WIDE_INT bitpos,
4457 unsigned HOST_WIDE_INT bitregion_start,
4458 unsigned HOST_WIDE_INT bitregion_end,
4459 machine_mode mode1, rtx str_rtx,
4460 tree to, tree src)
4462 machine_mode str_mode = GET_MODE (str_rtx);
4463 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4464 tree op0, op1;
4465 rtx value, result;
4466 optab binop;
4467 gimple *srcstmt;
4468 enum tree_code code;
4470 if (mode1 != VOIDmode
4471 || bitsize >= BITS_PER_WORD
4472 || str_bitsize > BITS_PER_WORD
4473 || TREE_SIDE_EFFECTS (to)
4474 || TREE_THIS_VOLATILE (to))
4475 return false;
4477 STRIP_NOPS (src);
4478 if (TREE_CODE (src) != SSA_NAME)
4479 return false;
4480 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4481 return false;
4483 srcstmt = get_gimple_for_ssa_name (src);
4484 if (!srcstmt
4485 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4486 return false;
4488 code = gimple_assign_rhs_code (srcstmt);
4490 op0 = gimple_assign_rhs1 (srcstmt);
4492 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4493 to find its initialization. Hopefully the initialization will
4494 be from a bitfield load. */
4495 if (TREE_CODE (op0) == SSA_NAME)
4497 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4499 /* We want to eventually have OP0 be the same as TO, which
4500 should be a bitfield. */
4501 if (!op0stmt
4502 || !is_gimple_assign (op0stmt)
4503 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4504 return false;
4505 op0 = gimple_assign_rhs1 (op0stmt);
4508 op1 = gimple_assign_rhs2 (srcstmt);
4510 if (!operand_equal_p (to, op0, 0))
4511 return false;
4513 if (MEM_P (str_rtx))
4515 unsigned HOST_WIDE_INT offset1;
4517 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4518 str_mode = word_mode;
4519 str_mode = get_best_mode (bitsize, bitpos,
4520 bitregion_start, bitregion_end,
4521 MEM_ALIGN (str_rtx), str_mode, 0);
4522 if (str_mode == VOIDmode)
4523 return false;
4524 str_bitsize = GET_MODE_BITSIZE (str_mode);
4526 offset1 = bitpos;
4527 bitpos %= str_bitsize;
4528 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4529 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4531 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4532 return false;
4534 /* If the bit field covers the whole REG/MEM, store_field
4535 will likely generate better code. */
4536 if (bitsize >= str_bitsize)
4537 return false;
4539 /* We can't handle fields split across multiple entities. */
4540 if (bitpos + bitsize > str_bitsize)
4541 return false;
4543 if (BYTES_BIG_ENDIAN)
4544 bitpos = str_bitsize - bitpos - bitsize;
4546 switch (code)
4548 case PLUS_EXPR:
4549 case MINUS_EXPR:
4550 /* For now, just optimize the case of the topmost bitfield
4551 where we don't need to do any masking and also
4552 1 bit bitfields where xor can be used.
4553 We might win by one instruction for the other bitfields
4554 too if insv/extv instructions aren't used, so that
4555 can be added later. */
4556 if (bitpos + bitsize != str_bitsize
4557 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4558 break;
4560 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4561 value = convert_modes (str_mode,
4562 TYPE_MODE (TREE_TYPE (op1)), value,
4563 TYPE_UNSIGNED (TREE_TYPE (op1)));
4565 /* We may be accessing data outside the field, which means
4566 we can alias adjacent data. */
4567 if (MEM_P (str_rtx))
4569 str_rtx = shallow_copy_rtx (str_rtx);
4570 set_mem_alias_set (str_rtx, 0);
4571 set_mem_expr (str_rtx, 0);
4574 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4575 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4577 value = expand_and (str_mode, value, const1_rtx, NULL);
4578 binop = xor_optab;
4580 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4581 result = expand_binop (str_mode, binop, str_rtx,
4582 value, str_rtx, 1, OPTAB_WIDEN);
4583 if (result != str_rtx)
4584 emit_move_insn (str_rtx, result);
4585 return true;
4587 case BIT_IOR_EXPR:
4588 case BIT_XOR_EXPR:
4589 if (TREE_CODE (op1) != INTEGER_CST)
4590 break;
4591 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4592 value = convert_modes (str_mode,
4593 TYPE_MODE (TREE_TYPE (op1)), value,
4594 TYPE_UNSIGNED (TREE_TYPE (op1)));
4596 /* We may be accessing data outside the field, which means
4597 we can alias adjacent data. */
4598 if (MEM_P (str_rtx))
4600 str_rtx = shallow_copy_rtx (str_rtx);
4601 set_mem_alias_set (str_rtx, 0);
4602 set_mem_expr (str_rtx, 0);
4605 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4606 if (bitpos + bitsize != str_bitsize)
4608 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4609 str_mode);
4610 value = expand_and (str_mode, value, mask, NULL_RTX);
4612 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4613 result = expand_binop (str_mode, binop, str_rtx,
4614 value, str_rtx, 1, OPTAB_WIDEN);
4615 if (result != str_rtx)
4616 emit_move_insn (str_rtx, result);
4617 return true;
4619 default:
4620 break;
4623 return false;
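/* Illustrative note (editorial, not part of the original source).
   Given
     struct S { unsigned lo : 12; unsigned hi : 20; } s;
   a store like "s.hi += n" can hit the PLUS_EXPR case above: 'hi' is
   the topmost bitfield of its 32-bit word, so the addend is shifted
   into the field's position and added into the containing word; any
   carry out of the top of the word is discarded, so no masking is
   needed.  A 1-bit field incremented by an integer constant takes the
   xor path instead.  */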
4626 /* In the C++ memory model, consecutive bit fields in a structure are
4627 considered one memory location.
4629 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4630 returns the bit range of consecutive bits in which this COMPONENT_REF
4631 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4632 and *OFFSET may be adjusted in the process.
4634 If the access does not need to be restricted, 0 is returned in both
4635 *BITSTART and *BITEND. */
4637 static void
4638 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4639 unsigned HOST_WIDE_INT *bitend,
4640 tree exp,
4641 HOST_WIDE_INT *bitpos,
4642 tree *offset)
4644 HOST_WIDE_INT bitoffset;
4645 tree field, repr;
4647 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4649 field = TREE_OPERAND (exp, 1);
4650 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4651 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4652 need to limit the range we can access. */
4653 if (!repr)
4655 *bitstart = *bitend = 0;
4656 return;
4659 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4660 part of a larger bit field, then the representative does not serve any
4661 useful purpose. This can occur in Ada. */
4662 if (handled_component_p (TREE_OPERAND (exp, 0)))
4664 machine_mode rmode;
4665 HOST_WIDE_INT rbitsize, rbitpos;
4666 tree roffset;
4667 int unsignedp;
4668 int volatilep = 0;
4669 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4670 &roffset, &rmode, &unsignedp, &volatilep, false);
4671 if ((rbitpos % BITS_PER_UNIT) != 0)
4673 *bitstart = *bitend = 0;
4674 return;
4678 /* Compute the adjustment to bitpos from the offset of the field
4679 relative to the representative. DECL_FIELD_OFFSET of field and
4680 repr are the same by construction if they are not constants,
4681 see finish_bitfield_layout. */
4682 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4683 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4684 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4685 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4686 else
4687 bitoffset = 0;
4688 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4689 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4691 /* If the adjustment is larger than bitpos, we would have a negative bit
4692 position for the lower bound and this may wreak havoc later. Adjust
4693 offset and bitpos to make the lower bound non-negative in that case. */
4694 if (bitoffset > *bitpos)
4696 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4697 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4699 *bitpos += adjust;
4700 if (*offset == NULL_TREE)
4701 *offset = size_int (-adjust / BITS_PER_UNIT);
4702 else
4703 *offset
4704 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4705 *bitstart = 0;
4707 else
4708 *bitstart = *bitpos - bitoffset;
4710 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
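/* Illustrative note (editorial, not part of the original source).
   For
     struct S { char c; int f1 : 8; int f2 : 8; };
   f1 and f2 share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to
   s.f1 gets a bit region covering f1 and f2 but not 'c': under the
   C++11 memory model 'c' is a separate memory location that the store
   must not touch, even where a wider read-modify-write would be
   cheaper.  */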
4713 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4714 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4715 DECL_RTL was not set yet, return NORTL. */
4717 static inline bool
4718 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4720 if (TREE_CODE (addr) != ADDR_EXPR)
4721 return false;
4723 tree base = TREE_OPERAND (addr, 0);
4725 if (!DECL_P (base)
4726 || TREE_ADDRESSABLE (base)
4727 || DECL_MODE (base) == BLKmode)
4728 return false;
4730 if (!DECL_RTL_SET_P (base))
4731 return nortl;
4733 return (!MEM_P (DECL_RTL (base)));
4736 /* Returns true if the MEM_REF REF refers to an object that does not
4737 reside in memory and has non-BLKmode. */
4739 static inline bool
4740 mem_ref_refers_to_non_mem_p (tree ref)
4742 tree base = TREE_OPERAND (ref, 0);
4743 return addr_expr_of_non_mem_decl_p_1 (base, false);
4746 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4747 is true, try generating a nontemporal store. */
4749 void
4750 expand_assignment (tree to, tree from, bool nontemporal)
4752 rtx to_rtx = 0;
4753 rtx result;
4754 machine_mode mode;
4755 unsigned int align;
4756 enum insn_code icode;
4758 /* Don't crash if the lhs of the assignment was erroneous. */
4759 if (TREE_CODE (to) == ERROR_MARK)
4761 expand_normal (from);
4762 return;
4765 /* Optimize away no-op moves without side-effects. */
4766 if (operand_equal_p (to, from, 0))
4767 return;
4769 /* Handle misaligned stores. */
4770 mode = TYPE_MODE (TREE_TYPE (to));
4771 if ((TREE_CODE (to) == MEM_REF
4772 || TREE_CODE (to) == TARGET_MEM_REF)
4773 && mode != BLKmode
4774 && !mem_ref_refers_to_non_mem_p (to)
4775 && ((align = get_object_alignment (to))
4776 < GET_MODE_ALIGNMENT (mode))
4777 && (((icode = optab_handler (movmisalign_optab, mode))
4778 != CODE_FOR_nothing)
4779 || SLOW_UNALIGNED_ACCESS (mode, align)))
4781 rtx reg, mem;
4783 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4784 reg = force_not_mem (reg);
4785 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4787 if (icode != CODE_FOR_nothing)
4789 struct expand_operand ops[2];
4791 create_fixed_operand (&ops[0], mem);
4792 create_input_operand (&ops[1], reg, mode);
4793 /* The movmisalign<mode> pattern cannot fail, else the assignment
4794 would silently be omitted. */
4795 expand_insn (icode, 2, ops);
4797 else
4798 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4799 return;
4802 /* Assignment of a structure component needs special treatment
4803 if the structure component's rtx is not simply a MEM.
4804 Assignment of an array element at a constant index, and assignment of
4805 an array element in an unaligned packed structure field, have the same
4806 problem. Same for (partially) storing into a non-memory object. */
4807 if (handled_component_p (to)
4808 || (TREE_CODE (to) == MEM_REF
4809 && mem_ref_refers_to_non_mem_p (to))
4810 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4812 machine_mode mode1;
4813 HOST_WIDE_INT bitsize, bitpos;
4814 unsigned HOST_WIDE_INT bitregion_start = 0;
4815 unsigned HOST_WIDE_INT bitregion_end = 0;
4816 tree offset;
4817 int unsignedp;
4818 int volatilep = 0;
4819 tree tem;
4821 push_temp_slots ();
4822 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4823 &unsignedp, &volatilep, true);
4825 /* Make sure bitpos is not negative, it can wreak havoc later. */
4826 if (bitpos < 0)
4828 gcc_assert (offset == NULL_TREE);
4829 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4830 ? 3 : exact_log2 (BITS_PER_UNIT)));
4831 bitpos &= BITS_PER_UNIT - 1;
4834 if (TREE_CODE (to) == COMPONENT_REF
4835 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4836 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4837 /* The C++ memory model naturally applies to byte-aligned fields.
4838 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4839 BITSIZE are not byte-aligned, there is no need to limit the range
4840 we can access. This can occur with packed structures in Ada. */
4841 else if (bitsize > 0
4842 && bitsize % BITS_PER_UNIT == 0
4843 && bitpos % BITS_PER_UNIT == 0)
4845 bitregion_start = bitpos;
4846 bitregion_end = bitpos + bitsize - 1;
4849 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4851 /* If the field has a mode, we want to access it in the
4852 field's mode, not the computed mode.
4853 If a MEM has VOIDmode (external with incomplete type),
4854 use BLKmode for it instead. */
4855 if (MEM_P (to_rtx))
4857 if (mode1 != VOIDmode)
4858 to_rtx = adjust_address (to_rtx, mode1, 0);
4859 else if (GET_MODE (to_rtx) == VOIDmode)
4860 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4863 if (offset != 0)
4865 machine_mode address_mode;
4866 rtx offset_rtx;
4868 if (!MEM_P (to_rtx))
4870 /* We can get constant negative offsets into arrays with broken
4871 user code. Translate this to a trap instead of ICEing. */
4872 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4873 expand_builtin_trap ();
4874 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4877 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4878 address_mode = get_address_mode (to_rtx);
4879 if (GET_MODE (offset_rtx) != address_mode)
4881 /* We cannot be sure that the RTL in offset_rtx is valid outside
4882 of a memory address context, so force it into a register
4883 before attempting to convert it to the desired mode. */
4884 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4885 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4888 /* If we have an expression in OFFSET_RTX and a non-zero
4889 byte offset in BITPOS, adding the byte offset before the
4890 OFFSET_RTX results in better intermediate code, which makes
4891 later rtl optimization passes perform better.
4893 We prefer intermediate code like this:
4895 r124:DI=r123:DI+0x18
4896 [r124:DI]=r121:DI
4898 ... instead of ...
4900 r124:DI=r123:DI+0x10
4901 [r124:DI+0x8]=r121:DI
4903 This is only done for aligned data values, as these can
4904 be expected to result in single move instructions. */
4905 if (mode1 != VOIDmode
4906 && bitpos != 0
4907 && bitsize > 0
4908 && (bitpos % bitsize) == 0
4909 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4910 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4912 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4913 bitregion_start = 0;
4914 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4915 bitregion_end -= bitpos;
4916 bitpos = 0;
4919 to_rtx = offset_address (to_rtx, offset_rtx,
4920 highest_pow2_factor_for_target (to,
4921 offset));
4924 /* No action is needed if the target is not a memory and the field
4925 lies completely outside that target. This can occur if the source
4926 code contains an out-of-bounds access to a small array. */
4927 if (!MEM_P (to_rtx)
4928 && GET_MODE (to_rtx) != BLKmode
4929 && (unsigned HOST_WIDE_INT) bitpos
4930 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4932 expand_normal (from);
4933 result = NULL;
4935 /* Handle expand_expr of a complex value returning a CONCAT. */
4936 else if (GET_CODE (to_rtx) == CONCAT)
4938 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4939 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4940 && bitpos == 0
4941 && bitsize == mode_bitsize)
4942 result = store_expr (from, to_rtx, false, nontemporal);
4943 else if (bitsize == mode_bitsize / 2
4944 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4945 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4946 nontemporal);
4947 else if (bitpos + bitsize <= mode_bitsize / 2)
4948 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4949 bitregion_start, bitregion_end,
4950 mode1, from,
4951 get_alias_set (to), nontemporal);
4952 else if (bitpos >= mode_bitsize / 2)
4953 result = store_field (XEXP (to_rtx, 1), bitsize,
4954 bitpos - mode_bitsize / 2,
4955 bitregion_start, bitregion_end,
4956 mode1, from,
4957 get_alias_set (to), nontemporal);
4958 else if (bitpos == 0 && bitsize == mode_bitsize)
4960 rtx from_rtx;
4961 result = expand_normal (from);
4962 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4963 TYPE_MODE (TREE_TYPE (from)), 0);
4964 emit_move_insn (XEXP (to_rtx, 0),
4965 read_complex_part (from_rtx, false));
4966 emit_move_insn (XEXP (to_rtx, 1),
4967 read_complex_part (from_rtx, true));
4969 else
4971 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4972 GET_MODE_SIZE (GET_MODE (to_rtx)));
4973 write_complex_part (temp, XEXP (to_rtx, 0), false);
4974 write_complex_part (temp, XEXP (to_rtx, 1), true);
4975 result = store_field (temp, bitsize, bitpos,
4976 bitregion_start, bitregion_end,
4977 mode1, from,
4978 get_alias_set (to), nontemporal);
4979 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4980 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4983 else
4985 if (MEM_P (to_rtx))
4987 /* If the field is at offset zero, we could have been given the
4988 DECL_RTX of the parent struct. Don't munge it. */
4989 to_rtx = shallow_copy_rtx (to_rtx);
4990 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4991 if (volatilep)
4992 MEM_VOLATILE_P (to_rtx) = 1;
4995 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4996 bitregion_start, bitregion_end,
4997 mode1,
4998 to_rtx, to, from))
4999 result = NULL;
5000 else
5001 result = store_field (to_rtx, bitsize, bitpos,
5002 bitregion_start, bitregion_end,
5003 mode1, from,
5004 get_alias_set (to), nontemporal);
5007 if (result)
5008 preserve_temp_slots (result);
5009 pop_temp_slots ();
5010 return;
5013 /* If the rhs is a function call and its value is not an aggregate,
5014 call the function before we start to compute the lhs.
5015 This is needed for correct code for cases such as
5016 val = setjmp (buf) on machines where reference to val
5017 requires loading up part of an address in a separate insn.
5019 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5020 since it might be a promoted variable where the zero- or sign-extension
5021 needs to be done. Handling this in the normal way is safe because no
5022 computation is done before the call. The same is true for SSA names. */
5023 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5024 && COMPLETE_TYPE_P (TREE_TYPE (from))
5025 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5026 && ! (((TREE_CODE (to) == VAR_DECL
5027 || TREE_CODE (to) == PARM_DECL
5028 || TREE_CODE (to) == RESULT_DECL)
5029 && REG_P (DECL_RTL (to)))
5030 || TREE_CODE (to) == SSA_NAME))
5032 rtx value;
5033 rtx bounds;
5035 push_temp_slots ();
5036 value = expand_normal (from);
5038 /* Split value and bounds to store them separately. */
5039 chkp_split_slot (value, &value, &bounds);
5041 if (to_rtx == 0)
5042 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5044 /* Handle calls that return values in multiple non-contiguous locations.
5045 The Irix 6 ABI has examples of this. */
5046 if (GET_CODE (to_rtx) == PARALLEL)
5048 if (GET_CODE (value) == PARALLEL)
5049 emit_group_move (to_rtx, value);
5050 else
5051 emit_group_load (to_rtx, value, TREE_TYPE (from),
5052 int_size_in_bytes (TREE_TYPE (from)));
5054 else if (GET_CODE (value) == PARALLEL)
5055 emit_group_store (to_rtx, value, TREE_TYPE (from),
5056 int_size_in_bytes (TREE_TYPE (from)));
5057 else if (GET_MODE (to_rtx) == BLKmode)
5059 /* Handle calls that return BLKmode values in registers. */
5060 if (REG_P (value))
5061 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5062 else
5063 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5065 else
5067 if (POINTER_TYPE_P (TREE_TYPE (to)))
5068 value = convert_memory_address_addr_space
5069 (GET_MODE (to_rtx), value,
5070 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5072 emit_move_insn (to_rtx, value);
5075 /* Store bounds if required. */
5076 if (bounds
5077 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5079 gcc_assert (MEM_P (to_rtx));
5080 chkp_emit_bounds_store (bounds, value, to_rtx);
5083 preserve_temp_slots (to_rtx);
5084 pop_temp_slots ();
5085 return;
5088 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5089 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5091 /* Don't move directly into a return register. */
5092 if (TREE_CODE (to) == RESULT_DECL
5093 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5095 rtx temp;
5097 push_temp_slots ();
5099 /* If the source is itself a return value, it still is in a pseudo at
5100 this point so we can move it back to the return register directly. */
5101 if (REG_P (to_rtx)
5102 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5103 && TREE_CODE (from) != CALL_EXPR)
5104 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5105 else
5106 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5108 /* Handle calls that return values in multiple non-contiguous locations.
5109 The Irix 6 ABI has examples of this. */
5110 if (GET_CODE (to_rtx) == PARALLEL)
5112 if (GET_CODE (temp) == PARALLEL)
5113 emit_group_move (to_rtx, temp);
5114 else
5115 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5116 int_size_in_bytes (TREE_TYPE (from)));
5118 else if (temp)
5119 emit_move_insn (to_rtx, temp);
5121 preserve_temp_slots (to_rtx);
5122 pop_temp_slots ();
5123 return;
5126 /* In case we are returning the contents of an object which overlaps
5127 the place the value is being stored, use a safe function when copying
5128 a value through a pointer into a structure value return block. */
5129 if (TREE_CODE (to) == RESULT_DECL
5130 && TREE_CODE (from) == INDIRECT_REF
5131 && ADDR_SPACE_GENERIC_P
5132 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5133 && refs_may_alias_p (to, from)
5134 && cfun->returns_struct
5135 && !cfun->returns_pcc_struct)
5137 rtx from_rtx, size;
5139 push_temp_slots ();
5140 size = expr_size (from);
5141 from_rtx = expand_normal (from);
5143 emit_library_call (memmove_libfunc, LCT_NORMAL,
5144 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5145 XEXP (from_rtx, 0), Pmode,
5146 convert_to_mode (TYPE_MODE (sizetype),
5147 size, TYPE_UNSIGNED (sizetype)),
5148 TYPE_MODE (sizetype));
5150 preserve_temp_slots (to_rtx);
5151 pop_temp_slots ();
5152 return;
5155 /* Compute FROM and store the value in the rtx we got. */
5157 push_temp_slots ();
5158 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5159 preserve_temp_slots (result);
5160 pop_temp_slots ();
5161 return;
5164 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5165 succeeded, false otherwise. */
5167 bool
5168 emit_storent_insn (rtx to, rtx from)
5170 struct expand_operand ops[2];
5171 machine_mode mode = GET_MODE (to);
5172 enum insn_code code = optab_handler (storent_optab, mode);
5174 if (code == CODE_FOR_nothing)
5175 return false;
5177 create_fixed_operand (&ops[0], to);
5178 create_input_operand (&ops[1], from, mode);
5179 return maybe_expand_insn (code, 2, ops);
5182 /* Generate code for computing expression EXP,
5183 and storing the value into TARGET.
5185 If the mode is BLKmode then we may return TARGET itself.
5186 It turns out that in BLKmode it doesn't cause a problem,
5187 because C has no operators that could combine two different
5188 assignments into the same BLKmode object with different values
5189 with no sequence point. Will other languages need this to
5190 be more thorough?
5192 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5193 stack, and block moves may need to be treated specially.
5195 If NONTEMPORAL is true, try using a nontemporal store instruction.
5197 If BTARGET is not NULL then computed bounds of EXP are
5198 associated with BTARGET. */
5201 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5202 bool nontemporal, tree btarget)
5204 rtx temp;
5205 rtx alt_rtl = NULL_RTX;
5206 location_t loc = curr_insn_location ();
5208 if (VOID_TYPE_P (TREE_TYPE (exp)))
5210 /* C++ can generate ?: expressions with a throw expression in one
5211 branch and an rvalue in the other. Here, we resolve attempts to
5212 store the throw expression's nonexistent result. */
5213 gcc_assert (!call_param_p);
5214 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5215 return NULL_RTX;
5217 if (TREE_CODE (exp) == COMPOUND_EXPR)
5219 /* Perform first part of compound expression, then assign from second
5220 part. */
5221 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5222 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5223 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5224 call_param_p, nontemporal, btarget);
5226 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5228 /* For a conditional expression, get a safe form of the target. Then
5229 test the condition, doing the appropriate assignment on either
5230 side. This avoids the creation of unnecessary temporaries.
5231 For non-BLKmode, it is more efficient not to do this. */
5233 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5235 do_pending_stack_adjust ();
5236 NO_DEFER_POP;
5237 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5238 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5239 nontemporal, btarget);
5240 emit_jump_insn (targetm.gen_jump (lab2));
5241 emit_barrier ();
5242 emit_label (lab1);
5243 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5244 nontemporal, btarget);
5245 emit_label (lab2);
5246 OK_DEFER_POP;
5248 return NULL_RTX;
5250 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5251 /* If this is a scalar in a register that is stored in a wider mode
5252 than the declared mode, compute the result into its declared mode
5253 and then convert to the wider mode. Our value is the computed
5254 expression. */
5256 rtx inner_target = 0;
5258 /* We can do the conversion inside EXP, which will often result
5259 in some optimizations. Do the conversion in two steps: first
5260 change the signedness, if needed, then the extend. But don't
5261 do this if the type of EXP is a subtype of something else
5262 since then the conversion might involve more than just
5263 converting modes. */
5264 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5265 && TREE_TYPE (TREE_TYPE (exp)) == 0
5266 && GET_MODE_PRECISION (GET_MODE (target))
5267 == TYPE_PRECISION (TREE_TYPE (exp)))
5269 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5270 TYPE_UNSIGNED (TREE_TYPE (exp))))
5272 /* Some types, e.g. Fortran's logical*4, won't have a signed
5273 version, so use the mode instead. */
5274 tree ntype
5275 = (signed_or_unsigned_type_for
5276 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5277 if (ntype == NULL)
5278 ntype = lang_hooks.types.type_for_mode
5279 (TYPE_MODE (TREE_TYPE (exp)),
5280 SUBREG_PROMOTED_SIGN (target));
5282 exp = fold_convert_loc (loc, ntype, exp);
5285 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5286 (GET_MODE (SUBREG_REG (target)),
5287 SUBREG_PROMOTED_SIGN (target)),
5288 exp);
5290 inner_target = SUBREG_REG (target);
5293 temp = expand_expr (exp, inner_target, VOIDmode,
5294 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5296 /* Handle bounds returned by call. */
5297 if (TREE_CODE (exp) == CALL_EXPR)
5299 rtx bounds;
5300 chkp_split_slot (temp, &temp, &bounds);
5301 if (bounds && btarget)
5303 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5304 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5305 chkp_set_rtl_bounds (btarget, tmp);
5309 /* If TEMP is a VOIDmode constant, use convert_modes to make
5310 sure that we properly convert it. */
5311 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5313 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5314 temp, SUBREG_PROMOTED_SIGN (target));
5315 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5316 GET_MODE (target), temp,
5317 SUBREG_PROMOTED_SIGN (target));
5320 convert_move (SUBREG_REG (target), temp,
5321 SUBREG_PROMOTED_SIGN (target));
5323 return NULL_RTX;
5325 else if ((TREE_CODE (exp) == STRING_CST
5326 || (TREE_CODE (exp) == MEM_REF
5327 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5328 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5329 == STRING_CST
5330 && integer_zerop (TREE_OPERAND (exp, 1))))
5331 && !nontemporal && !call_param_p
5332 && MEM_P (target))
5334 /* Optimize initialization of an array with a STRING_CST. */
5335 HOST_WIDE_INT exp_len, str_copy_len;
5336 rtx dest_mem;
5337 tree str = TREE_CODE (exp) == STRING_CST
5338 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5340 exp_len = int_expr_size (exp);
5341 if (exp_len <= 0)
5342 goto normal_expr;
5344 if (TREE_STRING_LENGTH (str) <= 0)
5345 goto normal_expr;
5347 str_copy_len = strlen (TREE_STRING_POINTER (str));
5348 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5349 goto normal_expr;
5351 str_copy_len = TREE_STRING_LENGTH (str);
5352 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5353 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5355 str_copy_len += STORE_MAX_PIECES - 1;
5356 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5358 str_copy_len = MIN (str_copy_len, exp_len);
5359 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5360 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5361 MEM_ALIGN (target), false))
5362 goto normal_expr;
5364 dest_mem = target;
5366 dest_mem = store_by_pieces (dest_mem,
5367 str_copy_len, builtin_strncpy_read_str,
5368 CONST_CAST (char *,
5369 TREE_STRING_POINTER (str)),
5370 MEM_ALIGN (target), false,
5371 exp_len > str_copy_len ? 1 : 0);
5372 if (exp_len > str_copy_len)
5373 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5374 GEN_INT (exp_len - str_copy_len),
5375 BLOCK_OP_NORMAL);
5376 return NULL_RTX;
5378 else
5380 rtx tmp_target;
5382 normal_expr:
5383 /* If we want to use a nontemporal store, force the value to
5384 register first. */
5385 tmp_target = nontemporal ? NULL_RTX : target;
5386 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5387 (call_param_p
5388 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5389 &alt_rtl, false);
5391 /* Handle bounds returned by call. */
5392 if (TREE_CODE (exp) == CALL_EXPR)
5394 rtx bounds;
5395 chkp_split_slot (temp, &temp, &bounds);
5396 if (bounds && btarget)
5398 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5399 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5400 chkp_set_rtl_bounds (btarget, tmp);
5405 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5406 the same as that of TARGET, adjust the constant. This is needed, for
5407 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5408 only a word-sized value. */
5409 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5410 && TREE_CODE (exp) != ERROR_MARK
5411 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5412 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5413 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5415 /* If value was not generated in the target, store it there.
5416 Convert the value to TARGET's type first if necessary and emit the
5417 pending incrementations that have been queued when expanding EXP.
5418 Note that we cannot emit the whole queue blindly because this will
5419 effectively disable the POST_INC optimization later.
5421 If TEMP and TARGET compare equal according to rtx_equal_p, but
5422 one or both of them are volatile memory refs, we have to distinguish
5423 two cases:
5424 - expand_expr has used TARGET. In this case, we must not generate
5425 another copy. This can be detected by TARGET being equal according
5426 to == .
5427 - expand_expr has not used TARGET - that means that the source just
5428 happens to have the same RTX form. Since temp will have been created
5429 by expand_expr, it will compare unequal according to == .
5430 We must generate a copy in this case, to reach the correct number
5431 of volatile memory references. */
5433 if ((! rtx_equal_p (temp, target)
5434 || (temp != target && (side_effects_p (temp)
5435 || side_effects_p (target))))
5436 && TREE_CODE (exp) != ERROR_MARK
5437 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5438 but TARGET is not valid memory reference, TEMP will differ
5439 from TARGET although it is really the same location. */
5440 && !(alt_rtl
5441 && rtx_equal_p (alt_rtl, target)
5442 && !side_effects_p (alt_rtl)
5443 && !side_effects_p (target))
5444 /* If there's nothing to copy, don't bother. Don't call
5445 expr_size unless necessary, because some front-ends' (e.g. C++)
5446 expr_size hook must not be given objects that are not
5447 supposed to be bit-copied or bit-initialized. */
5448 && expr_size (exp) != const0_rtx)
5450 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5452 if (GET_MODE (target) == BLKmode)
5454 /* Handle calls that return BLKmode values in registers. */
5455 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5456 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5457 else
5458 store_bit_field (target,
5459 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5460 0, 0, 0, GET_MODE (temp), temp);
5462 else
5463 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5466 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5468 /* Handle copying a string constant into an array. The string
5469 constant may be shorter than the array. So copy just the string's
5470 actual length, and clear the rest. First get the size of the data
5471 type of the string, which is actually the size of the target. */
5472 rtx size = expr_size (exp);
5474 if (CONST_INT_P (size)
5475 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5476 emit_block_move (target, temp, size,
5477 (call_param_p
5478 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5479 else
5481 machine_mode pointer_mode
5482 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5483 machine_mode address_mode = get_address_mode (target);
5485 /* Compute the size of the data to copy from the string. */
5486 tree copy_size
5487 = size_binop_loc (loc, MIN_EXPR,
5488 make_tree (sizetype, size),
5489 size_int (TREE_STRING_LENGTH (exp)));
5490 rtx copy_size_rtx
5491 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5492 (call_param_p
5493 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5494 rtx_code_label *label = 0;
5496 /* Copy that much. */
5497 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5498 TYPE_UNSIGNED (sizetype));
5499 emit_block_move (target, temp, copy_size_rtx,
5500 (call_param_p
5501 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5503 /* Figure out how much is left in TARGET that we have to clear.
5504 Do all calculations in pointer_mode. */
5505 if (CONST_INT_P (copy_size_rtx))
5507 size = plus_constant (address_mode, size,
5508 -INTVAL (copy_size_rtx));
5509 target = adjust_address (target, BLKmode,
5510 INTVAL (copy_size_rtx));
5512 else
5514 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5515 copy_size_rtx, NULL_RTX, 0,
5516 OPTAB_LIB_WIDEN);
5518 if (GET_MODE (copy_size_rtx) != address_mode)
5519 copy_size_rtx = convert_to_mode (address_mode,
5520 copy_size_rtx,
5521 TYPE_UNSIGNED (sizetype));
5523 target = offset_address (target, copy_size_rtx,
5524 highest_pow2_factor (copy_size));
5525 label = gen_label_rtx ();
5526 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5527 GET_MODE (size), 0, label);
5530 if (size != const0_rtx)
5531 clear_storage (target, size, BLOCK_OP_NORMAL);
5533 if (label)
5534 emit_label (label);
5537 /* Handle calls that return values in multiple non-contiguous locations.
5538 The Irix 6 ABI has examples of this. */
5539 else if (GET_CODE (target) == PARALLEL)
5541 if (GET_CODE (temp) == PARALLEL)
5542 emit_group_move (target, temp);
5543 else
5544 emit_group_load (target, temp, TREE_TYPE (exp),
5545 int_size_in_bytes (TREE_TYPE (exp)));
5547 else if (GET_CODE (temp) == PARALLEL)
5548 emit_group_store (target, temp, TREE_TYPE (exp),
5549 int_size_in_bytes (TREE_TYPE (exp)));
5550 else if (GET_MODE (temp) == BLKmode)
5551 emit_block_move (target, temp, expr_size (exp),
5552 (call_param_p
5553 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5554 /* If we emit a nontemporal store, there is nothing else to do. */
5555 else if (nontemporal && emit_storent_insn (target, temp))
5557 else
5559 temp = force_operand (temp, target);
5560 if (temp != target)
5561 emit_move_insn (target, temp);
5565 return NULL_RTX;
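/* A purely illustrative example of the STRING_CST path above (the
   declaration is hypothetical): for

     char buf[8] = "abc";

   expr_size (exp) is 8 while TREE_STRING_LENGTH (exp) is 4, so the
   else branch computes copy_size = MIN (8, 4), emit_block_move copies
   the four string bytes (including the terminating NUL), and
   clear_storage then zeroes the remaining four bytes of BUF.  */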
5568 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5570 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5572 return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5575 /* Return true if field F of structure TYPE is a flexible array. */
5577 static bool
5578 flexible_array_member_p (const_tree f, const_tree type)
5580 const_tree tf;
5582 tf = TREE_TYPE (f);
5583 return (DECL_CHAIN (f) == NULL
5584 && TREE_CODE (tf) == ARRAY_TYPE
5585 && TYPE_DOMAIN (tf)
5586 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5587 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5588 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5589 && int_size_in_bytes (type) >= 0);
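/* Illustration only (the type and field names are hypothetical): in

     struct s { int n; char data[]; };

   DATA is the last field, its array type has a zero lower bound and
   no upper bound, and S itself has a known size, so the predicate
   above returns true for DATA; it returns false for any field that
   is followed by another field or whose array type has an upper
   bound.  */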
5592 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5593 must have in order for it to completely initialize a value of type TYPE.
5594 Return -1 if the number isn't known.
5596 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5598 static HOST_WIDE_INT
5599 count_type_elements (const_tree type, bool for_ctor_p)
5601 switch (TREE_CODE (type))
5603 case ARRAY_TYPE:
5605 tree nelts;
5607 nelts = array_type_nelts (type);
5608 if (nelts && tree_fits_uhwi_p (nelts))
5610 unsigned HOST_WIDE_INT n;
5612 n = tree_to_uhwi (nelts) + 1;
5613 if (n == 0 || for_ctor_p)
5614 return n;
5615 else
5616 return n * count_type_elements (TREE_TYPE (type), false);
5618 return for_ctor_p ? -1 : 1;
5621 case RECORD_TYPE:
5623 unsigned HOST_WIDE_INT n;
5624 tree f;
5626 n = 0;
5627 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5628 if (TREE_CODE (f) == FIELD_DECL)
5630 if (!for_ctor_p)
5631 n += count_type_elements (TREE_TYPE (f), false);
5632 else if (!flexible_array_member_p (f, type))
5633 /* Don't count flexible arrays, which are not supposed
5634 to be initialized. */
5635 n += 1;
5638 return n;
5641 case UNION_TYPE:
5642 case QUAL_UNION_TYPE:
5644 tree f;
5645 HOST_WIDE_INT n, m;
5647 gcc_assert (!for_ctor_p);
5648 /* Estimate the number of scalars in each field and pick the
5649 maximum. Other estimates would do instead; the idea is simply
5650 to make sure that the estimate is not sensitive to the ordering
5651 of the fields. */
5652 n = 1;
5653 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5654 if (TREE_CODE (f) == FIELD_DECL)
5656 m = count_type_elements (TREE_TYPE (f), false);
5657 /* If the field doesn't span the whole union, add an extra
5658 scalar for the rest. */
5659 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5660 TYPE_SIZE (type)) != 1)
5661 m++;
5662 if (n < m)
5663 n = m;
5665 return n;
5668 case COMPLEX_TYPE:
5669 return 2;
5671 case VECTOR_TYPE:
5672 return TYPE_VECTOR_SUBPARTS (type);
5674 case INTEGER_TYPE:
5675 case REAL_TYPE:
5676 case FIXED_POINT_TYPE:
5677 case ENUMERAL_TYPE:
5678 case BOOLEAN_TYPE:
5679 case POINTER_TYPE:
5680 case OFFSET_TYPE:
5681 case REFERENCE_TYPE:
5682 case NULLPTR_TYPE:
5683 return 1;
5685 case ERROR_MARK:
5686 return 0;
5688 case VOID_TYPE:
5689 case METHOD_TYPE:
5690 case FUNCTION_TYPE:
5691 case LANG_TYPE:
5692 default:
5693 gcc_unreachable ();
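/* A small worked example (the type is hypothetical): for

     struct p { int x; int y[3]; };

   count_type_elements returns 2 when FOR_CTOR_P is true, because a
   complete constructor needs one top-level element per field, and 4
   when FOR_CTOR_P is false: one scalar for X plus 3 * 1 for the
   elements of Y.  */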
5697 /* Helper for categorize_ctor_elements. Identical interface. */
5699 static bool
5700 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5701 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5703 unsigned HOST_WIDE_INT idx;
5704 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5705 tree value, purpose, elt_type;
5707 /* Whether CTOR is a valid constant initializer, in accordance with what
5708 initializer_constant_valid_p does. If inferred from the constructor
5709 elements, true until proven otherwise. */
5710 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5711 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5713 nz_elts = 0;
5714 init_elts = 0;
5715 num_fields = 0;
5716 elt_type = NULL_TREE;
5718 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5720 HOST_WIDE_INT mult = 1;
5722 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5724 tree lo_index = TREE_OPERAND (purpose, 0);
5725 tree hi_index = TREE_OPERAND (purpose, 1);
5727 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5728 mult = (tree_to_uhwi (hi_index)
5729 - tree_to_uhwi (lo_index) + 1);
5731 num_fields += mult;
5732 elt_type = TREE_TYPE (value);
5734 switch (TREE_CODE (value))
5736 case CONSTRUCTOR:
5738 HOST_WIDE_INT nz = 0, ic = 0;
5740 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5741 p_complete);
5743 nz_elts += mult * nz;
5744 init_elts += mult * ic;
5746 if (const_from_elts_p && const_p)
5747 const_p = const_elt_p;
5749 break;
5751 case INTEGER_CST:
5752 case REAL_CST:
5753 case FIXED_CST:
5754 if (!initializer_zerop (value))
5755 nz_elts += mult;
5756 init_elts += mult;
5757 break;
5759 case STRING_CST:
5760 nz_elts += mult * TREE_STRING_LENGTH (value);
5761 init_elts += mult * TREE_STRING_LENGTH (value);
5762 break;
5764 case COMPLEX_CST:
5765 if (!initializer_zerop (TREE_REALPART (value)))
5766 nz_elts += mult;
5767 if (!initializer_zerop (TREE_IMAGPART (value)))
5768 nz_elts += mult;
5769 init_elts += mult;
5770 break;
5772 case VECTOR_CST:
5774 unsigned i;
5775 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5777 tree v = VECTOR_CST_ELT (value, i);
5778 if (!initializer_zerop (v))
5779 nz_elts += mult;
5780 init_elts += mult;
5783 break;
5785 default:
5787 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5788 nz_elts += mult * tc;
5789 init_elts += mult * tc;
5791 if (const_from_elts_p && const_p)
5792 const_p = initializer_constant_valid_p (value, elt_type)
5793 != NULL_TREE;
5795 break;
5799 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5800 num_fields, elt_type))
5801 *p_complete = false;
5803 *p_nz_elts += nz_elts;
5804 *p_init_elts += init_elts;
5806 return const_p;
5809 /* Examine CTOR to discover:
5810 * how many scalar fields are set to nonzero values,
5811 and place it in *P_NZ_ELTS;
5812 * how many scalar fields in total are in CTOR,
5813 and place it in *P_INIT_ELTS;
5814 * whether the constructor is complete -- in the sense that every
5815 meaningful byte is explicitly given a value --
5816 and place it in *P_COMPLETE.
5818 Return whether or not CTOR is a valid static constant initializer, the same
5819 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5821 bool
5822 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5823 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5825 *p_nz_elts = 0;
5826 *p_init_elts = 0;
5827 *p_complete = true;
5829 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
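/* For instance (the aggregate is hypothetical), given a CONSTRUCTOR
   for a three-field record with the elements 0, 5 and 7, the walk in
   categorize_ctor_elements_1 sets *P_NZ_ELTS to 2 (the explicit zero
   is not counted as nonzero), *P_INIT_ELTS to 3, and leaves
   *P_COMPLETE true because all three fields are initialized.  */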
5832 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5833 of which had type LAST_TYPE. Each element was itself a complete
5834 initializer, in the sense that every meaningful byte was explicitly
5835 given a value. Return true if the same is true for the constructor
5836 as a whole. */
5838 bool
5839 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5840 const_tree last_type)
5842 if (TREE_CODE (type) == UNION_TYPE
5843 || TREE_CODE (type) == QUAL_UNION_TYPE)
5845 if (num_elts == 0)
5846 return false;
5848 gcc_assert (num_elts == 1 && last_type);
5850 /* ??? We could look at each element of the union, and find the
5851 largest element, which would avoid comparing the size of the
5852 initialized element against any tail padding in the union.
5853 Doesn't seem worth the effort... */
5854 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5857 return count_type_elements (type, true) == num_elts;
5860 /* Return 1 if EXP contains mostly (3/4) zeros. */
5862 static int
5863 mostly_zeros_p (const_tree exp)
5865 if (TREE_CODE (exp) == CONSTRUCTOR)
5867 HOST_WIDE_INT nz_elts, init_elts;
5868 bool complete_p;
5870 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5871 return !complete_p || nz_elts < init_elts / 4;
5874 return initializer_zerop (exp);
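/* For example, a complete CONSTRUCTOR with eight scalar elements of
   which one is nonzero satisfies nz_elts < init_elts / 4 (1 < 2) and
   is considered mostly zero, while one with four elements and one
   nonzero value (1 < 1) is not; an incomplete constructor is always
   treated as mostly zero, since the missing parts are implicitly
   zero.  */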
5877 /* Return 1 if EXP contains all zeros. */
5879 static int
5880 all_zeros_p (const_tree exp)
5882 if (TREE_CODE (exp) == CONSTRUCTOR)
5884 HOST_WIDE_INT nz_elts, init_elts;
5885 bool complete_p;
5887 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5888 return nz_elts == 0;
5891 return initializer_zerop (exp);
5894 /* Helper function for store_constructor.
5895 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5896 CLEARED is as for store_constructor.
5897 ALIAS_SET is the alias set to use for any stores.
5899 This provides a recursive shortcut back to store_constructor when it isn't
5900 necessary to go through store_field. This is so that we can pass through
5901 the cleared field to let store_constructor know that we may not have to
5902 clear a substructure if the outer structure has already been cleared. */
5904 static void
5905 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5906 HOST_WIDE_INT bitpos, machine_mode mode,
5907 tree exp, int cleared, alias_set_type alias_set)
5909 if (TREE_CODE (exp) == CONSTRUCTOR
5910 /* We can only call store_constructor recursively if the size and
5911 bit position are on a byte boundary. */
5912 && bitpos % BITS_PER_UNIT == 0
5913 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5914 /* If we have a nonzero bitpos for a register target, then we just
5915 let store_field do the bitfield handling. This is unlikely to
5916 generate unnecessary clear instructions anyway. */
5917 && (bitpos == 0 || MEM_P (target)))
5919 if (MEM_P (target))
5920 target
5921 = adjust_address (target,
5922 GET_MODE (target) == BLKmode
5923 || 0 != (bitpos
5924 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5925 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5928 /* Update the alias set, if required. */
5929 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5930 && MEM_ALIAS_SET (target) != 0)
5932 target = copy_rtx (target);
5933 set_mem_alias_set (target, alias_set);
5936 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5938 else
5939 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5943 /* Returns the number of FIELD_DECLs in TYPE. */
5945 static int
5946 fields_length (const_tree type)
5948 tree t = TYPE_FIELDS (type);
5949 int count = 0;
5951 for (; t; t = DECL_CHAIN (t))
5952 if (TREE_CODE (t) == FIELD_DECL)
5953 ++count;
5955 return count;
5959 /* Store the value of constructor EXP into the rtx TARGET.
5960 TARGET is either a REG or a MEM; we know it cannot conflict, since
5961 safe_from_p has been called.
5962 CLEARED is true if TARGET is known to have been zero'd.
5963 SIZE is the number of bytes of TARGET we are allowed to modify: this
5964 may not be the same as the size of EXP if we are assigning to a field
5965 which has been packed to exclude padding bits. */
5967 static void
5968 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5970 tree type = TREE_TYPE (exp);
5971 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5973 switch (TREE_CODE (type))
5975 case RECORD_TYPE:
5976 case UNION_TYPE:
5977 case QUAL_UNION_TYPE:
5979 unsigned HOST_WIDE_INT idx;
5980 tree field, value;
5982 /* If size is zero or the target is already cleared, do nothing. */
5983 if (size == 0 || cleared)
5984 cleared = 1;
5985 /* We either clear the aggregate or indicate the value is dead. */
5986 else if ((TREE_CODE (type) == UNION_TYPE
5987 || TREE_CODE (type) == QUAL_UNION_TYPE)
5988 && ! CONSTRUCTOR_ELTS (exp))
5989 /* If the constructor is empty, clear the union. */
5991 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5992 cleared = 1;
5995 /* If we are building a static constructor into a register,
5996 set the initial value as zero so we can fold the value into
5997 a constant. But if more than one register is involved,
5998 this probably loses. */
5999 else if (REG_P (target) && TREE_STATIC (exp)
6000 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6002 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6003 cleared = 1;
6006 /* If the constructor has fewer fields than the structure or
6007 if we are initializing the structure to mostly zeros, clear
6008 the whole structure first. Don't do this if TARGET is a
6009 register whose mode size isn't equal to SIZE since
6010 clear_storage can't handle this case. */
6011 else if (size > 0
6012 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6013 != fields_length (type))
6014 || mostly_zeros_p (exp))
6015 && (!REG_P (target)
6016 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6017 == size)))
6019 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6020 cleared = 1;
6023 if (REG_P (target) && !cleared)
6024 emit_clobber (target);
6026 /* Store each element of the constructor into the
6027 corresponding field of TARGET. */
6028 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6030 machine_mode mode;
6031 HOST_WIDE_INT bitsize;
6032 HOST_WIDE_INT bitpos = 0;
6033 tree offset;
6034 rtx to_rtx = target;
6036 /* Just ignore missing fields. We cleared the whole
6037 structure, above, if any fields are missing. */
6038 if (field == 0)
6039 continue;
6041 if (cleared && initializer_zerop (value))
6042 continue;
6044 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6045 bitsize = tree_to_uhwi (DECL_SIZE (field));
6046 else
6047 bitsize = -1;
6049 mode = DECL_MODE (field);
6050 if (DECL_BIT_FIELD (field))
6051 mode = VOIDmode;
6053 offset = DECL_FIELD_OFFSET (field);
6054 if (tree_fits_shwi_p (offset)
6055 && tree_fits_shwi_p (bit_position (field)))
6057 bitpos = int_bit_position (field);
6058 offset = 0;
6060 else
6061 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6063 if (offset)
6065 machine_mode address_mode;
6066 rtx offset_rtx;
6068 offset
6069 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6070 make_tree (TREE_TYPE (exp),
6071 target));
6073 offset_rtx = expand_normal (offset);
6074 gcc_assert (MEM_P (to_rtx));
6076 address_mode = get_address_mode (to_rtx);
6077 if (GET_MODE (offset_rtx) != address_mode)
6078 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6080 to_rtx = offset_address (to_rtx, offset_rtx,
6081 highest_pow2_factor (offset));
6084 /* If this initializes a field that is smaller than a
6085 word, at the start of a word, try to widen it to a full
6086 word. This special case allows us to output C++ member
6087 function initializations in a form that the optimizers
6088 can understand. */
6089 if (WORD_REGISTER_OPERATIONS
6090 && REG_P (target)
6091 && bitsize < BITS_PER_WORD
6092 && bitpos % BITS_PER_WORD == 0
6093 && GET_MODE_CLASS (mode) == MODE_INT
6094 && TREE_CODE (value) == INTEGER_CST
6095 && exp_size >= 0
6096 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6098 tree type = TREE_TYPE (value);
6100 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6102 type = lang_hooks.types.type_for_mode
6103 (word_mode, TYPE_UNSIGNED (type));
6104 value = fold_convert (type, value);
6107 if (BYTES_BIG_ENDIAN)
6108 value
6109 = fold_build2 (LSHIFT_EXPR, type, value,
6110 build_int_cst (type,
6111 BITS_PER_WORD - bitsize));
6112 bitsize = BITS_PER_WORD;
6113 mode = word_mode;
6116 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6117 && DECL_NONADDRESSABLE_P (field))
6119 to_rtx = copy_rtx (to_rtx);
6120 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6123 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6124 value, cleared,
6125 get_alias_set (TREE_TYPE (field)));
6127 break;
6129 case ARRAY_TYPE:
6131 tree value, index;
6132 unsigned HOST_WIDE_INT i;
6133 int need_to_clear;
6134 tree domain;
6135 tree elttype = TREE_TYPE (type);
6136 int const_bounds_p;
6137 HOST_WIDE_INT minelt = 0;
6138 HOST_WIDE_INT maxelt = 0;
6140 domain = TYPE_DOMAIN (type);
6141 const_bounds_p = (TYPE_MIN_VALUE (domain)
6142 && TYPE_MAX_VALUE (domain)
6143 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6144 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6146 /* If we have constant bounds for the range of the type, get them. */
6147 if (const_bounds_p)
6149 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6150 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6153 /* If the constructor has fewer elements than the array, clear
6154 the whole array first. Similarly if this is a static
6155 constructor of a non-BLKmode object. */
6156 if (cleared)
6157 need_to_clear = 0;
6158 else if (REG_P (target) && TREE_STATIC (exp))
6159 need_to_clear = 1;
6160 else
6162 unsigned HOST_WIDE_INT idx;
6163 tree index, value;
6164 HOST_WIDE_INT count = 0, zero_count = 0;
6165 need_to_clear = ! const_bounds_p;
6167 /* This loop is a more accurate version of the loop in
6168 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6169 is also needed to check for missing elements. */
6170 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6172 HOST_WIDE_INT this_node_count;
6174 if (need_to_clear)
6175 break;
6177 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6179 tree lo_index = TREE_OPERAND (index, 0);
6180 tree hi_index = TREE_OPERAND (index, 1);
6182 if (! tree_fits_uhwi_p (lo_index)
6183 || ! tree_fits_uhwi_p (hi_index))
6185 need_to_clear = 1;
6186 break;
6189 this_node_count = (tree_to_uhwi (hi_index)
6190 - tree_to_uhwi (lo_index) + 1);
6192 else
6193 this_node_count = 1;
6195 count += this_node_count;
6196 if (mostly_zeros_p (value))
6197 zero_count += this_node_count;
6200 /* Clear the entire array first if there are any missing
6201 elements, or if the incidence of zero elements is >=
6202 75%. */
6203 if (! need_to_clear
6204 && (count < maxelt - minelt + 1
6205 || 4 * zero_count >= 3 * count))
6206 need_to_clear = 1;
6209 if (need_to_clear && size > 0)
6211 if (REG_P (target))
6212 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6213 else
6214 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6215 cleared = 1;
6218 if (!cleared && REG_P (target))
6219 /* Inform later passes that the old value is dead. */
6220 emit_clobber (target);
6222 /* Store each element of the constructor into the
6223 corresponding element of TARGET, determined by counting the
6224 elements. */
6225 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6227 machine_mode mode;
6228 HOST_WIDE_INT bitsize;
6229 HOST_WIDE_INT bitpos;
6230 rtx xtarget = target;
6232 if (cleared && initializer_zerop (value))
6233 continue;
6235 mode = TYPE_MODE (elttype);
6236 if (mode == BLKmode)
6237 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6238 ? tree_to_uhwi (TYPE_SIZE (elttype))
6239 : -1);
6240 else
6241 bitsize = GET_MODE_BITSIZE (mode);
6243 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6245 tree lo_index = TREE_OPERAND (index, 0);
6246 tree hi_index = TREE_OPERAND (index, 1);
6247 rtx index_r, pos_rtx;
6248 HOST_WIDE_INT lo, hi, count;
6249 tree position;
6251 /* If the range is constant and "small", unroll the loop. */
6252 if (const_bounds_p
6253 && tree_fits_shwi_p (lo_index)
6254 && tree_fits_shwi_p (hi_index)
6255 && (lo = tree_to_shwi (lo_index),
6256 hi = tree_to_shwi (hi_index),
6257 count = hi - lo + 1,
6258 (!MEM_P (target)
6259 || count <= 2
6260 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6261 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6262 <= 40 * 8)))))
6264 lo -= minelt; hi -= minelt;
6265 for (; lo <= hi; lo++)
6267 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6269 if (MEM_P (target)
6270 && !MEM_KEEP_ALIAS_SET_P (target)
6271 && TREE_CODE (type) == ARRAY_TYPE
6272 && TYPE_NONALIASED_COMPONENT (type))
6274 target = copy_rtx (target);
6275 MEM_KEEP_ALIAS_SET_P (target) = 1;
6278 store_constructor_field
6279 (target, bitsize, bitpos, mode, value, cleared,
6280 get_alias_set (elttype));
6283 else
6285 rtx_code_label *loop_start = gen_label_rtx ();
6286 rtx_code_label *loop_end = gen_label_rtx ();
6287 tree exit_cond;
6289 expand_normal (hi_index);
6291 index = build_decl (EXPR_LOCATION (exp),
6292 VAR_DECL, NULL_TREE, domain);
6293 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6294 SET_DECL_RTL (index, index_r);
6295 store_expr (lo_index, index_r, 0, false);
6297 /* Build the head of the loop. */
6298 do_pending_stack_adjust ();
6299 emit_label (loop_start);
6301 /* Assign value to element index. */
6302 position =
6303 fold_convert (ssizetype,
6304 fold_build2 (MINUS_EXPR,
6305 TREE_TYPE (index),
6306 index,
6307 TYPE_MIN_VALUE (domain)));
6309 position =
6310 size_binop (MULT_EXPR, position,
6311 fold_convert (ssizetype,
6312 TYPE_SIZE_UNIT (elttype)));
6314 pos_rtx = expand_normal (position);
6315 xtarget = offset_address (target, pos_rtx,
6316 highest_pow2_factor (position));
6317 xtarget = adjust_address (xtarget, mode, 0);
6318 if (TREE_CODE (value) == CONSTRUCTOR)
6319 store_constructor (value, xtarget, cleared,
6320 bitsize / BITS_PER_UNIT);
6321 else
6322 store_expr (value, xtarget, 0, false);
6324 /* Generate a conditional jump to exit the loop. */
6325 exit_cond = build2 (LT_EXPR, integer_type_node,
6326 index, hi_index);
6327 jumpif (exit_cond, loop_end, -1);
6329 /* Update the loop counter, and jump to the head of
6330 the loop. */
6331 expand_assignment (index,
6332 build2 (PLUS_EXPR, TREE_TYPE (index),
6333 index, integer_one_node),
6334 false);
6336 emit_jump (loop_start);
6338 /* Build the end of the loop. */
6339 emit_label (loop_end);
6342 else if ((index != 0 && ! tree_fits_shwi_p (index))
6343 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6345 tree position;
6347 if (index == 0)
6348 index = ssize_int (1);
6350 if (minelt)
6351 index = fold_convert (ssizetype,
6352 fold_build2 (MINUS_EXPR,
6353 TREE_TYPE (index),
6354 index,
6355 TYPE_MIN_VALUE (domain)));
6357 position =
6358 size_binop (MULT_EXPR, index,
6359 fold_convert (ssizetype,
6360 TYPE_SIZE_UNIT (elttype)));
6361 xtarget = offset_address (target,
6362 expand_normal (position),
6363 highest_pow2_factor (position));
6364 xtarget = adjust_address (xtarget, mode, 0);
6365 store_expr (value, xtarget, 0, false);
6367 else
6369 if (index != 0)
6370 bitpos = ((tree_to_shwi (index) - minelt)
6371 * tree_to_uhwi (TYPE_SIZE (elttype)));
6372 else
6373 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6375 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6376 && TREE_CODE (type) == ARRAY_TYPE
6377 && TYPE_NONALIASED_COMPONENT (type))
6379 target = copy_rtx (target);
6380 MEM_KEEP_ALIAS_SET_P (target) = 1;
6382 store_constructor_field (target, bitsize, bitpos, mode, value,
6383 cleared, get_alias_set (elttype));
6386 break;
6389 case VECTOR_TYPE:
6391 unsigned HOST_WIDE_INT idx;
6392 constructor_elt *ce;
6393 int i;
6394 int need_to_clear;
6395 int icode = CODE_FOR_nothing;
6396 tree elttype = TREE_TYPE (type);
6397 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6398 machine_mode eltmode = TYPE_MODE (elttype);
6399 HOST_WIDE_INT bitsize;
6400 HOST_WIDE_INT bitpos;
6401 rtvec vector = NULL;
6402 unsigned n_elts;
6403 alias_set_type alias;
6405 gcc_assert (eltmode != BLKmode);
6407 n_elts = TYPE_VECTOR_SUBPARTS (type);
6408 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6410 machine_mode mode = GET_MODE (target);
6412 icode = (int) optab_handler (vec_init_optab, mode);
6413 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6414 if (icode != CODE_FOR_nothing)
6416 tree value;
6418 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6419 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6421 icode = CODE_FOR_nothing;
6422 break;
6425 if (icode != CODE_FOR_nothing)
6427 unsigned int i;
6429 vector = rtvec_alloc (n_elts);
6430 for (i = 0; i < n_elts; i++)
6431 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6435 /* If the constructor has fewer elements than the vector,
6436 clear the whole array first. Similarly if this is a static
6437 constructor of a non-BLKmode object. */
6438 if (cleared)
6439 need_to_clear = 0;
6440 else if (REG_P (target) && TREE_STATIC (exp))
6441 need_to_clear = 1;
6442 else
6444 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6445 tree value;
6447 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6449 int n_elts_here = tree_to_uhwi
6450 (int_const_binop (TRUNC_DIV_EXPR,
6451 TYPE_SIZE (TREE_TYPE (value)),
6452 TYPE_SIZE (elttype)));
6454 count += n_elts_here;
6455 if (mostly_zeros_p (value))
6456 zero_count += n_elts_here;
6459 /* Clear the entire vector first if there are any missing elements,
6460 or if the incidence of zero elements is >= 75%. */
6461 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6464 if (need_to_clear && size > 0 && !vector)
6466 if (REG_P (target))
6467 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6468 else
6469 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6470 cleared = 1;
6473 /* Inform later passes that the old value is dead. */
6474 if (!cleared && !vector && REG_P (target))
6475 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6477 if (MEM_P (target))
6478 alias = MEM_ALIAS_SET (target);
6479 else
6480 alias = get_alias_set (elttype);
6482 /* Store each element of the constructor into the corresponding
6483 element of TARGET, determined by counting the elements. */
6484 for (idx = 0, i = 0;
6485 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6486 idx++, i += bitsize / elt_size)
6488 HOST_WIDE_INT eltpos;
6489 tree value = ce->value;
6491 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6492 if (cleared && initializer_zerop (value))
6493 continue;
6495 if (ce->index)
6496 eltpos = tree_to_uhwi (ce->index);
6497 else
6498 eltpos = i;
6500 if (vector)
6502 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6503 elements. */
6504 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6505 RTVEC_ELT (vector, eltpos)
6506 = expand_normal (value);
6508 else
6510 machine_mode value_mode =
6511 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6512 ? TYPE_MODE (TREE_TYPE (value))
6513 : eltmode;
6514 bitpos = eltpos * elt_size;
6515 store_constructor_field (target, bitsize, bitpos, value_mode,
6516 value, cleared, alias);
6520 if (vector)
6521 emit_insn (GEN_FCN (icode)
6522 (target,
6523 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6524 break;
6527 default:
6528 gcc_unreachable ();
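/* A sketch of how the ARRAY_TYPE case above behaves for a
   hypothetical GNU C range initializer

     int a[100] = { [0 ... 99] = 1 };

   No element is missing and the value is not mostly zero, so the
   array is not pre-cleared; the single RANGE_EXPR covers 100 ints,
   which exceeds the 40 * 8 bit unrolling budget for a MEM target, so
   the range is expanded as the small runtime loop built around
   LOOP_START/LOOP_END rather than being unrolled element by
   element.  */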
6532 /* Store the value of EXP (an expression tree)
6533 into a subfield of TARGET which has mode MODE and occupies
6534 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6535 If MODE is VOIDmode, it means that we are storing into a bit-field.
6537 BITREGION_START is the bitpos of the first bitfield in this region.
6538 BITREGION_END is the bitpos of the ending bitfield in this region.
6539 These two fields are 0, if the C++ memory model does not apply,
6540 or we are not interested in keeping track of bitfield regions.
6542 Always return const0_rtx unless we have something particular to
6543 return.
6545 ALIAS_SET is the alias set for the destination. This value will
6546 (in general) be different from that for TARGET, since TARGET is a
6547 reference to the containing structure.
6549 If NONTEMPORAL is true, try generating a nontemporal store. */
6551 static rtx
6552 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6553 unsigned HOST_WIDE_INT bitregion_start,
6554 unsigned HOST_WIDE_INT bitregion_end,
6555 machine_mode mode, tree exp,
6556 alias_set_type alias_set, bool nontemporal)
6558 if (TREE_CODE (exp) == ERROR_MARK)
6559 return const0_rtx;
6561 /* If we have nothing to store, do nothing unless the expression has
6562 side-effects. */
6563 if (bitsize == 0)
6564 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6566 if (GET_CODE (target) == CONCAT)
6568 /* We're storing into a struct containing a single __complex. */
6570 gcc_assert (!bitpos);
6571 return store_expr (exp, target, 0, nontemporal);
6574 /* If the structure is in a register or if the component
6575 is a bit field, we cannot use addressing to access it.
6576 Use bit-field techniques or SUBREG to store in it. */
6578 if (mode == VOIDmode
6579 || (mode != BLKmode && ! direct_store[(int) mode]
6580 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6581 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6582 || REG_P (target)
6583 || GET_CODE (target) == SUBREG
6584 /* If the field isn't aligned enough to store as an ordinary memref,
6585 store it as a bit field. */
6586 || (mode != BLKmode
6587 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6588 || bitpos % GET_MODE_ALIGNMENT (mode))
6589 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6590 || (bitpos % BITS_PER_UNIT != 0)))
6591 || (bitsize >= 0 && mode != BLKmode
6592 && GET_MODE_BITSIZE (mode) > bitsize)
6593 /* If the RHS and field are a constant size and the size of the
6594 RHS isn't the same size as the bitfield, we must use bitfield
6595 operations. */
6596 || (bitsize >= 0
6597 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6598 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6599 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6600 we will handle specially below. */
6601 && !(TREE_CODE (exp) == CONSTRUCTOR
6602 && bitsize % BITS_PER_UNIT == 0))
6603 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6604 decl we must use bitfield operations. */
6605 || (bitsize >= 0
6606 && TREE_CODE (exp) == MEM_REF
6607 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6608 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6609 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6610 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6612 rtx temp;
6613 gimple *nop_def;
6615 /* Using bitwise copy is not safe for TREE_ADDRESSABLE types. */
6616 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (exp)));
6618 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6619 implies a mask operation. If the precision is the same size as
6620 the field we're storing into, that mask is redundant. This is
6621 particularly common with bit field assignments generated by the
6622 C front end. */
6623 nop_def = get_def_for_expr (exp, NOP_EXPR);
6624 if (nop_def)
6626 tree type = TREE_TYPE (exp);
6627 if (INTEGRAL_TYPE_P (type)
6628 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6629 && bitsize == TYPE_PRECISION (type))
6631 tree op = gimple_assign_rhs1 (nop_def);
6632 type = TREE_TYPE (op);
6633 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6634 exp = op;
6638 temp = expand_normal (exp);
6640 /* If BITSIZE is narrower than the size of the type of EXP
6641 we will be narrowing TEMP. Normally, what's wanted are the
6642 low-order bits. However, if EXP's type is a record and this is a
6643 big-endian machine, we want the upper BITSIZE bits. */
6644 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6645 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6646 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6647 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6648 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6649 NULL_RTX, 1);
6651 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6652 if (mode != VOIDmode && mode != BLKmode
6653 && mode != TYPE_MODE (TREE_TYPE (exp)))
6654 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6656 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6657 are both BLKmode, both must be in memory and BITPOS must be aligned
6658 on a byte boundary. If so, we simply do a block copy. Likewise for
6659 a BLKmode-like TARGET. */
6660 if (GET_CODE (temp) != PARALLEL
6661 && GET_MODE (temp) == BLKmode
6662 && (GET_MODE (target) == BLKmode
6663 || (MEM_P (target)
6664 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6665 && (bitpos % BITS_PER_UNIT) == 0
6666 && (bitsize % BITS_PER_UNIT) == 0)))
6668 gcc_assert (MEM_P (target) && MEM_P (temp)
6669 && (bitpos % BITS_PER_UNIT) == 0);
6671 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6672 emit_block_move (target, temp,
6673 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6674 / BITS_PER_UNIT),
6675 BLOCK_OP_NORMAL);
6677 return const0_rtx;
6680 /* Handle calls that return values in multiple non-contiguous locations.
6681 The Irix 6 ABI has examples of this. */
6682 if (GET_CODE (temp) == PARALLEL)
6684 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6685 rtx temp_target;
6686 if (mode == BLKmode || mode == VOIDmode)
6687 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6688 temp_target = gen_reg_rtx (mode);
6689 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6690 temp = temp_target;
6692 else if (mode == BLKmode)
6694 /* Handle calls that return BLKmode values in registers. */
6695 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6697 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6698 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6699 temp = temp_target;
6701 else
6703 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6704 rtx temp_target;
6705 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6706 temp_target = gen_reg_rtx (mode);
6707 temp_target
6708 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6709 temp_target, mode, mode);
6710 temp = temp_target;
6714 /* Store the value in the bitfield. */
6715 store_bit_field (target, bitsize, bitpos,
6716 bitregion_start, bitregion_end,
6717 mode, temp);
6719 return const0_rtx;
6721 else
6723 /* Now build a reference to just the desired component. */
6724 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6726 if (to_rtx == target)
6727 to_rtx = copy_rtx (to_rtx);
6729 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6730 set_mem_alias_set (to_rtx, alias_set);
6732 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6733 into a target smaller than its type; handle that case now. */
6734 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6736 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6737 store_constructor (exp, to_rtx, 0, bitsize/BITS_PER_UNIT);
6738 return to_rtx;
6741 return store_expr (exp, to_rtx, 0, nontemporal);
6745 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6746 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6747 codes and find the ultimate containing object, which we return.
6749 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6750 bit position, and *PUNSIGNEDP to the signedness of the field.
6751 If the position of the field is variable, we store a tree
6752 giving the variable offset (in units) in *POFFSET.
6753 This offset is in addition to the bit position.
6754 If the position is not variable, we store 0 in *POFFSET.
6756 If any of the extraction expressions is volatile,
6757 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6759 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6760 Otherwise, it is a mode that can be used to access the field.
6762 If the field describes a variable-sized object, *PMODE is set to
6763 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6764 this case, but the address of the object can be found.
6766 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6767 look through nodes that serve as markers of a greater alignment than
6768 the one that can be deduced from the expression. These nodes make it
6769 possible for front-ends to prevent temporaries from being created by
6770 the middle-end on alignment considerations. For that purpose, the
6771 normal operating mode at high-level is to always pass FALSE so that
6772 the ultimate containing object is really returned; moreover, the
6773 associated predicate handled_component_p will always return TRUE
6774 on these nodes, thus indicating that they are essentially handled
6775 by get_inner_reference. TRUE should only be passed when the caller
6776 is scanning the expression in order to build another representation
6777 and specifically knows how to handle these nodes; as such, this is
6778 the normal operating mode in the RTL expanders. */
6780 tree
6781 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6782 HOST_WIDE_INT *pbitpos, tree *poffset,
6783 machine_mode *pmode, int *punsignedp,
6784 int *pvolatilep, bool keep_aligning)
6786 tree size_tree = 0;
6787 machine_mode mode = VOIDmode;
6788 bool blkmode_bitfield = false;
6789 tree offset = size_zero_node;
6790 offset_int bit_offset = 0;
6792 /* First get the mode, signedness, and size. We do this from just the
6793 outermost expression. */
6794 *pbitsize = -1;
6795 if (TREE_CODE (exp) == COMPONENT_REF)
6797 tree field = TREE_OPERAND (exp, 1);
6798 size_tree = DECL_SIZE (field);
6799 if (flag_strict_volatile_bitfields > 0
6800 && TREE_THIS_VOLATILE (exp)
6801 && DECL_BIT_FIELD_TYPE (field)
6802 && DECL_MODE (field) != BLKmode)
6803 /* Volatile bitfields should be accessed in the mode of the
6804 field's type, not the mode computed based on the bit
6805 size. */
6806 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6807 else if (!DECL_BIT_FIELD (field))
6808 mode = DECL_MODE (field);
6809 else if (DECL_MODE (field) == BLKmode)
6810 blkmode_bitfield = true;
6812 *punsignedp = DECL_UNSIGNED (field);
6814 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6816 size_tree = TREE_OPERAND (exp, 1);
6817 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6818 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6820 /* For vector types, with the correct size of access, use the mode of
6821 the inner type. */
6822 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6823 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6824 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6825 mode = TYPE_MODE (TREE_TYPE (exp));
6827 else
6829 mode = TYPE_MODE (TREE_TYPE (exp));
6830 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6832 if (mode == BLKmode)
6833 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6834 else
6835 *pbitsize = GET_MODE_BITSIZE (mode);
6838 if (size_tree != 0)
6840 if (! tree_fits_uhwi_p (size_tree))
6841 mode = BLKmode, *pbitsize = -1;
6842 else
6843 *pbitsize = tree_to_uhwi (size_tree);
6846 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6847 and find the ultimate containing object. */
6848 while (1)
6850 switch (TREE_CODE (exp))
6852 case BIT_FIELD_REF:
6853 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6854 break;
6856 case COMPONENT_REF:
6858 tree field = TREE_OPERAND (exp, 1);
6859 tree this_offset = component_ref_field_offset (exp);
6861 /* If this field hasn't been filled in yet, don't go past it.
6862 This should only happen when folding expressions made during
6863 type construction. */
6864 if (this_offset == 0)
6865 break;
6867 offset = size_binop (PLUS_EXPR, offset, this_offset);
6868 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6870 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6872 break;
6874 case ARRAY_REF:
6875 case ARRAY_RANGE_REF:
6877 tree index = TREE_OPERAND (exp, 1);
6878 tree low_bound = array_ref_low_bound (exp);
6879 tree unit_size = array_ref_element_size (exp);
6881 /* We assume all arrays have sizes that are a multiple of a byte.
6882 First subtract the lower bound, if any, in the type of the
6883 index, then convert to sizetype and multiply by the size of
6884 the array element. */
6885 if (! integer_zerop (low_bound))
6886 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6887 index, low_bound);
6889 offset = size_binop (PLUS_EXPR, offset,
6890 size_binop (MULT_EXPR,
6891 fold_convert (sizetype, index),
6892 unit_size));
6894 break;
6896 case REALPART_EXPR:
6897 break;
6899 case IMAGPART_EXPR:
6900 bit_offset += *pbitsize;
6901 break;
6903 case VIEW_CONVERT_EXPR:
6904 if (keep_aligning && STRICT_ALIGNMENT
6905 && (TYPE_ALIGN (TREE_TYPE (exp))
6906 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6907 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6908 < BIGGEST_ALIGNMENT)
6909 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6910 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6911 goto done;
6912 break;
6914 case MEM_REF:
6915 /* Hand back the decl for MEM[&decl, off]. */
6916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6918 tree off = TREE_OPERAND (exp, 1);
6919 if (!integer_zerop (off))
6921 offset_int boff, coff = mem_ref_offset (exp);
6922 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6923 bit_offset += boff;
6925 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6927 goto done;
6929 default:
6930 goto done;
6933 /* If any reference in the chain is volatile, the effect is volatile. */
6934 if (TREE_THIS_VOLATILE (exp))
6935 *pvolatilep = 1;
6937 exp = TREE_OPERAND (exp, 0);
6939 done:
6941 /* If OFFSET is constant, see if we can return the whole thing as a
6942 constant bit position. Make sure to handle overflow during
6943 this conversion. */
6944 if (TREE_CODE (offset) == INTEGER_CST)
6946 offset_int tem = wi::sext (wi::to_offset (offset),
6947 TYPE_PRECISION (sizetype));
6948 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6949 tem += bit_offset;
6950 if (wi::fits_shwi_p (tem))
6952 *pbitpos = tem.to_shwi ();
6953 *poffset = offset = NULL_TREE;
6957 /* Otherwise, split it up. */
6958 if (offset)
6960 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6961 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6963 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6964 offset_int tem = bit_offset.and_not (mask);
6965 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6966 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6967 bit_offset -= tem;
6968 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6969 offset = size_binop (PLUS_EXPR, offset,
6970 wide_int_to_tree (sizetype, tem));
6973 *pbitpos = bit_offset.to_shwi ();
6974 *poffset = offset;
6977 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6978 if (mode == VOIDmode
6979 && blkmode_bitfield
6980 && (*pbitpos % BITS_PER_UNIT) == 0
6981 && (*pbitsize % BITS_PER_UNIT) == 0)
6982 *pmode = BLKmode;
6983 else
6984 *pmode = mode;
6986 return exp;
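/* A worked example (declarations hypothetical, assuming a typical
   ABI with 32-bit int): for

     struct s { int pad; int f : 3; } x;

   applying get_inner_reference to the COMPONENT_REF x.f returns the
   VAR_DECL X with *PBITSIZE == 3, *PBITPOS == 32, *POFFSET ==
   NULL_TREE and *PMODE == VOIDmode, since F is a non-BLKmode
   bit-field.  For a variable access such as a[i].f, the variable
   part of the displacement is returned in *POFFSET instead and
   *PBITPOS holds only the constant residue.  */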
6989 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6991 static unsigned HOST_WIDE_INT
6992 target_align (const_tree target)
6994 /* We might have a chain of nested references with intermediate
6995 misaligning bit-field components, so we need to recurse to find out. */
6997 unsigned HOST_WIDE_INT this_align, outer_align;
6999 switch (TREE_CODE (target))
7001 case BIT_FIELD_REF:
7002 return 1;
7004 case COMPONENT_REF:
7005 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7006 outer_align = target_align (TREE_OPERAND (target, 0));
7007 return MIN (this_align, outer_align);
7009 case ARRAY_REF:
7010 case ARRAY_RANGE_REF:
7011 this_align = TYPE_ALIGN (TREE_TYPE (target));
7012 outer_align = target_align (TREE_OPERAND (target, 0));
7013 return MIN (this_align, outer_align);
7015 CASE_CONVERT:
7016 case NON_LVALUE_EXPR:
7017 case VIEW_CONVERT_EXPR:
7018 this_align = TYPE_ALIGN (TREE_TYPE (target));
7019 outer_align = target_align (TREE_OPERAND (target, 0));
7020 return MAX (this_align, outer_align);
7022 default:
7023 return TYPE_ALIGN (TREE_TYPE (target));
7028 /* Given an rtx VALUE that may contain additions and multiplications, return
7029 an equivalent value that just refers to a register, memory, or constant.
7030 This is done by generating instructions to perform the arithmetic and
7031 returning a pseudo-register containing the value.
7033 The returned value may be a REG, SUBREG, MEM or constant. */
7036 force_operand (rtx value, rtx target)
7038 rtx op1, op2;
7039 /* Use subtarget as the target for operand 0 of a binary operation. */
7040 rtx subtarget = get_subtarget (target);
7041 enum rtx_code code = GET_CODE (value);
7043 /* Check for subreg applied to an expression produced by loop optimizer. */
7044 if (code == SUBREG
7045 && !REG_P (SUBREG_REG (value))
7046 && !MEM_P (SUBREG_REG (value)))
7048 value
7049 = simplify_gen_subreg (GET_MODE (value),
7050 force_reg (GET_MODE (SUBREG_REG (value)),
7051 force_operand (SUBREG_REG (value),
7052 NULL_RTX)),
7053 GET_MODE (SUBREG_REG (value)),
7054 SUBREG_BYTE (value));
7055 code = GET_CODE (value);
7058 /* Check for a PIC address load. */
7059 if ((code == PLUS || code == MINUS)
7060 && XEXP (value, 0) == pic_offset_table_rtx
7061 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7062 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7063 || GET_CODE (XEXP (value, 1)) == CONST))
7065 if (!subtarget)
7066 subtarget = gen_reg_rtx (GET_MODE (value));
7067 emit_move_insn (subtarget, value);
7068 return subtarget;
7071 if (ARITHMETIC_P (value))
7073 op2 = XEXP (value, 1);
7074 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7075 subtarget = 0;
7076 if (code == MINUS && CONST_INT_P (op2))
7078 code = PLUS;
7079 op2 = negate_rtx (GET_MODE (value), op2);
7082 /* Check for an addition with OP2 a constant integer and our first
7083 operand a PLUS of a virtual register and something else. In that
7084 case, we want to emit the sum of the virtual register and the
7085 constant first and then add the other value. This allows virtual
7086 register instantiation to simply modify the constant rather than
7087 creating another one around this addition. */
7088 if (code == PLUS && CONST_INT_P (op2)
7089 && GET_CODE (XEXP (value, 0)) == PLUS
7090 && REG_P (XEXP (XEXP (value, 0), 0))
7091 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7092 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7094 rtx temp = expand_simple_binop (GET_MODE (value), code,
7095 XEXP (XEXP (value, 0), 0), op2,
7096 subtarget, 0, OPTAB_LIB_WIDEN);
7097 return expand_simple_binop (GET_MODE (value), code, temp,
7098 force_operand (XEXP (XEXP (value,
7099 0), 1), 0),
7100 target, 0, OPTAB_LIB_WIDEN);
7103 op1 = force_operand (XEXP (value, 0), subtarget);
7104 op2 = force_operand (op2, NULL_RTX);
7105 switch (code)
7107 case MULT:
7108 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7109 case DIV:
7110 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7111 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7112 target, 1, OPTAB_LIB_WIDEN);
7113 else
7114 return expand_divmod (0,
7115 FLOAT_MODE_P (GET_MODE (value))
7116 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7117 GET_MODE (value), op1, op2, target, 0);
7118 case MOD:
7119 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7120 target, 0);
7121 case UDIV:
7122 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7123 target, 1);
7124 case UMOD:
7125 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7126 target, 1);
7127 case ASHIFTRT:
7128 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7129 target, 0, OPTAB_LIB_WIDEN);
7130 default:
7131 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7132 target, 1, OPTAB_LIB_WIDEN);
7135 if (UNARY_P (value))
7137 if (!target)
7138 target = gen_reg_rtx (GET_MODE (value));
7139 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7140 switch (code)
7142 case ZERO_EXTEND:
7143 case SIGN_EXTEND:
7144 case TRUNCATE:
7145 case FLOAT_EXTEND:
7146 case FLOAT_TRUNCATE:
7147 convert_move (target, op1, code == ZERO_EXTEND);
7148 return target;
7150 case FIX:
7151 case UNSIGNED_FIX:
7152 expand_fix (target, op1, code == UNSIGNED_FIX);
7153 return target;
7155 case FLOAT:
7156 case UNSIGNED_FLOAT:
7157 expand_float (target, op1, code == UNSIGNED_FLOAT);
7158 return target;
7160 default:
7161 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7165 #ifdef INSN_SCHEDULING
7166 /* On machines that have insn scheduling, we want all memory references to be
7167 explicit, so we need to deal with such paradoxical SUBREGs. */
7168 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7169 value
7170 = simplify_gen_subreg (GET_MODE (value),
7171 force_reg (GET_MODE (SUBREG_REG (value)),
7172 force_operand (SUBREG_REG (value),
7173 NULL_RTX)),
7174 GET_MODE (SUBREG_REG (value)),
7175 SUBREG_BYTE (value));
7176 #endif
7178 return value;
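/* For example, given the hypothetical rtx

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand recurses into the MULT, emits the multiplication via
   expand_mult, then emits the addition with expand_simple_binop and
   returns a pseudo register (or TARGET) holding the sum, which the
   caller can use where only a register, memory or constant is
   allowed.  */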
7181 /* Subroutine of expand_expr: return nonzero iff there is no way that
7182 EXP can reference X, which is being modified. TOP_P is nonzero if this
7183 call is going to be used to determine whether we need a temporary
7184 for EXP, as opposed to a recursive call to this function.
7186 It is always safe for this routine to return zero since it merely
7187 searches for optimization opportunities. */
7190 safe_from_p (const_rtx x, tree exp, int top_p)
7192 rtx exp_rtl = 0;
7193 int i, nops;
7195 if (x == 0
7196 /* If EXP has varying size, we MUST use a target since we currently
7197 have no way of allocating temporaries of variable size
7198 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7199 So we assume here that something at a higher level has prevented a
7200 clash. This is somewhat bogus, but the best we can do. Only
7201 do this when X is BLKmode and when we are at the top level. */
7202 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7203 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7204 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7205 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7206 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7207 != INTEGER_CST)
7208 && GET_MODE (x) == BLKmode)
7209 /* If X is in the outgoing argument area, it is always safe. */
7210 || (MEM_P (x)
7211 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7212 || (GET_CODE (XEXP (x, 0)) == PLUS
7213 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7214 return 1;
7216 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7217 find the underlying pseudo. */
7218 if (GET_CODE (x) == SUBREG)
7220 x = SUBREG_REG (x);
7221 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7222 return 0;
7225 /* Now look at our tree code and possibly recurse. */
7226 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7228 case tcc_declaration:
7229 exp_rtl = DECL_RTL_IF_SET (exp);
7230 break;
7232 case tcc_constant:
7233 return 1;
7235 case tcc_exceptional:
7236 if (TREE_CODE (exp) == TREE_LIST)
7238 while (1)
7240 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7241 return 0;
7242 exp = TREE_CHAIN (exp);
7243 if (!exp)
7244 return 1;
7245 if (TREE_CODE (exp) != TREE_LIST)
7246 return safe_from_p (x, exp, 0);
7249 else if (TREE_CODE (exp) == CONSTRUCTOR)
7251 constructor_elt *ce;
7252 unsigned HOST_WIDE_INT idx;
7254 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7255 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7256 || !safe_from_p (x, ce->value, 0))
7257 return 0;
7258 return 1;
7260 else if (TREE_CODE (exp) == ERROR_MARK)
7261 return 1; /* An already-visited SAVE_EXPR? */
7262 else
7263 return 0;
7265 case tcc_statement:
7266 /* The only case we look at here is the DECL_INITIAL inside a
7267 DECL_EXPR. */
7268 return (TREE_CODE (exp) != DECL_EXPR
7269 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7270 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7271 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7273 case tcc_binary:
7274 case tcc_comparison:
7275 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7276 return 0;
7277 /* Fall through. */
7279 case tcc_unary:
7280 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7282 case tcc_expression:
7283 case tcc_reference:
7284 case tcc_vl_exp:
7285 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7286 the expression. If it is set, we conflict iff we are that rtx or
7287 both are in memory. Otherwise, we check all operands of the
7288 expression recursively. */
7290 switch (TREE_CODE (exp))
7292 case ADDR_EXPR:
7293 /* If the operand is static or we are static, we can't conflict.
7294 Likewise if we don't conflict with the operand at all. */
7295 if (staticp (TREE_OPERAND (exp, 0))
7296 || TREE_STATIC (exp)
7297 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7298 return 1;
7300 /* Otherwise, the only way this can conflict is if we are taking
7301 the address of a DECL and that address is part of X, which is
7302 very rare. */
7303 exp = TREE_OPERAND (exp, 0);
7304 if (DECL_P (exp))
7306 if (!DECL_RTL_SET_P (exp)
7307 || !MEM_P (DECL_RTL (exp)))
7308 return 0;
7309 else
7310 exp_rtl = XEXP (DECL_RTL (exp), 0);
7312 break;
7314 case MEM_REF:
7315 if (MEM_P (x)
7316 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7317 get_alias_set (exp)))
7318 return 0;
7319 break;
7321 case CALL_EXPR:
7322 /* Assume that the call will clobber all hard registers and
7323 all of memory. */
7324 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7325 || MEM_P (x))
7326 return 0;
7327 break;
7329 case WITH_CLEANUP_EXPR:
7330 case CLEANUP_POINT_EXPR:
7331 /* Lowered by gimplify.c. */
7332 gcc_unreachable ();
7334 case SAVE_EXPR:
7335 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7337 default:
7338 break;
7341 /* If we have an rtx, we do not need to scan our operands. */
7342 if (exp_rtl)
7343 break;
7345 nops = TREE_OPERAND_LENGTH (exp);
7346 for (i = 0; i < nops; i++)
7347 if (TREE_OPERAND (exp, i) != 0
7348 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7349 return 0;
7351 break;
7353 case tcc_type:
7354 /* Should never get a type here. */
7355 gcc_unreachable ();
7358 /* If we have an rtl, find any enclosed object. Then see if we conflict
7359 with it. */
7360 if (exp_rtl)
7362 if (GET_CODE (exp_rtl) == SUBREG)
7364 exp_rtl = SUBREG_REG (exp_rtl);
7365 if (REG_P (exp_rtl)
7366 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7367 return 0;
7370 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7371 are memory and they conflict. */
7372 return ! (rtx_equal_p (x, exp_rtl)
7373 || (MEM_P (x) && MEM_P (exp_rtl)
7374 && true_dependence (exp_rtl, VOIDmode, x)));
7377 /* If we reach here, it is safe. */
7378 return 1;
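/* For instance, if EXP contains a CALL_EXPR, the code above returns 0
   whenever X is a MEM or a hard register, because the call is assumed
   to clobber all hard registers and all of memory; a caller such as
   expand_operands below then simply drops its suggested TARGET and
   expands into a fresh temporary instead.  */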
7382 /* Return the highest power of two that EXP is known to be a multiple of.
7383 This is used in updating alignment of MEMs in array references. */
7385 unsigned HOST_WIDE_INT
7386 highest_pow2_factor (const_tree exp)
7388 unsigned HOST_WIDE_INT ret;
7389 int trailing_zeros = tree_ctz (exp);
7390 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7391 return BIGGEST_ALIGNMENT;
7392 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7393 if (ret > BIGGEST_ALIGNMENT)
7394 return BIGGEST_ALIGNMENT;
7395 return ret;
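/* For example, if EXP is the integer constant 24 (three trailing zero
   bits), the result is 8; for a value with no known low zero bits,
   tree_ctz returns 0 and the result is 1; in all cases the result is
   capped at BIGGEST_ALIGNMENT.  */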
7398 /* Similar, except that the alignment requirements of TARGET are
7399 taken into account. Assume it is at least as aligned as its
7400 type, unless it is a COMPONENT_REF in which case the layout of
7401 the structure gives the alignment. */
7403 static unsigned HOST_WIDE_INT
7404 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7406 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7407 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7409 return MAX (factor, talign);
7412 /* Convert the tree comparison code TCODE to the rtl one where the
7413 signedness is UNSIGNEDP. */
7415 static enum rtx_code
7416 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7418 enum rtx_code code;
7419 switch (tcode)
7421 case EQ_EXPR:
7422 code = EQ;
7423 break;
7424 case NE_EXPR:
7425 code = NE;
7426 break;
7427 case LT_EXPR:
7428 code = unsignedp ? LTU : LT;
7429 break;
7430 case LE_EXPR:
7431 code = unsignedp ? LEU : LE;
7432 break;
7433 case GT_EXPR:
7434 code = unsignedp ? GTU : GT;
7435 break;
7436 case GE_EXPR:
7437 code = unsignedp ? GEU : GE;
7438 break;
7439 case UNORDERED_EXPR:
7440 code = UNORDERED;
7441 break;
7442 case ORDERED_EXPR:
7443 code = ORDERED;
7444 break;
7445 case UNLT_EXPR:
7446 code = UNLT;
7447 break;
7448 case UNLE_EXPR:
7449 code = UNLE;
7450 break;
7451 case UNGT_EXPR:
7452 code = UNGT;
7453 break;
7454 case UNGE_EXPR:
7455 code = UNGE;
7456 break;
7457 case UNEQ_EXPR:
7458 code = UNEQ;
7459 break;
7460 case LTGT_EXPR:
7461 code = LTGT;
7462 break;
7464 default:
7465 gcc_unreachable ();
7467 return code;
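 /* For example, LT_EXPR becomes LTU when UNSIGNEDP is nonzero and LT
 otherwise, while the IEEE unordered variants (UNLT_EXPR, UNEQ_EXPR,
 LTGT_EXPR, ...) map to the same RTL code regardless of signedness. */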
7470 /* Subroutine of expand_expr. Expand the two operands of a binary
7471 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7472 The value may be stored in TARGET if TARGET is nonzero. The
7473 MODIFIER argument is as documented by expand_expr. */
7475 void
7476 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7477 enum expand_modifier modifier)
7479 if (! safe_from_p (target, exp1, 1))
7480 target = 0;
7481 if (operand_equal_p (exp0, exp1, 0))
7483 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7484 *op1 = copy_rtx (*op0);
7486 else
7488 /* If we need to preserve evaluation order, copy exp0 into its own
7489 temporary variable so that it can't be clobbered by exp1. */
7490 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7491 exp0 = save_expr (exp0);
7492 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7493 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
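 /* For example, expanding the operands of X * X takes the operand_equal_p
 path above: the tree is expanded once and *OP1 is simply a copy_rtx of
 *OP0, avoiding a second expansion of the same expression. */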
7498 /* Return a MEM that contains constant EXP. DEFER is as for
7499 output_constant_def and MODIFIER is as for expand_expr. */
7501 static rtx
7502 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7504 rtx mem;
7506 mem = output_constant_def (exp, defer);
7507 if (modifier != EXPAND_INITIALIZER)
7508 mem = use_anchored_address (mem);
7509 return mem;
7512 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7513 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7515 static rtx
7516 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7517 enum expand_modifier modifier, addr_space_t as)
7519 rtx result, subtarget;
7520 tree inner, offset;
7521 HOST_WIDE_INT bitsize, bitpos;
7522 int volatilep, unsignedp;
7523 machine_mode mode1;
7525 /* If we are taking the address of a constant and are at the top level,
7526 we have to use output_constant_def since we can't call force_const_mem
7527 at top level. */
7528 /* ??? This should be considered a front-end bug. We should not be
7529 generating ADDR_EXPR of something that isn't an LVALUE. The only
7530 exception here is STRING_CST. */
7531 if (CONSTANT_CLASS_P (exp))
7533 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7534 if (modifier < EXPAND_SUM)
7535 result = force_operand (result, target);
7536 return result;
7539 /* Everything must be something allowed by is_gimple_addressable. */
7540 switch (TREE_CODE (exp))
7542 case INDIRECT_REF:
7543 /* This case will happen via recursion for &a->b. */
7544 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7546 case MEM_REF:
7548 tree tem = TREE_OPERAND (exp, 0);
7549 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7550 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7551 return expand_expr (tem, target, tmode, modifier);
7554 case CONST_DECL:
7555 /* Expand the initializer like constants above. */
7556 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7557 0, modifier), 0);
7558 if (modifier < EXPAND_SUM)
7559 result = force_operand (result, target);
7560 return result;
7562 case REALPART_EXPR:
7563 /* The real part of the complex number is always first, therefore
7564 the address is the same as the address of the parent object. */
7565 offset = 0;
7566 bitpos = 0;
7567 inner = TREE_OPERAND (exp, 0);
7568 break;
7570 case IMAGPART_EXPR:
7571 /* The imaginary part of the complex number is always second.
7572 The expression is therefore always offset by the size of the
7573 scalar type. */
7574 offset = 0;
7575 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7576 inner = TREE_OPERAND (exp, 0);
7577 break;
7579 case COMPOUND_LITERAL_EXPR:
7580 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7581 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7582 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7583 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7584 the initializers aren't gimplified. */
7585 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7586 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7587 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7588 target, tmode, modifier, as);
7589 /* FALLTHRU */
7590 default:
7591 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7592 expand_expr, as that can have various side effects; LABEL_DECLs for
7593 example, may not have their DECL_RTL set yet. Expand the rtl of
7594 CONSTRUCTORs too, which should yield a memory reference for the
7595 constructor's contents. Assume language specific tree nodes can
7596 be expanded in some interesting way. */
7597 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7598 if (DECL_P (exp)
7599 || TREE_CODE (exp) == CONSTRUCTOR
7600 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7602 result = expand_expr (exp, target, tmode,
7603 modifier == EXPAND_INITIALIZER
7604 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7606 /* If the DECL isn't in memory, then the DECL wasn't properly
7607 marked TREE_ADDRESSABLE, which will be either a front-end
7608 or a tree optimizer bug. */
7610 gcc_assert (MEM_P (result));
7611 result = XEXP (result, 0);
7613 /* ??? Is this needed anymore? */
7614 if (DECL_P (exp))
7615 TREE_USED (exp) = 1;
7617 if (modifier != EXPAND_INITIALIZER
7618 && modifier != EXPAND_CONST_ADDRESS
7619 && modifier != EXPAND_SUM)
7620 result = force_operand (result, target);
7621 return result;
7624 /* Pass FALSE as the last argument to get_inner_reference although
7625 we are expanding to RTL. The rationale is that we know how to
7626 handle "aligning nodes" here: we can just bypass them because
7627 they won't change the final object whose address will be returned
7628 (they actually exist only for that purpose). */
7629 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7630 &mode1, &unsignedp, &volatilep, false);
7631 break;
7634 /* We must have made progress. */
7635 gcc_assert (inner != exp);
7637 subtarget = offset || bitpos ? NULL_RTX : target;
7638 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7639 inner alignment, force the inner to be sufficiently aligned. */
7640 if (CONSTANT_CLASS_P (inner)
7641 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7643 inner = copy_node (inner);
7644 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7645 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7646 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7648 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7650 if (offset)
7652 rtx tmp;
7654 if (modifier != EXPAND_NORMAL)
7655 result = force_operand (result, NULL);
7656 tmp = expand_expr (offset, NULL_RTX, tmode,
7657 modifier == EXPAND_INITIALIZER
7658 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7660 /* expand_expr is allowed to return an object in a mode other
7661 than TMODE. If it did, we need to convert. */
7662 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7663 tmp = convert_modes (tmode, GET_MODE (tmp),
7664 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7665 result = convert_memory_address_addr_space (tmode, result, as);
7666 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7668 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7669 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7670 else
7672 subtarget = bitpos ? NULL_RTX : target;
7673 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7674 1, OPTAB_LIB_WIDEN);
7678 if (bitpos)
7680 /* Someone beforehand should have rejected taking the address
7681 of such an object. */
7682 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7684 result = convert_memory_address_addr_space (tmode, result, as);
7685 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7686 if (modifier < EXPAND_SUM)
7687 result = force_operand (result, target);
7690 return result;
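 /* For example, for &s.f where field F lies at a constant offset of 4
 bytes (BITPOS == 32, OFFSET == NULL), the recursive call returns the
 address of S and the bitpos handling above adds the constant 4,
 assuming the usual 8-bit BITS_PER_UNIT. */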
7693 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7694 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7696 static rtx
7697 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7698 enum expand_modifier modifier)
7700 addr_space_t as = ADDR_SPACE_GENERIC;
7701 machine_mode address_mode = Pmode;
7702 machine_mode pointer_mode = ptr_mode;
7703 machine_mode rmode;
7704 rtx result;
7706 /* Target mode of VOIDmode says "whatever's natural". */
7707 if (tmode == VOIDmode)
7708 tmode = TYPE_MODE (TREE_TYPE (exp));
7710 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7712 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7713 address_mode = targetm.addr_space.address_mode (as);
7714 pointer_mode = targetm.addr_space.pointer_mode (as);
7717 /* We can get called with some Weird Things if the user does silliness
7718 like "(short) &a". In that case, convert_memory_address won't do
7719 the right thing, so ignore the given target mode. */
7720 if (tmode != address_mode && tmode != pointer_mode)
7721 tmode = address_mode;
7723 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7724 tmode, modifier, as);
7726 /* Despite expand_expr's claims concerning ignoring TMODE when not
7727 strictly convenient, stuff breaks if we don't honor it. Note
7728 that combined with the above, we only do this for pointer modes. */
7729 rmode = GET_MODE (result);
7730 if (rmode == VOIDmode)
7731 rmode = tmode;
7732 if (rmode != tmode)
7733 result = convert_memory_address_addr_space (tmode, result, as);
7735 return result;
7738 /* Generate code for computing CONSTRUCTOR EXP.
7739 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7740 is TRUE, instead of creating a temporary variable in memory
7741 NULL is returned and the caller needs to handle it differently. */
7743 static rtx
7744 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7745 bool avoid_temp_mem)
7747 tree type = TREE_TYPE (exp);
7748 machine_mode mode = TYPE_MODE (type);
7750 /* Try to avoid creating a temporary at all. This is possible
7751 if all of the initializer is zero.
7752 FIXME: try to handle all [0..255] initializers we can handle
7753 with memset. */
7754 if (TREE_STATIC (exp)
7755 && !TREE_ADDRESSABLE (exp)
7756 && target != 0 && mode == BLKmode
7757 && all_zeros_p (exp))
7759 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7760 return target;
7763 /* All elts simple constants => refer to a constant in memory. But
7764 if this is a non-BLKmode mode, let it store a field at a time
7765 since that should make a CONST_INT, CONST_WIDE_INT or
7766 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7767 use, it is best to store directly into the target unless the type
7768 is large enough that memcpy will be used. If we are making an
7769 initializer and all operands are constant, put it in memory as
7770 well.
7772 FIXME: Avoid trying to fill vector constructors piecemeal.
7773 Output them with output_constant_def below unless we're sure
7774 they're zeros. This should go away when vector initializers
7775 are treated like VECTOR_CST instead of arrays. */
7776 if ((TREE_STATIC (exp)
7777 && ((mode == BLKmode
7778 && ! (target != 0 && safe_from_p (target, exp, 1)))
7779 || TREE_ADDRESSABLE (exp)
7780 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7781 && (! can_move_by_pieces
7782 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7783 TYPE_ALIGN (type)))
7784 && ! mostly_zeros_p (exp))))
7785 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7786 && TREE_CONSTANT (exp)))
7788 rtx constructor;
7790 if (avoid_temp_mem)
7791 return NULL_RTX;
7793 constructor = expand_expr_constant (exp, 1, modifier);
7795 if (modifier != EXPAND_CONST_ADDRESS
7796 && modifier != EXPAND_INITIALIZER
7797 && modifier != EXPAND_SUM)
7798 constructor = validize_mem (constructor);
7800 return constructor;
7803 /* Handle calls that pass values in multiple non-contiguous
7804 locations. The Irix 6 ABI has examples of this. */
7805 if (target == 0 || ! safe_from_p (target, exp, 1)
7806 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7808 if (avoid_temp_mem)
7809 return NULL_RTX;
7811 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7814 store_constructor (exp, target, 0, int_expr_size (exp));
7815 return target;
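 /* For example, a TREE_STATIC constructor that is entirely zero and has a
 BLKmode target is handled by the clear_storage path at the top of this
 function, i.e. it becomes a single block clear instead of a sequence
 of element stores. */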
7819 /* expand_expr: generate code for computing expression EXP.
7820 An rtx for the computed value is returned. The value is never null.
7821 In the case of a void EXP, const0_rtx is returned.
7823 The value may be stored in TARGET if TARGET is nonzero.
7824 TARGET is just a suggestion; callers must assume that
7825 the rtx returned may not be the same as TARGET.
7827 If TARGET is CONST0_RTX, it means that the value will be ignored.
7829 If TMODE is not VOIDmode, it suggests generating the
7830 result in mode TMODE. But this is done only when convenient.
7831 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7832 TMODE is just a suggestion; callers must assume that
7833 the rtx returned may not have mode TMODE.
7835 Note that TARGET may have neither TMODE nor MODE. In that case, it
7836 probably will not be used.
7838 If MODIFIER is EXPAND_SUM then when EXP is an addition
7839 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7840 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7841 products as above, or REG or MEM, or constant.
7842 Ordinarily in such cases we would output mul or add instructions
7843 and then return a pseudo reg containing the sum.
7845 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7846 it also marks a label as absolutely required (it can't be dead).
7847 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7848 This is used for outputting expressions used in initializers.
7850 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7851 with a constant address even if that address is not normally legitimate.
7852 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7854 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7855 a call parameter. Such targets require special care as we haven't yet
7856 marked TARGET so that it's safe from being trashed by libcalls. We
7857 don't want to use TARGET for anything but the final result;
7858 Intermediate values must go elsewhere. Additionally, calls to
7859 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7861 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7862 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7863 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7864 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7865 recursively.
7867 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7868 In this case, we don't adjust a returned MEM rtx that wouldn't be
7869 sufficiently aligned for its mode; instead, it's up to the caller
7870 to deal with it afterwards. This is used to make sure that unaligned
7871 base objects for which out-of-bounds accesses are supported, for
7872 example record types with trailing arrays, aren't realigned behind
7873 the back of the caller.
7874 The normal operating mode is to pass FALSE for this parameter. */
7877 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7878 enum expand_modifier modifier, rtx *alt_rtl,
7879 bool inner_reference_p)
7881 rtx ret;
7883 /* Handle ERROR_MARK before anybody tries to access its type. */
7884 if (TREE_CODE (exp) == ERROR_MARK
7885 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7887 ret = CONST0_RTX (tmode);
7888 return ret ? ret : const0_rtx;
7891 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7892 inner_reference_p);
7893 return ret;
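 /* Callers that need no particular target or mode typically go through
 the expand_normal wrapper, which is equivalent to calling expand_expr
 with NULL_RTX, VOIDmode and EXPAND_NORMAL. */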
7896 /* Try to expand the conditional expression which is represented by
7897 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7898 return the rtl reg which represents the result. Otherwise return
7899 NULL_RTX. */
7901 static rtx
7902 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7903 tree treeop1 ATTRIBUTE_UNUSED,
7904 tree treeop2 ATTRIBUTE_UNUSED)
7906 rtx insn;
7907 rtx op00, op01, op1, op2;
7908 enum rtx_code comparison_code;
7909 machine_mode comparison_mode;
7910 gimple *srcstmt;
7911 rtx temp;
7912 tree type = TREE_TYPE (treeop1);
7913 int unsignedp = TYPE_UNSIGNED (type);
7914 machine_mode mode = TYPE_MODE (type);
7915 machine_mode orig_mode = mode;
7917 /* If we cannot do a conditional move on the mode, try doing it
7918 with the promoted mode. */
7919 if (!can_conditionally_move_p (mode))
7921 mode = promote_mode (type, mode, &unsignedp);
7922 if (!can_conditionally_move_p (mode))
7923 return NULL_RTX;
7924 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7926 else
7927 temp = assign_temp (type, 0, 1);
7929 start_sequence ();
7930 expand_operands (treeop1, treeop2,
7931 temp, &op1, &op2, EXPAND_NORMAL);
7933 if (TREE_CODE (treeop0) == SSA_NAME
7934 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7936 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7937 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7938 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7939 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7940 comparison_mode = TYPE_MODE (type);
7941 unsignedp = TYPE_UNSIGNED (type);
7942 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7944 else if (COMPARISON_CLASS_P (treeop0))
7946 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7947 enum tree_code cmpcode = TREE_CODE (treeop0);
7948 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7949 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7950 unsignedp = TYPE_UNSIGNED (type);
7951 comparison_mode = TYPE_MODE (type);
7952 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7954 else
7956 op00 = expand_normal (treeop0);
7957 op01 = const0_rtx;
7958 comparison_code = NE;
7959 comparison_mode = GET_MODE (op00);
7960 if (comparison_mode == VOIDmode)
7961 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7964 if (GET_MODE (op1) != mode)
7965 op1 = gen_lowpart (mode, op1);
7967 if (GET_MODE (op2) != mode)
7968 op2 = gen_lowpart (mode, op2);
7970 /* Try to emit the conditional move. */
7971 insn = emit_conditional_move (temp, comparison_code,
7972 op00, op01, comparison_mode,
7973 op1, op2, mode,
7974 unsignedp);
7976 /* If we could do the conditional move, emit the sequence,
7977 and return. */
7978 if (insn)
7980 rtx_insn *seq = get_insns ();
7981 end_sequence ();
7982 emit_insn (seq);
7983 return convert_modes (orig_mode, mode, temp, 0);
7986 /* Otherwise discard the sequence and fall back to code with
7987 branches. */
7988 end_sequence ();
7989 return NULL_RTX;
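 /* For example, for r = a < b ? x : y where the condition is an SSA name
 defined by a comparison, the code above expands X and Y into TEMP's
 mode, recovers LT/LTU via convert_tree_comp_to_rtx, and emits a single
 conditional-move sequence when the target supports one; otherwise the
 caller falls back to the branching expansion of COND_EXPR. */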
7993 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
7994 enum expand_modifier modifier)
7996 rtx op0, op1, op2, temp;
7997 rtx_code_label *lab;
7998 tree type;
7999 int unsignedp;
8000 machine_mode mode;
8001 enum tree_code code = ops->code;
8002 optab this_optab;
8003 rtx subtarget, original_target;
8004 int ignore;
8005 bool reduce_bit_field;
8006 location_t loc = ops->location;
8007 tree treeop0, treeop1, treeop2;
8008 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8009 ? reduce_to_bit_field_precision ((expr), \
8010 target, \
8011 type) \
8012 : (expr))
8014 type = ops->type;
8015 mode = TYPE_MODE (type);
8016 unsignedp = TYPE_UNSIGNED (type);
8018 treeop0 = ops->op0;
8019 treeop1 = ops->op1;
8020 treeop2 = ops->op2;
8022 /* We should be called only on simple (binary or unary) expressions,
8023 exactly those that are valid in gimple expressions that aren't
8024 GIMPLE_SINGLE_RHS (or invalid). */
8025 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8026 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8027 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8029 ignore = (target == const0_rtx
8030 || ((CONVERT_EXPR_CODE_P (code)
8031 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8032 && TREE_CODE (type) == VOID_TYPE));
8034 /* We should be called only if we need the result. */
8035 gcc_assert (!ignore);
8037 /* An operation in what may be a bit-field type needs the
8038 result to be reduced to the precision of the bit-field type,
8039 which is narrower than that of the type's mode. */
8040 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8041 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8043 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8044 target = 0;
8046 /* Use subtarget as the target for operand 0 of a binary operation. */
8047 subtarget = get_subtarget (target);
8048 original_target = target;
8050 switch (code)
8052 case NON_LVALUE_EXPR:
8053 case PAREN_EXPR:
8054 CASE_CONVERT:
8055 if (treeop0 == error_mark_node)
8056 return const0_rtx;
8058 if (TREE_CODE (type) == UNION_TYPE)
8060 tree valtype = TREE_TYPE (treeop0);
8062 /* If both input and output are BLKmode, this conversion isn't doing
8063 anything except possibly changing memory attribute. */
8064 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8066 rtx result = expand_expr (treeop0, target, tmode,
8067 modifier);
8069 result = copy_rtx (result);
8070 set_mem_attributes (result, type, 0);
8071 return result;
8074 if (target == 0)
8076 if (TYPE_MODE (type) != BLKmode)
8077 target = gen_reg_rtx (TYPE_MODE (type));
8078 else
8079 target = assign_temp (type, 1, 1);
8082 if (MEM_P (target))
8083 /* Store data into beginning of memory target. */
8084 store_expr (treeop0,
8085 adjust_address (target, TYPE_MODE (valtype), 0),
8086 modifier == EXPAND_STACK_PARM,
8087 false);
8089 else
8091 gcc_assert (REG_P (target));
8093 /* Store this field into a union of the proper type. */
8094 store_field (target,
8095 MIN ((int_size_in_bytes (TREE_TYPE
8096 (treeop0))
8097 * BITS_PER_UNIT),
8098 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8099 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8102 /* Return the entire union. */
8103 return target;
8106 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8108 op0 = expand_expr (treeop0, target, VOIDmode,
8109 modifier);
8111 /* If the signedness of the conversion differs and OP0 is
8112 a promoted SUBREG, clear that indication since we now
8113 have to do the proper extension. */
8114 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8115 && GET_CODE (op0) == SUBREG)
8116 SUBREG_PROMOTED_VAR_P (op0) = 0;
8118 return REDUCE_BIT_FIELD (op0);
8121 op0 = expand_expr (treeop0, NULL_RTX, mode,
8122 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8123 if (GET_MODE (op0) == mode)
8124 ;
8126 /* If OP0 is a constant, just convert it into the proper mode. */
8127 else if (CONSTANT_P (op0))
8129 tree inner_type = TREE_TYPE (treeop0);
8130 machine_mode inner_mode = GET_MODE (op0);
8132 if (inner_mode == VOIDmode)
8133 inner_mode = TYPE_MODE (inner_type);
8135 if (modifier == EXPAND_INITIALIZER)
8136 op0 = lowpart_subreg (mode, op0, inner_mode);
8137 else
8138 op0 = convert_modes (mode, inner_mode, op0,
8139 TYPE_UNSIGNED (inner_type));
8142 else if (modifier == EXPAND_INITIALIZER)
8143 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8145 else if (target == 0)
8146 op0 = convert_to_mode (mode, op0,
8147 TYPE_UNSIGNED (TREE_TYPE
8148 (treeop0)));
8149 else
8151 convert_move (target, op0,
8152 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8153 op0 = target;
8156 return REDUCE_BIT_FIELD (op0);
8158 case ADDR_SPACE_CONVERT_EXPR:
8160 tree treeop0_type = TREE_TYPE (treeop0);
8162 gcc_assert (POINTER_TYPE_P (type));
8163 gcc_assert (POINTER_TYPE_P (treeop0_type));
8165 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8166 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8168 /* Conversions between pointers to the same address space should
8169 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8170 gcc_assert (as_to != as_from);
8172 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8174 /* Ask target code to handle conversion between pointers
8175 to overlapping address spaces. */
8176 if (targetm.addr_space.subset_p (as_to, as_from)
8177 || targetm.addr_space.subset_p (as_from, as_to))
8179 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8181 else
8183 /* For disjoint address spaces, converting anything but a null
8184 pointer invokes undefined behaviour. We truncate or extend the
8185 value as if we'd converted via integers, which handles 0 as
8186 required, and all others as the programmer likely expects. */
8187 #ifndef POINTERS_EXTEND_UNSIGNED
8188 const int POINTERS_EXTEND_UNSIGNED = 1;
8189 #endif
8190 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8191 op0, POINTERS_EXTEND_UNSIGNED);
8193 gcc_assert (op0);
8194 return op0;
8197 case POINTER_PLUS_EXPR:
8198 /* Even though the sizetype mode and the pointer's mode can be different,
8199 expand is able to handle this correctly and get the correct result out
8200 of the PLUS_EXPR code. */
8201 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8202 if sizetype precision is smaller than pointer precision. */
8203 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8204 treeop1 = fold_convert_loc (loc, type,
8205 fold_convert_loc (loc, ssizetype,
8206 treeop1));
8207 /* If sizetype precision is larger than pointer precision, truncate the
8208 offset to have matching modes. */
8209 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8210 treeop1 = fold_convert_loc (loc, type, treeop1);
8212 case PLUS_EXPR:
8213 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8214 something else, make sure we add the register to the constant and
8215 then to the other thing. This case can occur during strength
8216 reduction and doing it this way will produce better code if the
8217 frame pointer or argument pointer is eliminated.
8219 fold-const.c will ensure that the constant is always in the inner
8220 PLUS_EXPR, so the only case we need to do anything about is if
8221 sp, ap, or fp is our second argument, in which case we must swap
8222 the innermost first argument and our second argument. */
8224 if (TREE_CODE (treeop0) == PLUS_EXPR
8225 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8226 && TREE_CODE (treeop1) == VAR_DECL
8227 && (DECL_RTL (treeop1) == frame_pointer_rtx
8228 || DECL_RTL (treeop1) == stack_pointer_rtx
8229 || DECL_RTL (treeop1) == arg_pointer_rtx))
8231 gcc_unreachable ();
8234 /* If the result is to be ptr_mode and we are adding an integer to
8235 something, we might be forming a constant. So try to use
8236 plus_constant. If it produces a sum and we can't accept it,
8237 use force_operand. This allows P = &ARR[const] to generate
8238 efficient code on machines where a SYMBOL_REF is not a valid
8239 address.
8241 If this is an EXPAND_SUM call, always return the sum. */
8242 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8243 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8245 if (modifier == EXPAND_STACK_PARM)
8246 target = 0;
8247 if (TREE_CODE (treeop0) == INTEGER_CST
8248 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8249 && TREE_CONSTANT (treeop1))
8251 rtx constant_part;
8252 HOST_WIDE_INT wc;
8253 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8255 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8256 EXPAND_SUM);
8257 /* Use wi::shwi to ensure that the constant is
8258 truncated according to the mode of OP1, then sign extended
8259 to a HOST_WIDE_INT. Using the constant directly can result
8260 in non-canonical RTL in a 64x32 cross compile. */
8261 wc = TREE_INT_CST_LOW (treeop0);
8262 constant_part =
8263 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8264 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8265 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8266 op1 = force_operand (op1, target);
8267 return REDUCE_BIT_FIELD (op1);
8270 else if (TREE_CODE (treeop1) == INTEGER_CST
8271 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8272 && TREE_CONSTANT (treeop0))
8274 rtx constant_part;
8275 HOST_WIDE_INT wc;
8276 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8278 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8279 (modifier == EXPAND_INITIALIZER
8280 ? EXPAND_INITIALIZER : EXPAND_SUM));
8281 if (! CONSTANT_P (op0))
8283 op1 = expand_expr (treeop1, NULL_RTX,
8284 VOIDmode, modifier);
8285 /* Return a PLUS if modifier says it's OK. */
8286 if (modifier == EXPAND_SUM
8287 || modifier == EXPAND_INITIALIZER)
8288 return simplify_gen_binary (PLUS, mode, op0, op1);
8289 goto binop2;
8291 /* Use wi::shwi to ensure that the constant is
8292 truncated according to the mode of OP1, then sign extended
8293 to a HOST_WIDE_INT. Using the constant directly can result
8294 in non-canonical RTL in a 64x32 cross compile. */
8295 wc = TREE_INT_CST_LOW (treeop1);
8296 constant_part
8297 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8298 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8299 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8300 op0 = force_operand (op0, target);
8301 return REDUCE_BIT_FIELD (op0);
8305 /* Use TER to expand pointer addition of a negated value
8306 as pointer subtraction. */
8307 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8308 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8309 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8310 && TREE_CODE (treeop1) == SSA_NAME
8311 && TYPE_MODE (TREE_TYPE (treeop0))
8312 == TYPE_MODE (TREE_TYPE (treeop1)))
8314 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8315 if (def)
8317 treeop1 = gimple_assign_rhs1 (def);
8318 code = MINUS_EXPR;
8319 goto do_minus;
8323 /* No sense saving up arithmetic to be done
8324 if it's all in the wrong mode to form part of an address.
8325 And force_operand won't know whether to sign-extend or
8326 zero-extend. */
8327 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8328 || mode != ptr_mode)
8330 expand_operands (treeop0, treeop1,
8331 subtarget, &op0, &op1, EXPAND_NORMAL);
8332 if (op0 == const0_rtx)
8333 return op1;
8334 if (op1 == const0_rtx)
8335 return op0;
8336 goto binop2;
8339 expand_operands (treeop0, treeop1,
8340 subtarget, &op0, &op1, modifier);
8341 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8343 case MINUS_EXPR:
8344 do_minus:
8345 /* For initializers, we are allowed to return a MINUS of two
8346 symbolic constants. Here we handle all cases when both operands
8347 are constant. */
8348 /* Handle difference of two symbolic constants,
8349 for the sake of an initializer. */
8350 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8351 && really_constant_p (treeop0)
8352 && really_constant_p (treeop1))
8354 expand_operands (treeop0, treeop1,
8355 NULL_RTX, &op0, &op1, modifier);
8357 /* If the last operand is a CONST_INT, use plus_constant of
8358 the negated constant. Else make the MINUS. */
8359 if (CONST_INT_P (op1))
8360 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8361 -INTVAL (op1)));
8362 else
8363 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8366 /* No sense saving up arithmetic to be done
8367 if it's all in the wrong mode to form part of an address.
8368 And force_operand won't know whether to sign-extend or
8369 zero-extend. */
8370 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8371 || mode != ptr_mode)
8372 goto binop;
8374 expand_operands (treeop0, treeop1,
8375 subtarget, &op0, &op1, modifier);
8377 /* Convert A - const to A + (-const). */
8378 if (CONST_INT_P (op1))
8380 op1 = negate_rtx (mode, op1);
8381 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8384 goto binop2;
8386 case WIDEN_MULT_PLUS_EXPR:
8387 case WIDEN_MULT_MINUS_EXPR:
8388 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8389 op2 = expand_normal (treeop2);
8390 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8391 target, unsignedp);
8392 return target;
8394 case WIDEN_MULT_EXPR:
8395 /* If first operand is constant, swap them.
8396 Thus the following special case checks need only
8397 check the second operand. */
8398 if (TREE_CODE (treeop0) == INTEGER_CST)
8399 std::swap (treeop0, treeop1);
8401 /* First, check if we have a multiplication of one signed and one
8402 unsigned operand. */
8403 if (TREE_CODE (treeop1) != INTEGER_CST
8404 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8405 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8407 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8408 this_optab = usmul_widen_optab;
8409 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8410 != CODE_FOR_nothing)
8412 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8413 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8414 EXPAND_NORMAL);
8415 else
8416 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8417 EXPAND_NORMAL);
8418 /* op0 and op1 might still be constant, despite the above
8419 != INTEGER_CST check. Handle it. */
8420 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8422 op0 = convert_modes (innermode, mode, op0, true);
8423 op1 = convert_modes (innermode, mode, op1, false);
8424 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8425 target, unsignedp));
8427 goto binop3;
8430 /* Check for a multiplication with matching signedness. */
8431 else if ((TREE_CODE (treeop1) == INTEGER_CST
8432 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8433 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8434 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8436 tree op0type = TREE_TYPE (treeop0);
8437 machine_mode innermode = TYPE_MODE (op0type);
8438 bool zextend_p = TYPE_UNSIGNED (op0type);
8439 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8440 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8442 if (TREE_CODE (treeop0) != INTEGER_CST)
8444 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8445 != CODE_FOR_nothing)
8447 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8448 EXPAND_NORMAL);
8449 /* op0 and op1 might still be constant, despite the above
8450 != INTEGER_CST check. Handle it. */
8451 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8453 widen_mult_const:
8454 op0 = convert_modes (innermode, mode, op0, zextend_p);
8455 op1
8456 = convert_modes (innermode, mode, op1,
8457 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8458 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8459 target,
8460 unsignedp));
8462 temp = expand_widening_mult (mode, op0, op1, target,
8463 unsignedp, this_optab);
8464 return REDUCE_BIT_FIELD (temp);
8466 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8467 != CODE_FOR_nothing
8468 && innermode == word_mode)
8470 rtx htem, hipart;
8471 op0 = expand_normal (treeop0);
8472 if (TREE_CODE (treeop1) == INTEGER_CST)
8473 op1 = convert_modes (innermode, mode,
8474 expand_normal (treeop1),
8475 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8476 else
8477 op1 = expand_normal (treeop1);
8478 /* op0 and op1 might still be constant, despite the above
8479 != INTEGER_CST check. Handle it. */
8480 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8481 goto widen_mult_const;
8482 temp = expand_binop (mode, other_optab, op0, op1, target,
8483 unsignedp, OPTAB_LIB_WIDEN);
8484 hipart = gen_highpart (innermode, temp);
8485 htem = expand_mult_highpart_adjust (innermode, hipart,
8486 op0, op1, hipart,
8487 zextend_p);
8488 if (htem != hipart)
8489 emit_move_insn (hipart, htem);
8490 return REDUCE_BIT_FIELD (temp);
8494 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8495 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8496 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8497 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8499 case FMA_EXPR:
8501 optab opt = fma_optab;
8502 gimple *def0, *def2;
8504 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8505 call. */
8506 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8508 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8509 tree call_expr;
8511 gcc_assert (fn != NULL_TREE);
8512 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8513 return expand_builtin (call_expr, target, subtarget, mode, false);
8516 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8517 /* The multiplication is commutative - look at its 2nd operand
8518 if the first isn't fed by a negate. */
8519 if (!def0)
8521 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8522 /* Swap operands if the 2nd operand is fed by a negate. */
8523 if (def0)
8524 std::swap (treeop0, treeop1);
8526 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8528 op0 = op2 = NULL;
8530 if (def0 && def2
8531 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8533 opt = fnms_optab;
8534 op0 = expand_normal (gimple_assign_rhs1 (def0));
8535 op2 = expand_normal (gimple_assign_rhs1 (def2));
8537 else if (def0
8538 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8540 opt = fnma_optab;
8541 op0 = expand_normal (gimple_assign_rhs1 (def0));
8543 else if (def2
8544 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8546 opt = fms_optab;
8547 op2 = expand_normal (gimple_assign_rhs1 (def2));
8550 if (op0 == NULL)
8551 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8552 if (op2 == NULL)
8553 op2 = expand_normal (treeop2);
8554 op1 = expand_normal (treeop1);
8556 return expand_ternary_op (TYPE_MODE (type), opt,
8557 op0, op1, op2, target, 0);
8560 case MULT_EXPR:
8561 /* If this is a fixed-point operation, then we cannot use the code
8562 below because "expand_mult" doesn't support sat/no-sat fixed-point
8563 multiplications. */
8564 if (ALL_FIXED_POINT_MODE_P (mode))
8565 goto binop;
8567 /* If first operand is constant, swap them.
8568 Thus the following special case checks need only
8569 check the second operand. */
8570 if (TREE_CODE (treeop0) == INTEGER_CST)
8571 std::swap (treeop0, treeop1);
8573 /* Attempt to return something suitable for generating an
8574 indexed address, for machines that support that. */
8576 if (modifier == EXPAND_SUM && mode == ptr_mode
8577 && tree_fits_shwi_p (treeop1))
8579 tree exp1 = treeop1;
8581 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8582 EXPAND_SUM);
8584 if (!REG_P (op0))
8585 op0 = force_operand (op0, NULL_RTX);
8586 if (!REG_P (op0))
8587 op0 = copy_to_mode_reg (mode, op0);
8589 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8590 gen_int_mode (tree_to_shwi (exp1),
8591 TYPE_MODE (TREE_TYPE (exp1)))));
8594 if (modifier == EXPAND_STACK_PARM)
8595 target = 0;
8597 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8598 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8600 case TRUNC_DIV_EXPR:
8601 case FLOOR_DIV_EXPR:
8602 case CEIL_DIV_EXPR:
8603 case ROUND_DIV_EXPR:
8604 case EXACT_DIV_EXPR:
8605 /* If this is a fixed-point operation, then we cannot use the code
8606 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8607 divisions. */
8608 if (ALL_FIXED_POINT_MODE_P (mode))
8609 goto binop;
8611 if (modifier == EXPAND_STACK_PARM)
8612 target = 0;
8613 /* Possible optimization: compute the dividend with EXPAND_SUM
8614 then, if the divisor is constant, optimize the case where some
8615 terms of the dividend have coefficients divisible by it. */
8616 expand_operands (treeop0, treeop1,
8617 subtarget, &op0, &op1, EXPAND_NORMAL);
8618 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8620 case RDIV_EXPR:
8621 goto binop;
8623 case MULT_HIGHPART_EXPR:
8624 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8625 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8626 gcc_assert (temp);
8627 return temp;
8629 case TRUNC_MOD_EXPR:
8630 case FLOOR_MOD_EXPR:
8631 case CEIL_MOD_EXPR:
8632 case ROUND_MOD_EXPR:
8633 if (modifier == EXPAND_STACK_PARM)
8634 target = 0;
8635 expand_operands (treeop0, treeop1,
8636 subtarget, &op0, &op1, EXPAND_NORMAL);
8637 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8639 case FIXED_CONVERT_EXPR:
8640 op0 = expand_normal (treeop0);
8641 if (target == 0 || modifier == EXPAND_STACK_PARM)
8642 target = gen_reg_rtx (mode);
8644 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8645 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8646 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8647 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8648 else
8649 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8650 return target;
8652 case FIX_TRUNC_EXPR:
8653 op0 = expand_normal (treeop0);
8654 if (target == 0 || modifier == EXPAND_STACK_PARM)
8655 target = gen_reg_rtx (mode);
8656 expand_fix (target, op0, unsignedp);
8657 return target;
8659 case FLOAT_EXPR:
8660 op0 = expand_normal (treeop0);
8661 if (target == 0 || modifier == EXPAND_STACK_PARM)
8662 target = gen_reg_rtx (mode);
8663 /* expand_float can't figure out what to do if FROM has VOIDmode.
8664 So give it the correct mode. With -O, cse will optimize this. */
8665 if (GET_MODE (op0) == VOIDmode)
8666 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8667 op0);
8668 expand_float (target, op0,
8669 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8670 return target;
8672 case NEGATE_EXPR:
8673 op0 = expand_expr (treeop0, subtarget,
8674 VOIDmode, EXPAND_NORMAL);
8675 if (modifier == EXPAND_STACK_PARM)
8676 target = 0;
8677 temp = expand_unop (mode,
8678 optab_for_tree_code (NEGATE_EXPR, type,
8679 optab_default),
8680 op0, target, 0);
8681 gcc_assert (temp);
8682 return REDUCE_BIT_FIELD (temp);
8684 case ABS_EXPR:
8685 op0 = expand_expr (treeop0, subtarget,
8686 VOIDmode, EXPAND_NORMAL);
8687 if (modifier == EXPAND_STACK_PARM)
8688 target = 0;
8690 /* ABS_EXPR is not valid for complex arguments. */
8691 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8692 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8694 /* Unsigned abs is simply the operand. Testing here means we don't
8695 risk generating incorrect code below. */
8696 if (TYPE_UNSIGNED (type))
8697 return op0;
8699 return expand_abs (mode, op0, target, unsignedp,
8700 safe_from_p (target, treeop0, 1));
8702 case MAX_EXPR:
8703 case MIN_EXPR:
8704 target = original_target;
8705 if (target == 0
8706 || modifier == EXPAND_STACK_PARM
8707 || (MEM_P (target) && MEM_VOLATILE_P (target))
8708 || GET_MODE (target) != mode
8709 || (REG_P (target)
8710 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8711 target = gen_reg_rtx (mode);
8712 expand_operands (treeop0, treeop1,
8713 target, &op0, &op1, EXPAND_NORMAL);
8715 /* First try to do it with a special MIN or MAX instruction.
8716 If that does not win, use a conditional jump to select the proper
8717 value. */
8718 this_optab = optab_for_tree_code (code, type, optab_default);
8719 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8720 OPTAB_WIDEN);
8721 if (temp != 0)
8722 return temp;
8724 /* At this point, a MEM target is no longer useful; we will get better
8725 code without it. */
8727 if (! REG_P (target))
8728 target = gen_reg_rtx (mode);
8730 /* If op1 was placed in target, swap op0 and op1. */
8731 if (target != op0 && target == op1)
8732 std::swap (op0, op1);
8734 /* We generate better code and avoid problems with op1 mentioning
8735 target by forcing op1 into a pseudo if it isn't a constant. */
8736 if (! CONSTANT_P (op1))
8737 op1 = force_reg (mode, op1);
8740 enum rtx_code comparison_code;
8741 rtx cmpop1 = op1;
8743 if (code == MAX_EXPR)
8744 comparison_code = unsignedp ? GEU : GE;
8745 else
8746 comparison_code = unsignedp ? LEU : LE;
8748 /* Canonicalize to comparisons against 0. */
8749 if (op1 == const1_rtx)
8751 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8752 or (a != 0 ? a : 1) for unsigned.
8753 For MIN we are safe converting (a <= 1 ? a : 1)
8754 into (a <= 0 ? a : 1) */
8755 cmpop1 = const0_rtx;
8756 if (code == MAX_EXPR)
8757 comparison_code = unsignedp ? NE : GT;
8759 if (op1 == constm1_rtx && !unsignedp)
8761 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8762 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8763 cmpop1 = const0_rtx;
8764 if (code == MIN_EXPR)
8765 comparison_code = LT;
8768 /* Use a conditional move if possible. */
8769 if (can_conditionally_move_p (mode))
8771 rtx insn;
8773 start_sequence ();
8775 /* Try to emit the conditional move. */
8776 insn = emit_conditional_move (target, comparison_code,
8777 op0, cmpop1, mode,
8778 op0, op1, mode,
8779 unsignedp);
8781 /* If we could do the conditional move, emit the sequence,
8782 and return. */
8783 if (insn)
8785 rtx_insn *seq = get_insns ();
8786 end_sequence ();
8787 emit_insn (seq);
8788 return target;
8791 /* Otherwise discard the sequence and fall back to code with
8792 branches. */
8793 end_sequence ();
8796 if (target != op0)
8797 emit_move_insn (target, op0);
8799 lab = gen_label_rtx ();
8800 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8801 unsignedp, mode, NULL_RTX, NULL, lab,
8802 -1);
8804 emit_move_insn (target, op1);
8805 emit_label (lab);
8806 return target;
8808 case BIT_NOT_EXPR:
8809 op0 = expand_expr (treeop0, subtarget,
8810 VOIDmode, EXPAND_NORMAL);
8811 if (modifier == EXPAND_STACK_PARM)
8812 target = 0;
8813 /* In case we have to reduce the result to bitfield precision
8814 for unsigned bitfield expand this as XOR with a proper constant
8815 instead. */
8816 if (reduce_bit_field && TYPE_UNSIGNED (type))
8818 wide_int mask = wi::mask (TYPE_PRECISION (type),
8819 false, GET_MODE_PRECISION (mode));
8821 temp = expand_binop (mode, xor_optab, op0,
8822 immed_wide_int_const (mask, mode),
8823 target, 1, OPTAB_LIB_WIDEN);
8825 else
8826 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8827 gcc_assert (temp);
8828 return temp;
8830 /* ??? Can optimize bitwise operations with one arg constant.
8831 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8832 and (a bitwise1 b) bitwise2 b (etc)
8833 but that is probably not worth while. */
8835 case BIT_AND_EXPR:
8836 case BIT_IOR_EXPR:
8837 case BIT_XOR_EXPR:
8838 goto binop;
8840 case LROTATE_EXPR:
8841 case RROTATE_EXPR:
8842 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8843 || (GET_MODE_PRECISION (TYPE_MODE (type))
8844 == TYPE_PRECISION (type)));
8845 /* fall through */
8847 case LSHIFT_EXPR:
8848 case RSHIFT_EXPR:
8850 /* If this is a fixed-point operation, then we cannot use the code
8851 below because "expand_shift" doesn't support sat/no-sat fixed-point
8852 shifts. */
8853 if (ALL_FIXED_POINT_MODE_P (mode))
8854 goto binop;
8856 if (! safe_from_p (subtarget, treeop1, 1))
8857 subtarget = 0;
8858 if (modifier == EXPAND_STACK_PARM)
8859 target = 0;
8860 op0 = expand_expr (treeop0, subtarget,
8861 VOIDmode, EXPAND_NORMAL);
8863 /* Left shift optimization when shifting across word_size boundary.
8865 If mode == GET_MODE_WIDER_MODE (word_mode), then normally there is no
8866 native instruction to support this wide-mode left shift. Consider the
8867 following scenario:
8869 Type A = (Type) B << C
8871 |< T >|
8872 | dest_high | dest_low |
8874 | word_size |
8876 If the shift amount C causes B to be shifted across the word size
8877 boundary, i.e. part of B is shifted into the high half of the
8878 destination register while the rest remains in the low half, then
8879 GCC uses the following left shift expand logic:
8881 1. Initialize dest_low to B.
8882 2. Initialize every bit of dest_high to the sign bit of B.
8883 3. Logically left shift dest_low by C bits to finalize dest_low.
8884 The value of dest_low before this shift is kept in a temp D.
8885 4. Logically left shift dest_high by C.
8886 5. Logically right shift D by (word_size - C).
8887 6. Or the results of 4 and 5 to finalize dest_high.
8889 However, by checking the gimple statements, if operand B comes from
8890 a sign extension, then we can simplify the above expand logic into:
8892 1. dest_high = src_low >> (word_size - C).
8893 2. dest_low = src_low << C.
8895 A single arithmetic right shift accomplishes all that steps 2, 4, 5
8896 and 6 did, reducing the steps needed from 6 to 2. */
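 /* For example, with 64-bit words, a 128-bit MODE, B sign-extended from a
 32-bit value and C == 40, we have C + 32 >= 64, so the simplified form
 is dest_high = src_low >> 24 and dest_low = src_low << 40. */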
8898 temp = NULL_RTX;
8899 if (code == LSHIFT_EXPR
8900 && target
8901 && REG_P (target)
8902 && ! unsignedp
8903 && mode == GET_MODE_WIDER_MODE (word_mode)
8904 && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
8905 && TREE_CONSTANT (treeop1)
8906 && TREE_CODE (treeop0) == SSA_NAME)
8908 gimple *def = SSA_NAME_DEF_STMT (treeop0);
8909 if (is_gimple_assign (def)
8910 && gimple_assign_rhs_code (def) == NOP_EXPR)
8912 machine_mode rmode = TYPE_MODE
8913 (TREE_TYPE (gimple_assign_rhs1 (def)));
8915 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
8916 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
8917 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
8918 >= GET_MODE_BITSIZE (word_mode)))
8920 rtx_insn *seq, *seq_old;
8921 unsigned int high_off = subreg_highpart_offset (word_mode,
8922 mode);
8923 rtx low = lowpart_subreg (word_mode, op0, mode);
8924 rtx dest_low = lowpart_subreg (word_mode, target, mode);
8925 rtx dest_high = simplify_gen_subreg (word_mode, target,
8926 mode, high_off);
8927 HOST_WIDE_INT ramount = (BITS_PER_WORD
8928 - TREE_INT_CST_LOW (treeop1));
8929 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
8931 start_sequence ();
8932 /* dest_high = src_low >> (word_size - C). */
8933 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
8934 rshift, dest_high, unsignedp);
8935 if (temp != dest_high)
8936 emit_move_insn (dest_high, temp);
8938 /* dest_low = src_low << C. */
8939 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
8940 treeop1, dest_low, unsignedp);
8941 if (temp != dest_low)
8942 emit_move_insn (dest_low, temp);
8944 seq = get_insns ();
8945 end_sequence ();
8946 temp = target;
8948 if (have_insn_for (ASHIFT, mode))
8950 bool speed_p = optimize_insn_for_speed_p ();
8951 start_sequence ();
8952 rtx ret_old = expand_variable_shift (code, mode, op0,
8953 treeop1, target,
8954 unsignedp);
8956 seq_old = get_insns ();
8957 end_sequence ();
8958 if (seq_cost (seq, speed_p)
8959 >= seq_cost (seq_old, speed_p))
8961 seq = seq_old;
8962 temp = ret_old;
8965 emit_insn (seq);
8970 if (temp == NULL_RTX)
8971 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8972 unsignedp);
8973 if (code == LSHIFT_EXPR)
8974 temp = REDUCE_BIT_FIELD (temp);
8975 return temp;
8978 /* Could determine the answer when only additive constants differ. Also,
8979 the addition of one can be handled by changing the condition. */
8980 case LT_EXPR:
8981 case LE_EXPR:
8982 case GT_EXPR:
8983 case GE_EXPR:
8984 case EQ_EXPR:
8985 case NE_EXPR:
8986 case UNORDERED_EXPR:
8987 case ORDERED_EXPR:
8988 case UNLT_EXPR:
8989 case UNLE_EXPR:
8990 case UNGT_EXPR:
8991 case UNGE_EXPR:
8992 case UNEQ_EXPR:
8993 case LTGT_EXPR:
8995 temp = do_store_flag (ops,
8996 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8997 tmode != VOIDmode ? tmode : mode);
8998 if (temp)
8999 return temp;
9001 /* Use a compare and a jump for BLKmode comparisons, or for function
9002 type comparisons if have_canonicalize_funcptr_for_compare. */
9004 if ((target == 0
9005 || modifier == EXPAND_STACK_PARM
9006 || ! safe_from_p (target, treeop0, 1)
9007 || ! safe_from_p (target, treeop1, 1)
9008 /* Make sure we don't have a hard reg (such as function's return
9009 value) live across basic blocks, if not optimizing. */
9010 || (!optimize && REG_P (target)
9011 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9014 emit_move_insn (target, const0_rtx);
9016 rtx_code_label *lab1 = gen_label_rtx ();
9017 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9019 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9020 emit_move_insn (target, constm1_rtx);
9021 else
9022 emit_move_insn (target, const1_rtx);
9024 emit_label (lab1);
9025 return target;
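 /* For example, the fallback above stores 0 first and overwrites it on
 the true path: with 1 normally, or with -1 (constm1_rtx) when the
 result type is 1-bit signed. */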
9027 case COMPLEX_EXPR:
9028 /* Get the rtx code of the operands. */
9029 op0 = expand_normal (treeop0);
9030 op1 = expand_normal (treeop1);
9032 if (!target)
9033 target = gen_reg_rtx (TYPE_MODE (type));
9034 else
9035 /* If target overlaps with op1, then either we need to force
9036 op1 into a pseudo (if target also overlaps with op0),
9037 or write the complex parts in reverse order. */
9038 switch (GET_CODE (target))
9040 case CONCAT:
9041 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9043 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9045 complex_expr_force_op1:
9046 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9047 emit_move_insn (temp, op1);
9048 op1 = temp;
9049 break;
9051 complex_expr_swap_order:
9052 /* Move the imaginary (op1) and real (op0) parts to their
9053 location. */
9054 write_complex_part (target, op1, true);
9055 write_complex_part (target, op0, false);
9057 return target;
9059 break;
9060 case MEM:
9061 temp = adjust_address_nv (target,
9062 GET_MODE_INNER (GET_MODE (target)), 0);
9063 if (reg_overlap_mentioned_p (temp, op1))
9065 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9066 temp = adjust_address_nv (target, imode,
9067 GET_MODE_SIZE (imode));
9068 if (reg_overlap_mentioned_p (temp, op0))
9069 goto complex_expr_force_op1;
9070 goto complex_expr_swap_order;
9072 break;
9073 default:
9074 if (reg_overlap_mentioned_p (target, op1))
9076 if (reg_overlap_mentioned_p (target, op0))
9077 goto complex_expr_force_op1;
9078 goto complex_expr_swap_order;
9080 break;
9083 /* Move the real (op0) and imaginary (op1) parts to their location. */
9084 write_complex_part (target, op0, false);
9085 write_complex_part (target, op1, true);
9087 return target;
9089 case WIDEN_SUM_EXPR:
9091 tree oprnd0 = treeop0;
9092 tree oprnd1 = treeop1;
9094 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9095 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9096 target, unsignedp);
9097 return target;
9100 case REDUC_MAX_EXPR:
9101 case REDUC_MIN_EXPR:
9102 case REDUC_PLUS_EXPR:
9104 op0 = expand_normal (treeop0);
9105 this_optab = optab_for_tree_code (code, type, optab_default);
9106 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9108 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9110 struct expand_operand ops[2];
9111 enum insn_code icode = optab_handler (this_optab, vec_mode);
9113 create_output_operand (&ops[0], target, mode);
9114 create_input_operand (&ops[1], op0, vec_mode);
9115 if (maybe_expand_insn (icode, 2, ops))
9117 target = ops[0].value;
9118 if (GET_MODE (target) != mode)
9119 return gen_lowpart (tmode, target);
9120 return target;
9123 /* Fall back to optab with vector result, and then extract scalar. */
9124 this_optab = scalar_reduc_to_vector (this_optab, type);
9125 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9126 gcc_assert (temp);
9127 /* The tree code produces a scalar result, but (somewhat by convention)
9128 the optab produces a vector with the result in element 0 if
9129 little-endian, or element N-1 if big-endian. So pull the scalar
9130 result out of that element. */
9131 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9132 int bitsize = GET_MODE_UNIT_BITSIZE (vec_mode);
9133 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9134 target, mode, mode);
9135 gcc_assert (temp);
9136 return temp;
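 /* For example, for a V4SI reduction the fallback path above extracts
 element 3 on a big-endian target (GET_MODE_NUNITS - 1) and element 0
 on a little-endian one. */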
9139 case VEC_UNPACK_HI_EXPR:
9140 case VEC_UNPACK_LO_EXPR:
9142 op0 = expand_normal (treeop0);
9143 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9144 target, unsignedp);
9145 gcc_assert (temp);
9146 return temp;
9149 case VEC_UNPACK_FLOAT_HI_EXPR:
9150 case VEC_UNPACK_FLOAT_LO_EXPR:
9152 op0 = expand_normal (treeop0);
9153 /* The signedness is determined from input operand. */
9154 temp = expand_widen_pattern_expr
9155 (ops, op0, NULL_RTX, NULL_RTX,
9156 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9158 gcc_assert (temp);
9159 return temp;
9162 case VEC_WIDEN_MULT_HI_EXPR:
9163 case VEC_WIDEN_MULT_LO_EXPR:
9164 case VEC_WIDEN_MULT_EVEN_EXPR:
9165 case VEC_WIDEN_MULT_ODD_EXPR:
9166 case VEC_WIDEN_LSHIFT_HI_EXPR:
9167 case VEC_WIDEN_LSHIFT_LO_EXPR:
9168 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9169 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9170 target, unsignedp);
9171 gcc_assert (target);
9172 return target;
9174 case VEC_PACK_TRUNC_EXPR:
9175 case VEC_PACK_SAT_EXPR:
9176 case VEC_PACK_FIX_TRUNC_EXPR:
9177 mode = TYPE_MODE (TREE_TYPE (treeop0));
9178 goto binop;
9180 case VEC_PERM_EXPR:
9181 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9182 op2 = expand_normal (treeop2);
9184 /* Careful here: if the target doesn't support integral vector modes,
9185 a constant selection vector could wind up smooshed into a normal
9186 integral constant. */
9187 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9189 tree sel_type = TREE_TYPE (treeop2);
9190 machine_mode vmode
9191 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9192 TYPE_VECTOR_SUBPARTS (sel_type));
9193 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9194 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9195 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9197 else
9198 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9200 temp = expand_vec_perm (mode, op0, op1, op2, target);
9201 gcc_assert (temp);
9202 return temp;
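/* Editor's note (illustrative, not part of the original source): for
   V4SImode operands a constant selector such as { 0, 4, 1, 5 } asks
   expand_vec_perm to interleave the low halves of op0 and op1, since
   selector lanes 0..3 index op0 and 4..7 index op1.  The CONSTANT_P
   check above only undoes the case where such a selector has been
   folded into a plain integer constant on targets that lack integer
   vector modes.  */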
9204 case DOT_PROD_EXPR:
9206 tree oprnd0 = treeop0;
9207 tree oprnd1 = treeop1;
9208 tree oprnd2 = treeop2;
9209 rtx op2;
9211 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9212 op2 = expand_normal (oprnd2);
9213 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9214 target, unsignedp);
9215 return target;
9218 case SAD_EXPR:
9220 tree oprnd0 = treeop0;
9221 tree oprnd1 = treeop1;
9222 tree oprnd2 = treeop2;
9223 rtx op2;
9225 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9226 op2 = expand_normal (oprnd2);
9227 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9228 target, unsignedp);
9229 return target;
9232 case REALIGN_LOAD_EXPR:
9234 tree oprnd0 = treeop0;
9235 tree oprnd1 = treeop1;
9236 tree oprnd2 = treeop2;
9237 rtx op2;
9239 this_optab = optab_for_tree_code (code, type, optab_default);
9240 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9241 op2 = expand_normal (oprnd2);
9242 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9243 target, unsignedp);
9244 gcc_assert (temp);
9245 return temp;
9248 case COND_EXPR:
9250 /* A COND_EXPR with its type being VOID_TYPE represents a
9251 conditional jump and is handled in
9252 expand_gimple_cond_expr. */
9253 gcc_assert (!VOID_TYPE_P (type));
9255 /* Note that COND_EXPRs whose type is a structure or union
9256 are required to be constructed to contain assignments of
9257 a temporary variable, so that we can evaluate them here
9258 for side effect only. If type is void, we must do likewise. */
9260 gcc_assert (!TREE_ADDRESSABLE (type)
9261 && !ignore
9262 && TREE_TYPE (treeop1) != void_type_node
9263 && TREE_TYPE (treeop2) != void_type_node);
9265 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9266 if (temp)
9267 return temp;
9269 /* If we are not to produce a result, we have no target. Otherwise,
9270 if a target was specified use it; it will not be used as an
9271 intermediate target unless it is safe. If no target, use a
9272 temporary. */
9274 if (modifier != EXPAND_STACK_PARM
9275 && original_target
9276 && safe_from_p (original_target, treeop0, 1)
9277 && GET_MODE (original_target) == mode
9278 && !MEM_P (original_target))
9279 temp = original_target;
9280 else
9281 temp = assign_temp (type, 0, 1);
9283 do_pending_stack_adjust ();
9284 NO_DEFER_POP;
9285 rtx_code_label *lab0 = gen_label_rtx ();
9286 rtx_code_label *lab1 = gen_label_rtx ();
9287 jumpifnot (treeop0, lab0, -1);
9288 store_expr (treeop1, temp,
9289 modifier == EXPAND_STACK_PARM,
9290 false);
9292 emit_jump_insn (targetm.gen_jump (lab1));
9293 emit_barrier ();
9294 emit_label (lab0);
9295 store_expr (treeop2, temp,
9296 modifier == EXPAND_STACK_PARM,
9297 false);
9299 emit_label (lab1);
9300 OK_DEFER_POP;
9301 return temp;
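/* Editor's sketch (illustrative only): when expand_cond_expr_using_cmove
   fails, the fallback above emits a diamond of the form

       if (!treeop0) goto lab0;
       temp = treeop1;  goto lab1;
     lab0:
       temp = treeop2;
     lab1:
       ...

   so both arms store into the same temporary and TEMP holds the
   selected value at LAB1.  */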
9304 case VEC_COND_EXPR:
9305 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9306 return target;
9308 default:
9309 gcc_unreachable ();
9312 /* Here to do an ordinary binary operator. */
9313 binop:
9314 expand_operands (treeop0, treeop1,
9315 subtarget, &op0, &op1, EXPAND_NORMAL);
9316 binop2:
9317 this_optab = optab_for_tree_code (code, type, optab_default);
9318 binop3:
9319 if (modifier == EXPAND_STACK_PARM)
9320 target = 0;
9321 temp = expand_binop (mode, this_optab, op0, op1, target,
9322 unsignedp, OPTAB_LIB_WIDEN);
9323 gcc_assert (temp);
9324 /* Bitwise operations do not need bitfield reduction as we expect their
9325 operands to be properly truncated. */
9326 if (code == BIT_XOR_EXPR
9327 || code == BIT_AND_EXPR
9328 || code == BIT_IOR_EXPR)
9329 return temp;
9330 return REDUCE_BIT_FIELD (temp);
9332 #undef REDUCE_BIT_FIELD
9335 /* Return TRUE if the statement STMT is suitable for replacement.
9336 Never consider memory loads as replaceable, because those never lead
9337 to constant expressions. */
9339 static bool
9340 stmt_is_replaceable_p (gimple *stmt)
9342 if (ssa_is_replaceable_p (stmt))
9344 /* Don't move around loads. */
9345 if (!gimple_assign_single_p (stmt)
9346 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9347 return true;
9349 return false;
9353 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9354 enum expand_modifier modifier, rtx *alt_rtl,
9355 bool inner_reference_p)
9357 rtx op0, op1, temp, decl_rtl;
9358 tree type;
9359 int unsignedp;
9360 machine_mode mode, dmode;
9361 enum tree_code code = TREE_CODE (exp);
9362 rtx subtarget, original_target;
9363 int ignore;
9364 tree context;
9365 bool reduce_bit_field;
9366 location_t loc = EXPR_LOCATION (exp);
9367 struct separate_ops ops;
9368 tree treeop0, treeop1, treeop2;
9369 tree ssa_name = NULL_TREE;
9370 gimple *g;
9372 type = TREE_TYPE (exp);
9373 mode = TYPE_MODE (type);
9374 unsignedp = TYPE_UNSIGNED (type);
9376 treeop0 = treeop1 = treeop2 = NULL_TREE;
9377 if (!VL_EXP_CLASS_P (exp))
9378 switch (TREE_CODE_LENGTH (code))
9380 default:
9381 case 3: treeop2 = TREE_OPERAND (exp, 2);
9382 case 2: treeop1 = TREE_OPERAND (exp, 1);
9383 case 1: treeop0 = TREE_OPERAND (exp, 0);
9384 case 0: break;
9386 ops.code = code;
9387 ops.type = type;
9388 ops.op0 = treeop0;
9389 ops.op1 = treeop1;
9390 ops.op2 = treeop2;
9391 ops.location = loc;
9393 ignore = (target == const0_rtx
9394 || ((CONVERT_EXPR_CODE_P (code)
9395 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9396 && TREE_CODE (type) == VOID_TYPE));
9398 /* An operation in what may be a bit-field type needs the
9399 result to be reduced to the precision of the bit-field type,
9400 which is narrower than that of the type's mode. */
9401 reduce_bit_field = (!ignore
9402 && INTEGRAL_TYPE_P (type)
9403 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9405 /* If we are going to ignore this result, we need only do something
9406 if there is a side-effect somewhere in the expression. If there
9407 is, short-circuit the most common cases here. Note that we must
9408 not call expand_expr with anything but const0_rtx in case this
9409 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9411 if (ignore)
9413 if (! TREE_SIDE_EFFECTS (exp))
9414 return const0_rtx;
9416 /* Ensure we reference a volatile object even if value is ignored, but
9417 don't do this if all we are doing is taking its address. */
9418 if (TREE_THIS_VOLATILE (exp)
9419 && TREE_CODE (exp) != FUNCTION_DECL
9420 && mode != VOIDmode && mode != BLKmode
9421 && modifier != EXPAND_CONST_ADDRESS)
9423 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9424 if (MEM_P (temp))
9425 copy_to_reg (temp);
9426 return const0_rtx;
9429 if (TREE_CODE_CLASS (code) == tcc_unary
9430 || code == BIT_FIELD_REF
9431 || code == COMPONENT_REF
9432 || code == INDIRECT_REF)
9433 return expand_expr (treeop0, const0_rtx, VOIDmode,
9434 modifier);
9436 else if (TREE_CODE_CLASS (code) == tcc_binary
9437 || TREE_CODE_CLASS (code) == tcc_comparison
9438 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9440 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9441 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9442 return const0_rtx;
9445 target = 0;
9448 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9449 target = 0;
9451 /* Use subtarget as the target for operand 0 of a binary operation. */
9452 subtarget = get_subtarget (target);
9453 original_target = target;
9455 switch (code)
9457 case LABEL_DECL:
9459 tree function = decl_function_context (exp);
9461 temp = label_rtx (exp);
9462 temp = gen_rtx_LABEL_REF (Pmode, temp);
9464 if (function != current_function_decl
9465 && function != 0)
9466 LABEL_REF_NONLOCAL_P (temp) = 1;
9468 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9469 return temp;
9472 case SSA_NAME:
9473 /* ??? ivopts calls expander, without any preparation from
9474 out-of-ssa. So fake instructions as if this was an access to the
9475 base variable. This unnecessarily allocates a pseudo, see how we can
9476 reuse it, if partition base vars have it set already. */
9477 if (!currently_expanding_to_rtl)
9479 tree var = SSA_NAME_VAR (exp);
9480 if (var && DECL_RTL_SET_P (var))
9481 return DECL_RTL (var);
9482 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9483 LAST_VIRTUAL_REGISTER + 1);
9486 g = get_gimple_for_ssa_name (exp);
9487 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9488 if (g == NULL
9489 && modifier == EXPAND_INITIALIZER
9490 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9491 && (optimize || !SSA_NAME_VAR (exp)
9492 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9493 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9494 g = SSA_NAME_DEF_STMT (exp);
9495 if (g)
9497 rtx r;
9498 location_t saved_loc = curr_insn_location ();
9499 location_t loc = gimple_location (g);
9500 if (loc != UNKNOWN_LOCATION)
9501 set_curr_insn_location (loc);
9502 ops.code = gimple_assign_rhs_code (g);
9503 switch (get_gimple_rhs_class (ops.code))
9505 case GIMPLE_TERNARY_RHS:
9506 ops.op2 = gimple_assign_rhs3 (g);
9507 /* Fallthru */
9508 case GIMPLE_BINARY_RHS:
9509 ops.op1 = gimple_assign_rhs2 (g);
9511 /* Try to expand conditional compare. */
9512 if (targetm.gen_ccmp_first)
9514 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9515 r = expand_ccmp_expr (g);
9516 if (r)
9517 break;
9519 /* Fallthru */
9520 case GIMPLE_UNARY_RHS:
9521 ops.op0 = gimple_assign_rhs1 (g);
9522 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9523 ops.location = loc;
9524 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9525 break;
9526 case GIMPLE_SINGLE_RHS:
9528 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9529 tmode, modifier, NULL, inner_reference_p);
9530 break;
9532 default:
9533 gcc_unreachable ();
9535 set_curr_insn_location (saved_loc);
9536 if (REG_P (r) && !REG_EXPR (r))
9537 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9538 return r;
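/* Editor's note (illustrative, not from the original source): this path
   expands an SSA name directly from its single defining assignment, so
   for

     _5 = a_2 + b_3;
     ... single use of _5 ...

   the use re-expands "a_2 + b_3" in place instead of first spilling the
   sum into a pseudo, which is what lets paths such as the
   conditional-compare expansion above see the whole expression.  */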
9541 ssa_name = exp;
9542 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9543 exp = SSA_NAME_VAR (ssa_name);
9544 goto expand_decl_rtl;
9546 case PARM_DECL:
9547 case VAR_DECL:
9548 /* If a static var's type was incomplete when the decl was written,
9549 but the type is complete now, lay out the decl now. */
9550 if (DECL_SIZE (exp) == 0
9551 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9552 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9553 layout_decl (exp, 0);
9555 /* ... fall through ... */
9557 case FUNCTION_DECL:
9558 case RESULT_DECL:
9559 decl_rtl = DECL_RTL (exp);
9560 expand_decl_rtl:
9561 gcc_assert (decl_rtl);
9562 decl_rtl = copy_rtx (decl_rtl);
9563 /* Record writes to register variables. */
9564 if (modifier == EXPAND_WRITE
9565 && REG_P (decl_rtl)
9566 && HARD_REGISTER_P (decl_rtl))
9567 add_to_hard_reg_set (&crtl->asm_clobbers,
9568 GET_MODE (decl_rtl), REGNO (decl_rtl));
9570 /* Ensure the variable is marked as used even if it doesn't go through
9571 a parser. If it hasn't been used yet, write out an external
9572 definition. */
9573 if (exp)
9574 TREE_USED (exp) = 1;
9576 /* Show we haven't gotten RTL for this yet. */
9577 temp = 0;
9579 /* Variables inherited from containing functions should have
9580 been lowered by this point. */
9581 if (exp)
9582 context = decl_function_context (exp);
9583 gcc_assert (!exp
9584 || SCOPE_FILE_SCOPE_P (context)
9585 || context == current_function_decl
9586 || TREE_STATIC (exp)
9587 || DECL_EXTERNAL (exp)
9588 /* ??? C++ creates functions that are not TREE_STATIC. */
9589 || TREE_CODE (exp) == FUNCTION_DECL);
9591 /* This is the case of an array whose size is to be determined
9592 from its initializer, while the initializer is still being parsed.
9593 ??? We aren't parsing while expanding anymore. */
9595 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9596 temp = validize_mem (decl_rtl);
9598 /* If DECL_RTL is memory, we are in the normal case and the
9599 address is not valid, get the address into a register. */
9601 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9603 if (alt_rtl)
9604 *alt_rtl = decl_rtl;
9605 decl_rtl = use_anchored_address (decl_rtl);
9606 if (modifier != EXPAND_CONST_ADDRESS
9607 && modifier != EXPAND_SUM
9608 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9609 : GET_MODE (decl_rtl),
9610 XEXP (decl_rtl, 0),
9611 MEM_ADDR_SPACE (decl_rtl)))
9612 temp = replace_equiv_address (decl_rtl,
9613 copy_rtx (XEXP (decl_rtl, 0)));
9616 /* If we got something, return it. But first, set the alignment
9617 if the address is a register. */
9618 if (temp != 0)
9620 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9621 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9623 return temp;
9626 if (exp)
9627 dmode = DECL_MODE (exp);
9628 else
9629 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9631 /* If the mode of DECL_RTL does not match that of the decl,
9632 there are two cases: we are dealing with a BLKmode value
9633 that is returned in a register, or we are dealing with
9634 a promoted value. In the latter case, return a SUBREG
9635 of the wanted mode, but mark it so that we know that it
9636 was already extended. */
9637 if (REG_P (decl_rtl)
9638 && dmode != BLKmode
9639 && GET_MODE (decl_rtl) != dmode)
9641 machine_mode pmode;
9643 /* Get the signedness to be used for this variable. Ensure we get
9644 the same mode we got when the variable was declared. */
9645 if (code != SSA_NAME)
9646 pmode = promote_decl_mode (exp, &unsignedp);
9647 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9648 && gimple_code (g) == GIMPLE_CALL
9649 && !gimple_call_internal_p (g))
9650 pmode = promote_function_mode (type, mode, &unsignedp,
9651 gimple_call_fntype (g),
9653 else
9654 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9655 gcc_assert (GET_MODE (decl_rtl) == pmode);
9657 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9658 SUBREG_PROMOTED_VAR_P (temp) = 1;
9659 SUBREG_PROMOTED_SET (temp, unsignedp);
9660 return temp;
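/* Editor's note (illustrative): on targets that promote narrow values,
   e.g. an HImode variable kept in an SImode register, DECL_RTL has the
   wider mode; the code above hands back a lowpart SUBREG of it with
   SUBREG_PROMOTED_VAR_P set so later conversions know the upper bits
   are already sign- or zero-extended as recorded by
   SUBREG_PROMOTED_SET.  */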
9663 return decl_rtl;
9665 case INTEGER_CST:
9666 /* Given that TYPE_PRECISION (type) is not always equal to
9667 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9668 the former to the latter according to the signedness of the
9669 type. */
9670 temp = immed_wide_int_const (wide_int::from
9671 (exp,
9672 GET_MODE_PRECISION (TYPE_MODE (type)),
9673 TYPE_SIGN (type)),
9674 TYPE_MODE (type));
9675 return temp;
9677 case VECTOR_CST:
9679 tree tmp = NULL_TREE;
9680 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9681 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9682 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9683 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9684 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9685 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9686 return const_vector_from_tree (exp);
9687 if (GET_MODE_CLASS (mode) == MODE_INT)
9689 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9690 if (type_for_mode)
9691 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9693 if (!tmp)
9695 vec<constructor_elt, va_gc> *v;
9696 unsigned i;
9697 vec_alloc (v, VECTOR_CST_NELTS (exp));
9698 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9699 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9700 tmp = build_constructor (type, v);
9702 return expand_expr (tmp, ignore ? const0_rtx : target,
9703 tmode, modifier);
9706 case CONST_DECL:
9707 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9709 case REAL_CST:
9710 /* If optimized, generate immediate CONST_DOUBLE
9711 which will be turned into memory by reload if necessary.
9713 We used to force a register so that loop.c could see it. But
9714 this does not allow gen_* patterns to perform optimizations with
9715 the constants. It also produces two insns in cases like "x = 1.0;".
9716 On most machines, floating-point constants are not permitted in
9717 many insns, so we'd end up copying it to a register in any case.
9719 Now, we do the copying in expand_binop, if appropriate. */
9720 return const_double_from_real_value (TREE_REAL_CST (exp),
9721 TYPE_MODE (TREE_TYPE (exp)));
9723 case FIXED_CST:
9724 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9725 TYPE_MODE (TREE_TYPE (exp)));
9727 case COMPLEX_CST:
9728 /* Handle evaluating a complex constant in a CONCAT target. */
9729 if (original_target && GET_CODE (original_target) == CONCAT)
9731 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9732 rtx rtarg, itarg;
9734 rtarg = XEXP (original_target, 0);
9735 itarg = XEXP (original_target, 1);
9737 /* Move the real and imaginary parts separately. */
9738 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9739 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9741 if (op0 != rtarg)
9742 emit_move_insn (rtarg, op0);
9743 if (op1 != itarg)
9744 emit_move_insn (itarg, op1);
9746 return original_target;
9749 /* ... fall through ... */
9751 case STRING_CST:
9752 temp = expand_expr_constant (exp, 1, modifier);
9754 /* temp contains a constant address.
9755 On RISC machines where a constant address isn't valid,
9756 make some insns to get that address into a register. */
9757 if (modifier != EXPAND_CONST_ADDRESS
9758 && modifier != EXPAND_INITIALIZER
9759 && modifier != EXPAND_SUM
9760 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9761 MEM_ADDR_SPACE (temp)))
9762 return replace_equiv_address (temp,
9763 copy_rtx (XEXP (temp, 0)));
9764 return temp;
9766 case SAVE_EXPR:
9768 tree val = treeop0;
9769 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9770 inner_reference_p);
9772 if (!SAVE_EXPR_RESOLVED_P (exp))
9774 /* We can indeed still hit this case, typically via builtin
9775 expanders calling save_expr immediately before expanding
9776 something. Assume this means that we only have to deal
9777 with non-BLKmode values. */
9778 gcc_assert (GET_MODE (ret) != BLKmode);
9780 val = build_decl (curr_insn_location (),
9781 VAR_DECL, NULL, TREE_TYPE (exp));
9782 DECL_ARTIFICIAL (val) = 1;
9783 DECL_IGNORED_P (val) = 1;
9784 treeop0 = val;
9785 TREE_OPERAND (exp, 0) = treeop0;
9786 SAVE_EXPR_RESOLVED_P (exp) = 1;
9788 if (!CONSTANT_P (ret))
9789 ret = copy_to_reg (ret);
9790 SET_DECL_RTL (val, ret);
9793 return ret;
9797 case CONSTRUCTOR:
9798 /* If we don't need the result, just ensure we evaluate any
9799 subexpressions. */
9800 if (ignore)
9802 unsigned HOST_WIDE_INT idx;
9803 tree value;
9805 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9806 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9808 return const0_rtx;
9811 return expand_constructor (exp, target, modifier, false);
9813 case TARGET_MEM_REF:
9815 addr_space_t as
9816 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9817 enum insn_code icode;
9818 unsigned int align;
9820 op0 = addr_for_mem_ref (exp, as, true);
9821 op0 = memory_address_addr_space (mode, op0, as);
9822 temp = gen_rtx_MEM (mode, op0);
9823 set_mem_attributes (temp, exp, 0);
9824 set_mem_addr_space (temp, as);
9825 align = get_object_alignment (exp);
9826 if (modifier != EXPAND_WRITE
9827 && modifier != EXPAND_MEMORY
9828 && mode != BLKmode
9829 && align < GET_MODE_ALIGNMENT (mode)
9830 /* If the target does not have special handling for unaligned
9831 loads of this mode, then it can use regular moves for them. */
9832 && ((icode = optab_handler (movmisalign_optab, mode))
9833 != CODE_FOR_nothing))
9835 struct expand_operand ops[2];
9837 /* We've already validated the memory, and we're creating a
9838 new pseudo destination. The predicates really can't fail,
9839 nor can the generator. */
9840 create_output_operand (&ops[0], NULL_RTX, mode);
9841 create_fixed_operand (&ops[1], temp);
9842 expand_insn (icode, 2, ops);
9843 temp = ops[0].value;
9845 return temp;
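/* Editor's note (illustrative assumption): the movmisalign path above is
   what turns an under-aligned load of a vector mode into the target's
   unaligned-load pattern (an SSE movdqu-style move on x86, for
   instance); targets without such a pattern fall through and the plain
   MEM is returned unchanged.  */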
9848 case MEM_REF:
9850 addr_space_t as
9851 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9852 machine_mode address_mode;
9853 tree base = TREE_OPERAND (exp, 0);
9854 gimple *def_stmt;
9855 enum insn_code icode;
9856 unsigned align;
9857 /* Handle expansion of non-aliased memory with non-BLKmode. That
9858 might end up in a register. */
9859 if (mem_ref_refers_to_non_mem_p (exp))
9861 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9862 base = TREE_OPERAND (base, 0);
9863 if (offset == 0
9864 && tree_fits_uhwi_p (TYPE_SIZE (type))
9865 && (GET_MODE_BITSIZE (DECL_MODE (base))
9866 == tree_to_uhwi (TYPE_SIZE (type))))
9867 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9868 target, tmode, modifier);
9869 if (TYPE_MODE (type) == BLKmode)
9871 temp = assign_stack_temp (DECL_MODE (base),
9872 GET_MODE_SIZE (DECL_MODE (base)));
9873 store_expr (base, temp, 0, false);
9874 temp = adjust_address (temp, BLKmode, offset);
9875 set_mem_size (temp, int_size_in_bytes (type));
9876 return temp;
9878 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9879 bitsize_int (offset * BITS_PER_UNIT));
9880 return expand_expr (exp, target, tmode, modifier);
9882 address_mode = targetm.addr_space.address_mode (as);
9883 base = TREE_OPERAND (exp, 0);
9884 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9886 tree mask = gimple_assign_rhs2 (def_stmt);
9887 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9888 gimple_assign_rhs1 (def_stmt), mask);
9889 TREE_OPERAND (exp, 0) = base;
9891 align = get_object_alignment (exp);
9892 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9893 op0 = memory_address_addr_space (mode, op0, as);
9894 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9896 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9897 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9898 op0 = memory_address_addr_space (mode, op0, as);
9900 temp = gen_rtx_MEM (mode, op0);
9901 set_mem_attributes (temp, exp, 0);
9902 set_mem_addr_space (temp, as);
9903 if (TREE_THIS_VOLATILE (exp))
9904 MEM_VOLATILE_P (temp) = 1;
9905 if (modifier != EXPAND_WRITE
9906 && modifier != EXPAND_MEMORY
9907 && !inner_reference_p
9908 && mode != BLKmode
9909 && align < GET_MODE_ALIGNMENT (mode))
9911 if ((icode = optab_handler (movmisalign_optab, mode))
9912 != CODE_FOR_nothing)
9914 struct expand_operand ops[2];
9916 /* We've already validated the memory, and we're creating a
9917 new pseudo destination. The predicates really can't fail,
9918 nor can the generator. */
9919 create_output_operand (&ops[0], NULL_RTX, mode);
9920 create_fixed_operand (&ops[1], temp);
9921 expand_insn (icode, 2, ops);
9922 temp = ops[0].value;
9924 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9925 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9926 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9927 (modifier == EXPAND_STACK_PARM
9928 ? NULL_RTX : target),
9929 mode, mode);
9931 return temp;
9934 case ARRAY_REF:
9937 tree array = treeop0;
9938 tree index = treeop1;
9939 tree init;
9941 /* Fold an expression like: "foo"[2].
9942 This is not done in fold so it won't happen inside &.
9943 Don't fold if this is for wide characters since it's too
9944 difficult to do correctly and this is a very rare case. */
9946 if (modifier != EXPAND_CONST_ADDRESS
9947 && modifier != EXPAND_INITIALIZER
9948 && modifier != EXPAND_MEMORY)
9950 tree t = fold_read_from_constant_string (exp);
9952 if (t)
9953 return expand_expr (t, target, tmode, modifier);
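/* Editor's example (illustrative, not in the original source): the fold
   above turns a read such as "foo"[2] directly into the character
   constant 'o', so no memory reference is emitted for it; the
   CONSTRUCTOR and STRING_CST cases below do the same for constant
   indices into const arrays with known initializers.  */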
9956 /* If this is a constant index into a constant array,
9957 just get the value from the array. Handle both the cases when
9958 we have an explicit constructor and when our operand is a variable
9959 that was declared const. */
9961 if (modifier != EXPAND_CONST_ADDRESS
9962 && modifier != EXPAND_INITIALIZER
9963 && modifier != EXPAND_MEMORY
9964 && TREE_CODE (array) == CONSTRUCTOR
9965 && ! TREE_SIDE_EFFECTS (array)
9966 && TREE_CODE (index) == INTEGER_CST)
9968 unsigned HOST_WIDE_INT ix;
9969 tree field, value;
9971 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9972 field, value)
9973 if (tree_int_cst_equal (field, index))
9975 if (!TREE_SIDE_EFFECTS (value))
9976 return expand_expr (fold (value), target, tmode, modifier);
9977 break;
9981 else if (optimize >= 1
9982 && modifier != EXPAND_CONST_ADDRESS
9983 && modifier != EXPAND_INITIALIZER
9984 && modifier != EXPAND_MEMORY
9985 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9986 && TREE_CODE (index) == INTEGER_CST
9987 && (TREE_CODE (array) == VAR_DECL
9988 || TREE_CODE (array) == CONST_DECL)
9989 && (init = ctor_for_folding (array)) != error_mark_node)
9991 if (init == NULL_TREE)
9993 tree value = build_zero_cst (type);
9994 if (TREE_CODE (value) == CONSTRUCTOR)
9996 /* If VALUE is a CONSTRUCTOR, this optimization is only
9997 useful if this doesn't store the CONSTRUCTOR into
9998 memory. If it does, it is more efficient to just
9999 load the data from the array directly. */
10000 rtx ret = expand_constructor (value, target,
10001 modifier, true);
10002 if (ret == NULL_RTX)
10003 value = NULL_TREE;
10006 if (value)
10007 return expand_expr (value, target, tmode, modifier);
10009 else if (TREE_CODE (init) == CONSTRUCTOR)
10011 unsigned HOST_WIDE_INT ix;
10012 tree field, value;
10014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10015 field, value)
10016 if (tree_int_cst_equal (field, index))
10018 if (TREE_SIDE_EFFECTS (value))
10019 break;
10021 if (TREE_CODE (value) == CONSTRUCTOR)
10023 /* If VALUE is a CONSTRUCTOR, this
10024 optimization is only useful if
10025 this doesn't store the CONSTRUCTOR
10026 into memory. If it does, it is more
10027 efficient to just load the data from
10028 the array directly. */
10029 rtx ret = expand_constructor (value, target,
10030 modifier, true);
10031 if (ret == NULL_RTX)
10032 break;
10035 return
10036 expand_expr (fold (value), target, tmode, modifier);
10039 else if (TREE_CODE (init) == STRING_CST)
10041 tree low_bound = array_ref_low_bound (exp);
10042 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10044 /* Optimize the special case of a zero lower bound.
10046 We convert the lower bound to sizetype to avoid problems
10047 with constant folding. E.g. suppose the lower bound is
10048 1 and its mode is QI. Without the conversion
10049 (ARRAY + (INDEX - (unsigned char)1))
10050 becomes
10051 (ARRAY + (-(unsigned char)1) + INDEX)
10052 which becomes
10053 (ARRAY + 255 + INDEX). Oops! */
10054 if (!integer_zerop (low_bound))
10055 index1 = size_diffop_loc (loc, index1,
10056 fold_convert_loc (loc, sizetype,
10057 low_bound));
10059 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10061 tree type = TREE_TYPE (TREE_TYPE (init));
10062 machine_mode mode = TYPE_MODE (type);
10064 if (GET_MODE_CLASS (mode) == MODE_INT
10065 && GET_MODE_SIZE (mode) == 1)
10066 return gen_int_mode (TREE_STRING_POINTER (init)
10067 [TREE_INT_CST_LOW (index1)],
10068 mode);
10073 goto normal_inner_ref;
10075 case COMPONENT_REF:
10076 /* If the operand is a CONSTRUCTOR, we can just extract the
10077 appropriate field if it is present. */
10078 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10080 unsigned HOST_WIDE_INT idx;
10081 tree field, value;
10083 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10084 idx, field, value)
10085 if (field == treeop1
10086 /* We can normally use the value of the field in the
10087 CONSTRUCTOR. However, if this is a bitfield in
10088 an integral mode that we can fit in a HOST_WIDE_INT,
10089 we must mask only the number of bits in the bitfield,
10090 since this is done implicitly by the constructor. If
10091 the bitfield does not meet either of those conditions,
10092 we can't do this optimization. */
10093 && (! DECL_BIT_FIELD (field)
10094 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10095 && (GET_MODE_PRECISION (DECL_MODE (field))
10096 <= HOST_BITS_PER_WIDE_INT))))
10098 if (DECL_BIT_FIELD (field)
10099 && modifier == EXPAND_STACK_PARM)
10100 target = 0;
10101 op0 = expand_expr (value, target, tmode, modifier);
10102 if (DECL_BIT_FIELD (field))
10104 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10105 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10107 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10109 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10110 imode);
10111 op0 = expand_and (imode, op0, op1, target);
10113 else
10115 int count = GET_MODE_PRECISION (imode) - bitsize;
10117 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10118 target, 0);
10119 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10120 target, 0);
10124 return op0;
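/* Editor's worked example (illustrative only): for a 3-bit field pulled
   out of a CONSTRUCTOR, the code above masks an unsigned field with
   (1 << 3) - 1 == 7, while a signed field is shifted left by
   GET_MODE_PRECISION (imode) - 3 and arithmetically back right so the
   value ends up properly sign-extended.  */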
10127 goto normal_inner_ref;
10129 case BIT_FIELD_REF:
10130 case ARRAY_RANGE_REF:
10131 normal_inner_ref:
10133 machine_mode mode1, mode2;
10134 HOST_WIDE_INT bitsize, bitpos;
10135 tree offset;
10136 int volatilep = 0, must_force_mem;
10137 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10138 &mode1, &unsignedp, &volatilep, true);
10139 rtx orig_op0, memloc;
10140 bool clear_mem_expr = false;
10142 /* If we got back the original object, something is wrong. Perhaps
10143 we are evaluating an expression too early. In any event, don't
10144 infinitely recurse. */
10145 gcc_assert (tem != exp);
10147 /* If TEM's type is a union of variable size, pass TARGET to the inner
10148 computation, since it will need a temporary and TARGET is known
10149 to have to do. This occurs in unchecked conversion in Ada. */
10150 orig_op0 = op0
10151 = expand_expr_real (tem,
10152 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10153 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10154 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10155 != INTEGER_CST)
10156 && modifier != EXPAND_STACK_PARM
10157 ? target : NULL_RTX),
10158 VOIDmode,
10159 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10160 NULL, true);
10162 /* If the field has a mode, we want to access it in the
10163 field's mode, not the computed mode.
10164 If a MEM has VOIDmode (external with incomplete type),
10165 use BLKmode for it instead. */
10166 if (MEM_P (op0))
10168 if (mode1 != VOIDmode)
10169 op0 = adjust_address (op0, mode1, 0);
10170 else if (GET_MODE (op0) == VOIDmode)
10171 op0 = adjust_address (op0, BLKmode, 0);
10174 mode2
10175 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10177 /* If we have either an offset, a BLKmode result, or a reference
10178 outside the underlying object, we must force it to memory.
10179 Such a case can occur in Ada if we have unchecked conversion
10180 of an expression from a scalar type to an aggregate type or
10181 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10182 passed a partially uninitialized object or a view-conversion
10183 to a larger size. */
10184 must_force_mem = (offset
10185 || mode1 == BLKmode
10186 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10188 /* Handle CONCAT first. */
10189 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10191 if (bitpos == 0
10192 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10193 return op0;
10194 if (bitpos == 0
10195 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10196 && bitsize)
10198 op0 = XEXP (op0, 0);
10199 mode2 = GET_MODE (op0);
10201 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10202 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10203 && bitpos
10204 && bitsize)
10206 op0 = XEXP (op0, 1);
10207 bitpos = 0;
10208 mode2 = GET_MODE (op0);
10210 else
10211 /* Otherwise force into memory. */
10212 must_force_mem = 1;
10215 /* If this is a constant, put it in a register if it is a legitimate
10216 constant and we don't need a memory reference. */
10217 if (CONSTANT_P (op0)
10218 && mode2 != BLKmode
10219 && targetm.legitimate_constant_p (mode2, op0)
10220 && !must_force_mem)
10221 op0 = force_reg (mode2, op0);
10223 /* Otherwise, if this is a constant, try to force it to the constant
10224 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10225 is a legitimate constant. */
10226 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10227 op0 = validize_mem (memloc);
10229 /* Otherwise, if this is a constant or the object is not in memory
10230 and need be, put it there. */
10231 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10233 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10234 emit_move_insn (memloc, op0);
10235 op0 = memloc;
10236 clear_mem_expr = true;
10239 if (offset)
10241 machine_mode address_mode;
10242 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10243 EXPAND_SUM);
10245 gcc_assert (MEM_P (op0));
10247 address_mode = get_address_mode (op0);
10248 if (GET_MODE (offset_rtx) != address_mode)
10250 /* We cannot be sure that the RTL in offset_rtx is valid outside
10251 of a memory address context, so force it into a register
10252 before attempting to convert it to the desired mode. */
10253 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10254 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10257 /* See the comment in expand_assignment for the rationale. */
10258 if (mode1 != VOIDmode
10259 && bitpos != 0
10260 && bitsize > 0
10261 && (bitpos % bitsize) == 0
10262 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10263 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10265 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10266 bitpos = 0;
10269 op0 = offset_address (op0, offset_rtx,
10270 highest_pow2_factor (offset));
10273 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10274 record its alignment as BIGGEST_ALIGNMENT. */
10275 if (MEM_P (op0) && bitpos == 0 && offset != 0
10276 && is_aligning_offset (offset, tem))
10277 set_mem_align (op0, BIGGEST_ALIGNMENT);
10279 /* Don't forget about volatility even if this is a bitfield. */
10280 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10282 if (op0 == orig_op0)
10283 op0 = copy_rtx (op0);
10285 MEM_VOLATILE_P (op0) = 1;
10288 /* In cases where an aligned union has an unaligned object
10289 as a field, we might be extracting a BLKmode value from
10290 an integer-mode (e.g., SImode) object. Handle this case
10291 by doing the extract into an object as wide as the field
10292 (which we know to be the width of a basic mode), then
10293 storing into memory, and changing the mode to BLKmode. */
10294 if (mode1 == VOIDmode
10295 || REG_P (op0) || GET_CODE (op0) == SUBREG
10296 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10297 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10298 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10299 && modifier != EXPAND_CONST_ADDRESS
10300 && modifier != EXPAND_INITIALIZER
10301 && modifier != EXPAND_MEMORY)
10302 /* If the bitfield is volatile and the bitsize
10303 is narrower than the access size of the bitfield,
10304 we need to extract bitfields from the access. */
10305 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10306 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10307 && mode1 != BLKmode
10308 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10309 /* If the field isn't aligned enough to fetch as a memref,
10310 fetch it as a bit field. */
10311 || (mode1 != BLKmode
10312 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10313 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10314 || (MEM_P (op0)
10315 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10316 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10317 && modifier != EXPAND_MEMORY
10318 && ((modifier == EXPAND_CONST_ADDRESS
10319 || modifier == EXPAND_INITIALIZER)
10320 ? STRICT_ALIGNMENT
10321 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10322 || (bitpos % BITS_PER_UNIT != 0)))
10323 /* If the type and the field are a constant size and the
10324 size of the type isn't the same size as the bitfield,
10325 we must use bitfield operations. */
10326 || (bitsize >= 0
10327 && TYPE_SIZE (TREE_TYPE (exp))
10328 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10329 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10330 bitsize)))
10332 machine_mode ext_mode = mode;
10334 if (ext_mode == BLKmode
10335 && ! (target != 0 && MEM_P (op0)
10336 && MEM_P (target)
10337 && bitpos % BITS_PER_UNIT == 0))
10338 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10340 if (ext_mode == BLKmode)
10342 if (target == 0)
10343 target = assign_temp (type, 1, 1);
10345 /* ??? Unlike the similar test a few lines below, this one is
10346 very likely obsolete. */
10347 if (bitsize == 0)
10348 return target;
10350 /* In this case, BITPOS must start at a byte boundary and
10351 TARGET, if specified, must be a MEM. */
10352 gcc_assert (MEM_P (op0)
10353 && (!target || MEM_P (target))
10354 && !(bitpos % BITS_PER_UNIT));
10356 emit_block_move (target,
10357 adjust_address (op0, VOIDmode,
10358 bitpos / BITS_PER_UNIT),
10359 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10360 / BITS_PER_UNIT),
10361 (modifier == EXPAND_STACK_PARM
10362 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10364 return target;
10367 /* If we have nothing to extract, the result will be 0 for targets
10368 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10369 return 0 for the sake of consistency, as reading a zero-sized
10370 bitfield is valid in Ada and the value is fully specified. */
10371 if (bitsize == 0)
10372 return const0_rtx;
10374 op0 = validize_mem (op0);
10376 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10377 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10379 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10380 (modifier == EXPAND_STACK_PARM
10381 ? NULL_RTX : target),
10382 ext_mode, ext_mode);
10384 /* If the result is a record type and BITSIZE is narrower than
10385 the mode of OP0, an integral mode, and this is a big endian
10386 machine, we must put the field into the high-order bits. */
10387 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10388 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10389 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10390 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10391 GET_MODE_BITSIZE (GET_MODE (op0))
10392 - bitsize, op0, 1);
10394 /* If the result type is BLKmode, store the data into a temporary
10395 of the appropriate type, but with the mode corresponding to the
10396 mode for the data we have (op0's mode). */
10397 if (mode == BLKmode)
10399 rtx new_rtx
10400 = assign_stack_temp_for_type (ext_mode,
10401 GET_MODE_BITSIZE (ext_mode),
10402 type);
10403 emit_move_insn (new_rtx, op0);
10404 op0 = copy_rtx (new_rtx);
10405 PUT_MODE (op0, BLKmode);
10408 return op0;
10411 /* If the result is BLKmode, use that to access the object
10412 now as well. */
10413 if (mode == BLKmode)
10414 mode1 = BLKmode;
10416 /* Get a reference to just this component. */
10417 if (modifier == EXPAND_CONST_ADDRESS
10418 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10419 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10420 else
10421 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10423 if (op0 == orig_op0)
10424 op0 = copy_rtx (op0);
10426 set_mem_attributes (op0, exp, 0);
10428 if (REG_P (XEXP (op0, 0)))
10429 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10431 /* If op0 is a temporary because the original expression was forced
10432 to memory, clear MEM_EXPR so that the original expression cannot
10433 be marked as addressable through MEM_EXPR of the temporary. */
10434 if (clear_mem_expr)
10435 set_mem_expr (op0, NULL_TREE);
10437 MEM_VOLATILE_P (op0) |= volatilep;
10438 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10439 || modifier == EXPAND_CONST_ADDRESS
10440 || modifier == EXPAND_INITIALIZER)
10441 return op0;
10443 if (target == 0)
10444 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10446 convert_move (target, op0, unsignedp);
10447 return target;
10450 case OBJ_TYPE_REF:
10451 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10453 case CALL_EXPR:
10454 /* All valid uses of __builtin_va_arg_pack () are removed during
10455 inlining. */
10456 if (CALL_EXPR_VA_ARG_PACK (exp))
10457 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10459 tree fndecl = get_callee_fndecl (exp), attr;
10461 if (fndecl
10462 && (attr = lookup_attribute ("error",
10463 DECL_ATTRIBUTES (fndecl))) != NULL)
10464 error ("%Kcall to %qs declared with attribute error: %s",
10465 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10466 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10467 if (fndecl
10468 && (attr = lookup_attribute ("warning",
10469 DECL_ATTRIBUTES (fndecl))) != NULL)
10470 warning_at (tree_nonartificial_location (exp),
10471 0, "%Kcall to %qs declared with attribute warning: %s",
10472 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10473 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10475 /* Check for a built-in function. */
10476 if (fndecl && DECL_BUILT_IN (fndecl))
10478 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10479 if (CALL_WITH_BOUNDS_P (exp))
10480 return expand_builtin_with_bounds (exp, target, subtarget,
10481 tmode, ignore);
10482 else
10483 return expand_builtin (exp, target, subtarget, tmode, ignore);
10486 return expand_call (exp, target, ignore);
10488 case VIEW_CONVERT_EXPR:
10489 op0 = NULL_RTX;
10491 /* If we are converting to BLKmode, try to avoid an intermediate
10492 temporary by fetching an inner memory reference. */
10493 if (mode == BLKmode
10494 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10495 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10496 && handled_component_p (treeop0))
10498 machine_mode mode1;
10499 HOST_WIDE_INT bitsize, bitpos;
10500 tree offset;
10501 int unsignedp;
10502 int volatilep = 0;
10503 tree tem
10504 = get_inner_reference (treeop0, &bitsize, &bitpos,
10505 &offset, &mode1, &unsignedp, &volatilep,
10506 true);
10507 rtx orig_op0;
10509 /* ??? We should work harder and deal with non-zero offsets. */
10510 if (!offset
10511 && (bitpos % BITS_PER_UNIT) == 0
10512 && bitsize >= 0
10513 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10515 /* See the normal_inner_ref case for the rationale. */
10516 orig_op0
10517 = expand_expr_real (tem,
10518 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10519 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10520 != INTEGER_CST)
10521 && modifier != EXPAND_STACK_PARM
10522 ? target : NULL_RTX),
10523 VOIDmode,
10524 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10525 NULL, true);
10527 if (MEM_P (orig_op0))
10529 op0 = orig_op0;
10531 /* Get a reference to just this component. */
10532 if (modifier == EXPAND_CONST_ADDRESS
10533 || modifier == EXPAND_SUM
10534 || modifier == EXPAND_INITIALIZER)
10535 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10536 else
10537 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10539 if (op0 == orig_op0)
10540 op0 = copy_rtx (op0);
10542 set_mem_attributes (op0, treeop0, 0);
10543 if (REG_P (XEXP (op0, 0)))
10544 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10546 MEM_VOLATILE_P (op0) |= volatilep;
10551 if (!op0)
10552 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10553 NULL, inner_reference_p);
10555 /* If the input and output modes are both the same, we are done. */
10556 if (mode == GET_MODE (op0))
10558 /* If neither mode is BLKmode, and both modes are the same size
10559 then we can use gen_lowpart. */
10560 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10561 && (GET_MODE_PRECISION (mode)
10562 == GET_MODE_PRECISION (GET_MODE (op0)))
10563 && !COMPLEX_MODE_P (GET_MODE (op0)))
10565 if (GET_CODE (op0) == SUBREG)
10566 op0 = force_reg (GET_MODE (op0), op0);
10567 temp = gen_lowpart_common (mode, op0);
10568 if (temp)
10569 op0 = temp;
10570 else
10572 if (!REG_P (op0) && !MEM_P (op0))
10573 op0 = force_reg (GET_MODE (op0), op0);
10574 op0 = gen_lowpart (mode, op0);
10577 /* If both types are integral, convert from one mode to the other. */
10578 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10579 op0 = convert_modes (mode, GET_MODE (op0), op0,
10580 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10581 /* If the output type is a bit-field type, do an extraction. */
10582 else if (reduce_bit_field)
10583 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10584 TYPE_UNSIGNED (type), NULL_RTX,
10585 mode, mode);
10586 /* As a last resort, spill op0 to memory, and reload it in a
10587 different mode. */
10588 else if (!MEM_P (op0))
10590 /* If the operand is not a MEM, force it into memory. Since we
10591 are going to be changing the mode of the MEM, don't call
10592 force_const_mem for constants because we don't allow pool
10593 constants to change mode. */
10594 tree inner_type = TREE_TYPE (treeop0);
10596 gcc_assert (!TREE_ADDRESSABLE (exp));
10598 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10599 target
10600 = assign_stack_temp_for_type
10601 (TYPE_MODE (inner_type),
10602 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10604 emit_move_insn (target, op0);
10605 op0 = target;
10608 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10609 output type is such that the operand is known to be aligned, indicate
10610 that it is. Otherwise, we need only be concerned about alignment for
10611 non-BLKmode results. */
10612 if (MEM_P (op0))
10614 enum insn_code icode;
10616 if (TYPE_ALIGN_OK (type))
10618 /* ??? Copying the MEM without substantially changing it might
10619 run afoul of the code handling volatile memory references in
10620 store_expr, which assumes that TARGET is returned unmodified
10621 if it has been used. */
10622 op0 = copy_rtx (op0);
10623 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10625 else if (modifier != EXPAND_WRITE
10626 && modifier != EXPAND_MEMORY
10627 && !inner_reference_p
10628 && mode != BLKmode
10629 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10631 /* If the target does have special handling for unaligned
10632 loads of this mode, then use them. */
10633 if ((icode = optab_handler (movmisalign_optab, mode))
10634 != CODE_FOR_nothing)
10636 rtx reg;
10638 op0 = adjust_address (op0, mode, 0);
10639 /* We've already validated the memory, and we're creating a
10640 new pseudo destination. The predicates really can't
10641 fail. */
10642 reg = gen_reg_rtx (mode);
10644 /* Nor can the insn generator. */
10645 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10646 emit_insn (insn);
10647 return reg;
10649 else if (STRICT_ALIGNMENT)
10651 tree inner_type = TREE_TYPE (treeop0);
10652 HOST_WIDE_INT temp_size
10653 = MAX (int_size_in_bytes (inner_type),
10654 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10655 rtx new_rtx
10656 = assign_stack_temp_for_type (mode, temp_size, type);
10657 rtx new_with_op0_mode
10658 = adjust_address (new_rtx, GET_MODE (op0), 0);
10660 gcc_assert (!TREE_ADDRESSABLE (exp));
10662 if (GET_MODE (op0) == BLKmode)
10663 emit_block_move (new_with_op0_mode, op0,
10664 GEN_INT (GET_MODE_SIZE (mode)),
10665 (modifier == EXPAND_STACK_PARM
10666 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10667 else
10668 emit_move_insn (new_with_op0_mode, op0);
10670 op0 = new_rtx;
10674 op0 = adjust_address (op0, mode, 0);
10677 return op0;
10679 case MODIFY_EXPR:
10681 tree lhs = treeop0;
10682 tree rhs = treeop1;
10683 gcc_assert (ignore);
10685 /* Check for |= or &= of a bitfield of size one into another bitfield
10686 of size 1. In this case, (unless we need the result of the
10687 assignment) we can do this more efficiently with a
10688 test followed by an assignment, if necessary.
10690 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10691 things change so we do, this code should be enhanced to
10692 support it. */
10693 if (TREE_CODE (lhs) == COMPONENT_REF
10694 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10695 || TREE_CODE (rhs) == BIT_AND_EXPR)
10696 && TREE_OPERAND (rhs, 0) == lhs
10697 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10698 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10699 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10701 rtx_code_label *label = gen_label_rtx ();
10702 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10703 do_jump (TREE_OPERAND (rhs, 1),
10704 value ? label : 0,
10705 value ? 0 : label, -1);
10706 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10707 false);
10708 do_pending_stack_adjust ();
10709 emit_label (label);
10710 return const0_rtx;
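/* Editor's sketch (illustrative): for one-bit fields the special case
   above turns "s.a |= t.b;" into "if (t.b) s.a = 1;" and "s.a &= t.b;"
   into "if (!t.b) s.a = 0;", i.e. a conditional jump around a constant
   store instead of a read-modify-write of the destination bitfield.  */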
10713 expand_assignment (lhs, rhs, false);
10714 return const0_rtx;
10717 case ADDR_EXPR:
10718 return expand_expr_addr_expr (exp, target, tmode, modifier);
10720 case REALPART_EXPR:
10721 op0 = expand_normal (treeop0);
10722 return read_complex_part (op0, false);
10724 case IMAGPART_EXPR:
10725 op0 = expand_normal (treeop0);
10726 return read_complex_part (op0, true);
10728 case RETURN_EXPR:
10729 case LABEL_EXPR:
10730 case GOTO_EXPR:
10731 case SWITCH_EXPR:
10732 case ASM_EXPR:
10733 /* Expanded in cfgexpand.c. */
10734 gcc_unreachable ();
10736 case TRY_CATCH_EXPR:
10737 case CATCH_EXPR:
10738 case EH_FILTER_EXPR:
10739 case TRY_FINALLY_EXPR:
10740 /* Lowered by tree-eh.c. */
10741 gcc_unreachable ();
10743 case WITH_CLEANUP_EXPR:
10744 case CLEANUP_POINT_EXPR:
10745 case TARGET_EXPR:
10746 case CASE_LABEL_EXPR:
10747 case VA_ARG_EXPR:
10748 case BIND_EXPR:
10749 case INIT_EXPR:
10750 case CONJ_EXPR:
10751 case COMPOUND_EXPR:
10752 case PREINCREMENT_EXPR:
10753 case PREDECREMENT_EXPR:
10754 case POSTINCREMENT_EXPR:
10755 case POSTDECREMENT_EXPR:
10756 case LOOP_EXPR:
10757 case EXIT_EXPR:
10758 case COMPOUND_LITERAL_EXPR:
10759 /* Lowered by gimplify.c. */
10760 gcc_unreachable ();
10762 case FDESC_EXPR:
10763 /* Function descriptors are not valid except as initialization
10764 constants, and should not be expanded. */
10765 gcc_unreachable ();
10767 case WITH_SIZE_EXPR:
10768 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10769 have pulled out the size to use in whatever context it needed. */
10770 return expand_expr_real (treeop0, original_target, tmode,
10771 modifier, alt_rtl, inner_reference_p);
10773 default:
10774 return expand_expr_real_2 (&ops, target, tmode, modifier);
10778 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10779 signedness of TYPE), possibly returning the result in TARGET. */
10780 static rtx
10781 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10783 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10784 if (target && GET_MODE (target) != GET_MODE (exp))
10785 target = 0;
10786 /* For constant values, reduce using build_int_cst_type. */
10787 if (CONST_INT_P (exp))
10789 HOST_WIDE_INT value = INTVAL (exp);
10790 tree t = build_int_cst_type (type, value);
10791 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10793 else if (TYPE_UNSIGNED (type))
10795 machine_mode mode = GET_MODE (exp);
10796 rtx mask = immed_wide_int_const
10797 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10798 return expand_and (mode, exp, mask, target);
10800 else
10802 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10803 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10804 exp, count, target, 0);
10805 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10806 exp, count, target, 0);
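/* Editor's worked example for reduce_to_bit_field_precision above
   (illustrative, assuming a 32-bit SImode): reducing to a 3-bit
   unsigned bit-field type ANDs the value with 0x7, while the signed
   variant shifts left by 29 and arithmetically back right by 29, so
   the SImode value 5 (binary 101) reduces to 5 unsigned but to -3
   signed.  */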
10810 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10811 when applied to the address of EXP produces an address known to be
10812 aligned more than BIGGEST_ALIGNMENT. */
10814 static int
10815 is_aligning_offset (const_tree offset, const_tree exp)
10817 /* Strip off any conversions. */
10818 while (CONVERT_EXPR_P (offset))
10819 offset = TREE_OPERAND (offset, 0);
10821 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10822 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10823 if (TREE_CODE (offset) != BIT_AND_EXPR
10824 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10825 || compare_tree_int (TREE_OPERAND (offset, 1),
10826 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10827 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10828 return 0;
10830 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10831 It must be NEGATE_EXPR. Then strip any more conversions. */
10832 offset = TREE_OPERAND (offset, 0);
10833 while (CONVERT_EXPR_P (offset))
10834 offset = TREE_OPERAND (offset, 0);
10836 if (TREE_CODE (offset) != NEGATE_EXPR)
10837 return 0;
10839 offset = TREE_OPERAND (offset, 0);
10840 while (CONVERT_EXPR_P (offset))
10841 offset = TREE_OPERAND (offset, 0);
10843 /* This must now be the address of EXP. */
10844 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
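/* Editor's note (illustrative): the shape recognized above is the usual
   manual-alignment idiom, e.g. an offset written as

     (-(uintptr_t) &exp) & (ALIGN - 1)

   which, when added to &exp, rounds the address up to an ALIGN-byte
   boundary; it is only accepted when the implied alignment exceeds
   BIGGEST_ALIGNMENT.  */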
10847 /* Return the tree node if ARG corresponds to a string constant, or zero
10848 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10849 in bytes within the string that ARG is accessing. The type of the
10850 offset will be `sizetype'. */
10852 tree
10853 string_constant (tree arg, tree *ptr_offset)
10855 tree array, offset, lower_bound;
10856 STRIP_NOPS (arg);
10858 if (TREE_CODE (arg) == ADDR_EXPR)
10860 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10862 *ptr_offset = size_zero_node;
10863 return TREE_OPERAND (arg, 0);
10865 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10867 array = TREE_OPERAND (arg, 0);
10868 offset = size_zero_node;
10870 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10872 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10873 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10874 if (TREE_CODE (array) != STRING_CST
10875 && TREE_CODE (array) != VAR_DECL)
10876 return 0;
10878 /* Check if the array has a nonzero lower bound. */
10879 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10880 if (!integer_zerop (lower_bound))
10882 /* If the offset and base aren't both constants, return 0. */
10883 if (TREE_CODE (lower_bound) != INTEGER_CST)
10884 return 0;
10885 if (TREE_CODE (offset) != INTEGER_CST)
10886 return 0;
10887 /* Adjust offset by the lower bound. */
10888 offset = size_diffop (fold_convert (sizetype, offset),
10889 fold_convert (sizetype, lower_bound));
10892 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10894 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10895 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10896 if (TREE_CODE (array) != ADDR_EXPR)
10897 return 0;
10898 array = TREE_OPERAND (array, 0);
10899 if (TREE_CODE (array) != STRING_CST
10900 && TREE_CODE (array) != VAR_DECL)
10901 return 0;
10903 else
10904 return 0;
10906 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10908 tree arg0 = TREE_OPERAND (arg, 0);
10909 tree arg1 = TREE_OPERAND (arg, 1);
10911 STRIP_NOPS (arg0);
10912 STRIP_NOPS (arg1);
10914 if (TREE_CODE (arg0) == ADDR_EXPR
10915 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10916 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10918 array = TREE_OPERAND (arg0, 0);
10919 offset = arg1;
10921 else if (TREE_CODE (arg1) == ADDR_EXPR
10922 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10923 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10925 array = TREE_OPERAND (arg1, 0);
10926 offset = arg0;
10928 else
10929 return 0;
10931 else
10932 return 0;
10934 if (TREE_CODE (array) == STRING_CST)
10936 *ptr_offset = fold_convert (sizetype, offset);
10937 return array;
10939 else if (TREE_CODE (array) == VAR_DECL
10940 || TREE_CODE (array) == CONST_DECL)
10942 int length;
10943 tree init = ctor_for_folding (array);
10945 /* Variables initialized to string literals can be handled too. */
10946 if (init == error_mark_node
10947 || !init
10948 || TREE_CODE (init) != STRING_CST)
10949 return 0;
10951 /* Avoid const char foo[4] = "abcde"; */
10952 if (DECL_SIZE_UNIT (array) == NULL_TREE
10953 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10954 || (length = TREE_STRING_LENGTH (init)) <= 0
10955 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10956 return 0;
10958 /* If the variable is bigger than the string literal, OFFSET must be constant
10959 and inside the bounds of the string literal. */
10960 offset = fold_convert (sizetype, offset);
10961 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10962 && (! tree_fits_uhwi_p (offset)
10963 || compare_tree_int (offset, length) >= 0))
10964 return 0;
10966 *ptr_offset = offset;
10967 return init;
10970 return 0;
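/* Editor's example for string_constant above (illustrative, not part of
   the original source): for an argument of the form &"hello"[2] or
   "hello" + i it returns the STRING_CST "hello" with *PTR_OFFSET set to
   the byte offset converted to sizetype, and it does the same for a
   const char array whose initializer is a string literal that fits its
   declared size.  */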
10973 /* Generate code to calculate OPS, an exploded expression,
10974 using a store-flag instruction, and return an rtx for the result.
10975 OPS reflects a comparison.
10977 If TARGET is nonzero, store the result there if convenient.
10979 Return zero if there is no suitable set-flag instruction
10980 available on this machine.
10982 Once expand_expr has been called on the arguments of the comparison,
10983 we are committed to doing the store flag, since it is not safe to
10984 re-evaluate the expression. We emit the store-flag insn by calling
10985 emit_store_flag, but only expand the arguments if we have a reason
10986 to believe that emit_store_flag will be successful. If we think that
10987 it will, but it isn't, we have to simulate the store-flag with a
10988 set/jump/set sequence. */
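/* As a rough sketch (not the exact RTL): for r = (a < b) a successful
   store-flag expansion is a compare followed by a set-on-condition
   style instruction, e.g. cmp/setl on x86, with no branches. The
   set/jump/set fallback instead sets the result to 1, conditionally
   jumps past, and otherwise stores 0. */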
10990 static rtx
10991 do_store_flag (sepops ops, rtx target, machine_mode mode)
10993 enum rtx_code code;
10994 tree arg0, arg1, type;
10995 machine_mode operand_mode;
10996 int unsignedp;
10997 rtx op0, op1;
10998 rtx subtarget = target;
10999 location_t loc = ops->location;
11001 arg0 = ops->op0;
11002 arg1 = ops->op1;
11004 /* Don't crash if the comparison was erroneous. */
11005 if (arg0 == error_mark_node || arg1 == error_mark_node)
11006 return const0_rtx;
11008 type = TREE_TYPE (arg0);
11009 operand_mode = TYPE_MODE (type);
11010 unsignedp = TYPE_UNSIGNED (type);
11012 /* We won't bother with BLKmode store-flag operations because it would mean
11013 passing a lot of information to emit_store_flag. */
11014 if (operand_mode == BLKmode)
11015 return 0;
11017 /* We won't bother with store-flag operations involving function pointers
11018 when function pointers must be canonicalized before comparisons. */
11019 if (targetm.have_canonicalize_funcptr_for_compare ()
11020 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11021 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11022 == FUNCTION_TYPE))
11023 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11024 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11025 == FUNCTION_TYPE))))
11026 return 0;
11028 STRIP_NOPS (arg0);
11029 STRIP_NOPS (arg1);
11031 /* For vector typed comparisons emit code to generate the desired
11032 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11033 expander for this. */
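/* E.g. for a V4SI comparison a < b this builds
   VEC_COND_EXPR <a < b, { -1,-1,-1,-1 }, { 0,0,0,0 }>
   so each lane of the result is all-ones where the comparison holds
   and zero where it does not. (Illustrative.) */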
11034 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11036 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11037 tree if_true = constant_boolean_node (true, ops->type);
11038 tree if_false = constant_boolean_node (false, ops->type);
11039 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11042 /* Get the rtx comparison code to use. We know that EXP is a comparison
11043 operation of some type. Some comparisons against 1 and -1 can be
11044 converted to comparisons with zero. Do so here so that the tests
11045 below will be aware that we have a comparison with zero. These
11046 tests will not catch constants in the first operand, but constants
11047 are rarely passed as the first operand. */
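/* For example, x < 1 is rewritten below as x <= 0, and for signed
   operands x > -1 becomes x >= 0, so only comparisons against zero
   need to be recognized later. */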
11049 switch (ops->code)
11051 case EQ_EXPR:
11052 code = EQ;
11053 break;
11054 case NE_EXPR:
11055 code = NE;
11056 break;
11057 case LT_EXPR:
11058 if (integer_onep (arg1))
11059 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11060 else
11061 code = unsignedp ? LTU : LT;
11062 break;
11063 case LE_EXPR:
11064 if (! unsignedp && integer_all_onesp (arg1))
11065 arg1 = integer_zero_node, code = LT;
11066 else
11067 code = unsignedp ? LEU : LE;
11068 break;
11069 case GT_EXPR:
11070 if (! unsignedp && integer_all_onesp (arg1))
11071 arg1 = integer_zero_node, code = GE;
11072 else
11073 code = unsignedp ? GTU : GT;
11074 break;
11075 case GE_EXPR:
11076 if (integer_onep (arg1))
11077 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11078 else
11079 code = unsignedp ? GEU : GE;
11080 break;
11082 case UNORDERED_EXPR:
11083 code = UNORDERED;
11084 break;
11085 case ORDERED_EXPR:
11086 code = ORDERED;
11087 break;
11088 case UNLT_EXPR:
11089 code = UNLT;
11090 break;
11091 case UNLE_EXPR:
11092 code = UNLE;
11093 break;
11094 case UNGT_EXPR:
11095 code = UNGT;
11096 break;
11097 case UNGE_EXPR:
11098 code = UNGE;
11099 break;
11100 case UNEQ_EXPR:
11101 code = UNEQ;
11102 break;
11103 case LTGT_EXPR:
11104 code = LTGT;
11105 break;
11107 default:
11108 gcc_unreachable ();
11111 /* Put a constant second. */
11112 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11113 || TREE_CODE (arg0) == FIXED_CST)
11115 std::swap (arg0, arg1);
11116 code = swap_condition (code);
11119 /* If this is an equality or inequality test of a single bit, we can
11120 do this by shifting the bit being tested to the low-order bit and
11121 masking the result with the constant 1. If the condition was EQ,
11122 we xor it with 1. This does not require an scc insn and is faster
11123 than an scc insn even if we have it.
11125 The code to make this transformation was moved into fold_single_bit_test,
11126 so we just call into the folder and expand its result. */
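/* E.g. (x & 8) != 0 is folded to (x >> 3) & 1, and the EQ form
   additionally XORs that result with 1. (Illustrative C; the folder
   builds the equivalent tree, which is then expanded.) */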
11128 if ((code == NE || code == EQ)
11129 && integer_zerop (arg1)
11130 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11132 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11133 if (srcstmt
11134 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11136 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11137 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11138 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11139 gimple_assign_rhs1 (srcstmt),
11140 gimple_assign_rhs2 (srcstmt));
11141 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11142 if (temp)
11143 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11147 if (! get_subtarget (target)
11148 || GET_MODE (subtarget) != operand_mode)
11149 subtarget = 0;
11151 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11153 if (target == 0)
11154 target = gen_reg_rtx (mode);
11156 /* Try a cstore if possible. */
11157 return emit_store_flag_force (target, code, op0, op1,
11158 operand_mode, unsignedp,
11159 (TYPE_PRECISION (ops->type) == 1
11160 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11163 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11164 0 otherwise (i.e. if there is no casesi instruction).
11166 DEFAULT_PROBABILITY is the probability of jumping to the default
11167 label. */
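/* Roughly: for a switch whose case values span MINVAL .. MINVAL+RANGE,
   the casesi pattern bounds-checks INDEX_EXPR - MINVAL against RANGE
   and dispatches through TABLE_LABEL, branching to DEFAULT_LABEL (or
   FALLBACK_LABEL when there is no default) when out of range.
   (Sketch of the expected semantics; the target's casesi pattern has
   the details.) */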
11168 int
11169 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11170 rtx table_label, rtx default_label, rtx fallback_label,
11171 int default_probability)
11173 struct expand_operand ops[5];
11174 machine_mode index_mode = SImode;
11175 rtx op1, op2, index;
11177 if (! targetm.have_casesi ())
11178 return 0;
11180 /* Convert the index to SImode. */
11181 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11183 machine_mode omode = TYPE_MODE (index_type);
11184 rtx rangertx = expand_normal (range);
11186 /* We must handle the endpoints in the original mode. */
11187 index_expr = build2 (MINUS_EXPR, index_type,
11188 index_expr, minval);
11189 minval = integer_zero_node;
11190 index = expand_normal (index_expr);
11191 if (default_label)
11192 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11193 omode, 1, default_label,
11194 default_probability);
11195 /* Now we can safely truncate. */
11196 index = convert_to_mode (index_mode, index, 0);
11198 else
11200 if (TYPE_MODE (index_type) != index_mode)
11202 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11203 index_expr = fold_convert (index_type, index_expr);
11206 index = expand_normal (index_expr);
11209 do_pending_stack_adjust ();
11211 op1 = expand_normal (minval);
11212 op2 = expand_normal (range);
11214 create_input_operand (&ops[0], index, index_mode);
11215 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11216 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11217 create_fixed_operand (&ops[3], table_label);
11218 create_fixed_operand (&ops[4], (default_label
11219 ? default_label
11220 : fallback_label));
11221 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11222 return 1;
11225 /* Attempt to generate a tablejump instruction; same concept. */
11226 /* Subroutine of the next function.
11228 INDEX is the value being switched on, with the lowest value
11229 in the table already subtracted.
11230 MODE is its expected mode (needed if INDEX is constant).
11231 RANGE is the length of the jump table.
11232 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11234 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11235 index value is out of range.
11236 DEFAULT_PROBABILITY is the probability of jumping to
11237 the default label. */
11239 static void
11240 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11241 rtx default_label, int default_probability)
11243 rtx temp, vector;
11245 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11246 cfun->cfg->max_jumptable_ents = INTVAL (range);
11248 /* Do an unsigned comparison (in the proper mode) between the index
11249 expression and the value which represents the length of the range.
11250 Since we just finished subtracting the lower bound of the range
11251 from the index expression, this comparison allows us to simultaneously
11252 check that the original index expression value is both greater than
11253 or equal to the minimum value of the range and less than or equal to
11254 the maximum value of the range. */
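/* In C terms this is the classic

       if ((unsigned) (i - low) > (unsigned) (high - low))
         goto default_label;

   trick: an index below LOW wraps to a huge unsigned value after the
   subtraction, so a single unsigned comparison rejects both
   out-of-range directions. (Illustrative.) */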
11256 if (default_label)
11257 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11258 default_label, default_probability);
11261 /* If index is in range, it must fit in Pmode.
11262 Convert to Pmode so we can index with it. */
11263 if (mode != Pmode)
11264 index = convert_to_mode (Pmode, index, 1);
11266 /* Don't let a MEM slip through, because then INDEX that comes
11267 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11268 and break_out_memory_refs will go to work on it and mess it up. */
11269 #ifdef PIC_CASE_VECTOR_ADDRESS
11270 if (flag_pic && !REG_P (index))
11271 index = copy_to_mode_reg (Pmode, index);
11272 #endif
11274 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11275 GET_MODE_SIZE, because this indicates how large insns are. The other
11276 uses should all be Pmode, because they are addresses. This code
11277 could fail if addresses and insns are not the same size. */
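/* I.e. the address computed below is effectively
   TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
   a scaled index into the dispatch table. */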
11278 index = simplify_gen_binary (MULT, Pmode, index,
11279 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11280 Pmode));
11281 index = simplify_gen_binary (PLUS, Pmode, index,
11282 gen_rtx_LABEL_REF (Pmode, table_label));
11284 #ifdef PIC_CASE_VECTOR_ADDRESS
11285 if (flag_pic)
11286 index = PIC_CASE_VECTOR_ADDRESS (index);
11287 else
11288 #endif
11289 index = memory_address (CASE_VECTOR_MODE, index);
11290 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11291 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11292 convert_move (temp, vector, 0);
11294 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11296 /* If we are generating PIC code or if the table is PC-relative, the
11297 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11298 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11299 emit_barrier ();
11302 int
11303 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11304 rtx table_label, rtx default_label, int default_probability)
11306 rtx index;
11308 if (! targetm.have_tablejump ())
11309 return 0;
11311 index_expr = fold_build2 (MINUS_EXPR, index_type,
11312 fold_convert (index_type, index_expr),
11313 fold_convert (index_type, minval));
11314 index = expand_normal (index_expr);
11315 do_pending_stack_adjust ();
11317 do_tablejump (index, TYPE_MODE (index_type),
11318 convert_modes (TYPE_MODE (index_type),
11319 TYPE_MODE (TREE_TYPE (range)),
11320 expand_normal (range),
11321 TYPE_UNSIGNED (TREE_TYPE (range))),
11322 table_label, default_label, default_probability);
11323 return 1;
11326 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
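/* E.g. a V4SImode VECTOR_CST {1, 2, 3, 4} becomes a CONST_VECTOR of
   four CONST_INTs, while an all-zero initializer is returned directly
   as CONST0_RTX of the vector mode. (Illustrative.) */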
11327 static rtx
11328 const_vector_from_tree (tree exp)
11330 rtvec v;
11331 unsigned i;
11332 int units;
11333 tree elt;
11334 machine_mode inner, mode;
11336 mode = TYPE_MODE (TREE_TYPE (exp));
11338 if (initializer_zerop (exp))
11339 return CONST0_RTX (mode);
11341 units = GET_MODE_NUNITS (mode);
11342 inner = GET_MODE_INNER (mode);
11344 v = rtvec_alloc (units);
11346 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11348 elt = VECTOR_CST_ELT (exp, i);
11350 if (TREE_CODE (elt) == REAL_CST)
11351 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11352 inner);
11353 else if (TREE_CODE (elt) == FIXED_CST)
11354 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11355 inner);
11356 else
11357 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11360 return gen_rtx_CONST_VECTOR (mode, v);
11363 /* Build a decl for a personality function given a language prefix. */
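/* For example, LANG "gxx" with DWARF2 unwinding yields a declaration
   of __gxx_personality_v0; the SJLJ and SEH unwinders use the "_sj0"
   and "_seh0" suffixes instead. */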
11365 tree
11366 build_personality_function (const char *lang)
11368 const char *unwind_and_version;
11369 tree decl, type;
11370 char *name;
11372 switch (targetm_common.except_unwind_info (&global_options))
11374 case UI_NONE:
11375 return NULL;
11376 case UI_SJLJ:
11377 unwind_and_version = "_sj0";
11378 break;
11379 case UI_DWARF2:
11380 case UI_TARGET:
11381 unwind_and_version = "_v0";
11382 break;
11383 case UI_SEH:
11384 unwind_and_version = "_seh0";
11385 break;
11386 default:
11387 gcc_unreachable ();
11390 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11392 type = build_function_type_list (integer_type_node, integer_type_node,
11393 long_long_unsigned_type_node,
11394 ptr_type_node, ptr_type_node, NULL_TREE);
11395 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11396 get_identifier (name), type);
11397 DECL_ARTIFICIAL (decl) = 1;
11398 DECL_EXTERNAL (decl) = 1;
11399 TREE_PUBLIC (decl) = 1;
11401 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11402 are the flags assigned by targetm.encode_section_info. */
11403 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11405 return decl;
11408 /* Extracts the personality function of DECL and returns the corresponding
11409 libfunc. */
11411 rtx
11412 get_personality_function (tree decl)
11414 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11415 enum eh_personality_kind pk;
11417 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11418 if (pk == eh_personality_none)
11419 return NULL;
11421 if (!personality
11422 && pk == eh_personality_any)
11423 personality = lang_hooks.eh_personality ();
11425 if (pk == eh_personality_lang)
11426 gcc_assert (personality != NULL_TREE);
11428 return XEXP (DECL_RTL (personality), 0);
11431 /* Returns a tree for the size of EXP in bytes. */
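/* E.g. for a declaration int a[10] this is DECL_SIZE_UNIT, typically
   40 bytes with 4-byte int; for an expression without a decl the size
   of its type is used. */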
11433 static tree
11434 tree_expr_size (const_tree exp)
11436 if (DECL_P (exp)
11437 && DECL_SIZE_UNIT (exp) != 0)
11438 return DECL_SIZE_UNIT (exp);
11439 else
11440 return size_in_bytes (TREE_TYPE (exp));
11443 /* Return an rtx for the size in bytes of the value of EXP. */
11445 rtx
11446 expr_size (tree exp)
11448 tree size;
11450 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11451 size = TREE_OPERAND (exp, 1);
11452 else
11454 size = tree_expr_size (exp);
11455 gcc_assert (size);
11456 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11459 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11462 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11463 if the size can vary or is larger than an integer. */
11465 static HOST_WIDE_INT
11466 int_expr_size (tree exp)
11468 tree size;
11470 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11471 size = TREE_OPERAND (exp, 1);
11472 else
11474 size = tree_expr_size (exp);
11475 gcc_assert (size);
11478 if (size == 0 || !tree_fits_shwi_p (size))
11479 return -1;
11481 return tree_to_shwi (size);
11484 #include "gt-expr.h"