PR target/68729
gcc/expr.c

/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "ssa.h"
#include "expmed.h"
#include "optabs.h"
#include "regs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "except.h"
#include "insn-attr.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "stmt.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs-tree.h"
#include "libfuncs.h"
#include "reload.h"
#include "langhooks.h"
#include "common/common-target.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "tree-ssa-address.h"
#include "builtins.h"
#include "tree-chkp.h"
#include "rtl-chkp.h"
#include "ccmp.h"

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
  void *constfundata;
  int reverse;
};

static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                                        unsigned HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx_insn *compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, machine_mode,
                                     tree, int, alias_set_type, bool);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        machine_mode, tree, alias_set_type, bool, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static tree tree_expr_size (const_tree);
static HOST_WIDE_INT int_expr_size (tree);

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            set_mode_and_regno (reg, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  machine_mode to_mode = GET_MODE (to);
  machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value;
      rtx_insn *insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */            /* SPEE 900220.  */
  /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
  {
    convert_optab ctab;

    if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
      ctab = trunc_optab;
    else if (unsignedp)
      ctab = zext_optab;
    else
      ctab = sext_optab;

    if (convert_optab_handler (ctab, to_mode, from_mode)
        != CODE_FOR_nothing)
      {
        emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
                        to, from, UNKNOWN);
        return;
      }
  }

  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx_insn *insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         the source does not overlap the target so force it into an isolated
         register when that might be the case.  Likewise for any MEM input,
         since the conversion sequence might require several references to
         it and we must ensure we're getting the same value every time.  */

      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
                                            LT, lowfrom, const0_rtx,
                                            lowpart_mode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
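
/* Illustrative usage (editor's sketch, not part of the original source):
   to widen a QImode pseudo X into a new SImode pseudo with
   zero-extension, a caller would write

     rtx dest = gen_reg_rtx (SImode);
     convert_move (dest, x, 1);

   UNSIGNEDP selects ZERO_EXTEND vs. SIGN_EXTEND when widening; all of
   the mode-pair dispatch above is hidden behind this single entry
   point.  */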

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
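
/* Usage note (editor's sketch, not part of the original source):

     rtx wide = convert_to_mode (DImode, narrow, 0);

   sign-extends NARROW (UNSIGNEDP == 0) into DImode.  The result may be
   NARROW itself, a lowpart of it, or a fresh pseudo, so callers must
   not assume a new register was allocated.  */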

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
    x = gen_lowpart (mode, SUBREG_REG (x));

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
    {
      /* If the caller did not tell us the old mode, then there is not
         much to do with respect to canonicalization.  We have to
         assume that all the bits are significant.  */
      if (GET_MODE_CLASS (oldmode) != MODE_INT)
        oldmode = MAX_MODE_INT;
      wide_int w = wide_int::from (std::make_pair (x, oldmode),
                                   GET_MODE_PRECISION (mode),
                                   unsignedp ? UNSIGNED : SIGNED);
      return immed_wide_int_const (w, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  */
  if (GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_CLASS (oldmode) == MODE_INT
      && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
      && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
          || (REG_P (x)
              && (!HARD_REGISTER_P (x)
                  || HARD_REGNO_MODE_OK (REGNO (x), mode))
              && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))

    return gen_lowpart (mode, x);

  /* Converting an integer constant into MODE is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
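
/* Worked example (editor's note, not part of the original source): for
   a CONST_INT the conversion is folded at compile time through the
   wide_int path above.  The QImode constant with all bits set is stored
   canonically as (const_int -1); converting it to SImode yields
   (const_int 255) when UNSIGNEDP is nonzero but stays (const_int -1)
   when it is zero, because wide_int::from re-extends from OLDMODE's
   precision under the requested signedness.  */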

/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
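
/* Example (editor's note, not part of the original source): with
   MAX_PIECES == 8, an ALIGN already at or above DImode's
   GET_MODE_ALIGNMENT is clamped to exactly that.  Otherwise the loop
   finds the widest integer mode that is still fast to access at this
   alignment (per SLOW_UNALIGNED_ACCESS) and raises ALIGN to that mode's
   alignment, licensing wider piecewise moves below.  */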

/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static machine_mode
widest_int_mode_for_size (unsigned int size)
{
  machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
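
/* Example (editor's note, not part of the original source): on a
   typical 64-bit target, widest_int_mode_for_size (9) returns DImode.
   Note the strict '<' in the loop: callers such as move_by_pieces pass
   MOVE_MAX_PIECES + 1 so that modes of exactly MOVE_MAX_PIECES bytes
   are still eligible.  */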

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align)
{
  return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
                                                 optimize_insn_for_speed_p ());
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  machine_mode to_addr_mode;
  machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
      if (STACK_GROWS_DOWNWARD)
        data.reverse = 1;
      else
        data.reverse = 0;
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
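
/* Illustrative call (editor's sketch, not part of the original source),
   mirroring the use in emit_block_move_hints further below:

     if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);

   For a well-aligned 13-byte copy on a 64-bit target this expands to a
   DImode, an SImode and a QImode move instead of a memcpy call.  */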

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
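
/* Worked example (editor's note, not part of the original source):
   l == 13 with 8-byte alignment on a 64-bit target.  DImode moves
   13 / 8 = 1 insn, remainder 5; SImode 5 / 4 = 1, remainder 1; HImode
   contributes nothing; QImode 1 / 1 = 1, remainder 0.  Total: 3 insns,
   the figure the "> 2" address-optimization heuristic in move_by_pieces
   compares against.  */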

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.
   MIN_SIZE is the minimal size of the block to move.
   MAX_SIZE is the maximal size of the block to move; if it cannot be
   represented in unsigned HOST_WIDE_INT, then it is a mask of all ones.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size,
                       unsigned HOST_WIDE_INT min_size,
                       unsigned HOST_WIDE_INT max_size,
                       unsigned HOST_WIDE_INT probable_max_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size,
                                       min_size, max_size, probable_max_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  unsigned HOST_WIDE_INT max, min = 0;
  if (GET_CODE (size) == CONST_INT)
    min = max = UINTVAL (size);
  else
    max = GET_MODE_MASK (GET_MODE (size));
  return emit_block_move_hints (x, y, size, method, 0, -1,
                                min, max, max);
}
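
/* Usage sketch (editor's illustration, not part of the original
   source): a typical caller copies SIZE_RTX bytes between two BLKmode
   MEMs with

     emit_block_move (target_mem, source_mem, size_rtx, BLOCK_OP_NORMAL);

   where TARGET_MEM, SOURCE_MEM and SIZE_RTX are hypothetical caller
   variables.  emit_block_move is the convenience wrapper: it derives
   MIN_SIZE and MAX_SIZE from SIZE and forwards to emit_block_move_hints
   above.  */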

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size,
                            unsigned HOST_WIDE_INT min_size,
                            unsigned HOST_WIDE_INT max_size,
                            unsigned HOST_WIDE_INT probable_max_size)
{
  int save_volatile_ok = volatile_ok;
  machine_mode mode;

  if (expected_align < align)
    expected_align = align;
  if (expected_size != -1)
    {
      if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
        expected_size = probable_max_size;
      if ((unsigned HOST_WIDE_INT)expected_size < min_size)
        expected_size = min_size;
    }

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || max_size <= (GET_MODE_MASK (mode) >> 1)
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[9];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops >= 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (nops >= 8)
            {
              create_integer_operand (&ops[6], min_size);
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) max_size != -1)
                create_integer_operand (&ops[7], max_size);
              else
                create_fixed_operand (&ops[7], NULL);
            }
          if (nops == 9)
            {
              /* If we cannot represent the maximal size,
                 make the parameter NULL.  */
              if ((HOST_WIDE_INT) probable_max_size != -1)
                create_integer_operand (&ops[8], probable_max_size);
              else
                create_fixed_operand (&ops[8], NULL);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
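
/* Operand layout reference (editor's note, not part of the original
   source).  The movmem pattern takes 4, 6, 8 or 9 operands, matching
   the nops assertion above:
     0 destination MEM          1 source MEM
     2 byte count               3 known alignment in bytes
     4 expected alignment       5 expected size
     6 minimal size             7 maximal size, or NULL if unknown
     8 probable maximal size, or NULL if unknown
   Targets expose the pattern through movmem<mode> expanders such as
   movmemsi.  */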

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx_code_label *cmp_label, *top_label;
  rtx iter, x_addr, y_addr, tmp;
  machine_mode x_addr_mode = get_address_mode (x);
  machine_mode y_addr_mode = get_address_mode (y);
  machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
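
/* The emitted RTL behaves like this byte-copy loop (editor's sketch,
   not part of the original source; ITER plays the role of an index of
   iter_mode):

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   The comparison sits at the bottom (cmp_label), so the body is skipped
   entirely when SIZE is zero, and the backward branch carries a 90%
   probability hint.  */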

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
{
  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
  if (targetm.have_load_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
                                                     GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
  if (targetm.have_store_multiple ())
    {
      rtx_insn *last = get_last_insn ();
      rtx first = gen_rtx_REG (word_mode, regno);
      if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
                                                      GEN_INT (nregs)))
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }

  for (int i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
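
/* Shape reference (editor's note, not part of the original source): a
   register group PARALLEL has the form

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   pairing each register with its byte offset into the value; a NULL
   first entry marks a parameter that also lives partly on the stack.
   gen_group_rtx merely replaces each register with a fresh pseudo of
   the same mode.  */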

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into the corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, false);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode, false);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              /* TODO: const_wide_int can have sizes other than this...  */
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, false);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
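
/* Usage sketch (editor's illustration, not part of the original
   source): given a PARALLEL like the one shown earlier and a BLKmode
   source in memory,

     emit_group_load (dst_parallel, src_mem, type, ssize);

   extracts each piece into a temporary and then moves it into the
   corresponding, probably hard, register of the group (DST_PARALLEL,
   SRC_MEM and TYPE here are hypothetical caller variables).  */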

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      machine_mode outer = GET_MODE (dst);
      machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;
      else
        adj_bytelen = bytelen;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              machine_mode dest_mode = GET_MODE (dest);
              machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode));
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode));
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
2024 if (
2025 #ifdef BLOCK_REG_PADDING
2026 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2027 == (BYTES_BIG_ENDIAN ? upward : downward)
2028 #else
2029 BYTES_BIG_ENDIAN
2030 #endif
2033 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2034 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2035 shift, tmps[i], 0);
2038 /* Make sure not to write past the end of the struct. */
2039 store_bit_field (dest,
2040 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2041 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2042 VOIDmode, tmps[i], false);
2045 /* Optimize the access just a bit. */
2046 else if (MEM_P (dest)
2047 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2048 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2049 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2050 && bytelen == GET_MODE_SIZE (mode))
2051 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2053 else
2054 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2055 0, 0, mode, tmps[i], false);
2058 /* Copy from the pseudo into the (probable) hard reg. */
2059 if (orig_dst != dst)
2060 emit_move_insn (orig_dst, dst);
2063 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2064 of the value stored in X. */
2067 maybe_emit_group_store (rtx x, tree type)
2069 machine_mode mode = TYPE_MODE (type);
2070 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2071 if (GET_CODE (x) == PARALLEL)
2073 rtx result = gen_reg_rtx (mode);
2074 emit_group_store (result, x, type, int_size_in_bytes (type));
2075 return result;
2077 return x;
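/* Illustrative sketch (hypothetical names): code expanding a call
   whose return value lands in a PARALLEL can normalize it before
   using it as an ordinary operand:

     rtx val = maybe_emit_group_store (callee_return_rtx, type);

   If CALLEE_RETURN_RTX was not a PARALLEL it is returned unchanged;
   otherwise the group is stored into a fresh pseudo of TYPE's mode.  */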
2080 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2082 This is used on targets that return BLKmode values in registers. */
2084 void
2085 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2087 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2088 rtx src = NULL, dst = NULL;
2089 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2090 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2091 machine_mode mode = GET_MODE (srcreg);
2092 machine_mode tmode = GET_MODE (target);
2093 machine_mode copy_mode;
2095 /* BLKmode registers created in the back-end shouldn't have survived. */
2096 gcc_assert (mode != BLKmode);
2098 /* If the structure doesn't take up a whole number of words, see whether
2099 SRCREG is padded on the left or on the right. If it's on the left,
2100 set PADDING_CORRECTION to the number of bits to skip.
2102 In most ABIs, the structure will be returned at the least significant end of
2103 the register, which translates to right padding on little-endian
2104 targets and left padding on big-endian targets. The opposite
2105 holds if the structure is returned at the most significant
2106 end of the register. */
2107 if (bytes % UNITS_PER_WORD != 0
2108 && (targetm.calls.return_in_msb (type)
2109 ? !BYTES_BIG_ENDIAN
2110 : BYTES_BIG_ENDIAN))
2111 padding_correction
2112 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
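  /* Worked example, assuming BITS_PER_WORD == 32 and UNITS_PER_WORD == 4:
     for a 6-byte structure returned at the least significant end of a
     big-endian register pair, bytes % UNITS_PER_WORD == 2, so
     PADDING_CORRECTION == 32 - 2 * 8 == 16 bits are skipped on the
     left.  */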
2114 /* We can use a single move if we have an exact mode for the size. */
2115 else if (MEM_P (target)
2116 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2117 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2118 && bytes == GET_MODE_SIZE (mode))
2120 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2121 return;
2124 /* Likewise if the target is a register of the same mode. */
2125 else if (REG_P (target)
2126 && GET_MODE (target) == mode
2127 && bytes == GET_MODE_SIZE (mode))
2129 emit_move_insn (target, srcreg);
2130 return;
2133 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2134 into a new pseudo which is a full word. */
2135 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2137 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 mode = word_mode;
2141 /* Copy the structure BITSIZE bits at a time. If the target lives in
2142 memory, take care of not reading/writing past its end by selecting
2143 a copy mode suited to BITSIZE. This should always be possible given
2144 how it is computed.
2146 If the target lives in register, make sure not to select a copy mode
2147 larger than the mode of the register.
2149 We could probably emit more efficient code for machines which do not use
2150 strict alignment, but it doesn't seem worth the effort at the current
2151 time. */
2153 copy_mode = word_mode;
2154 if (MEM_P (target))
2156 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2157 if (mem_mode != BLKmode)
2158 copy_mode = mem_mode;
2160 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2161 copy_mode = tmode;
2163 for (bitpos = 0, xbitpos = padding_correction;
2164 bitpos < bytes * BITS_PER_UNIT;
2165 bitpos += bitsize, xbitpos += bitsize)
2167 /* We need a new source operand each time xbitpos is on a
2168 word boundary and when xbitpos == padding_correction
2169 (the first time through). */
2170 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2171 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2173 /* We need a new destination operand each time bitpos is on
2174 a word boundary. */
2175 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2176 dst = target;
2177 else if (bitpos % BITS_PER_WORD == 0)
2178 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2180 /* Use xbitpos for the source extraction (right justified) and
2181 bitpos for the destination store (left justified). */
2182 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2183 extract_bit_field (src, bitsize,
2184 xbitpos % BITS_PER_WORD, 1,
2185 NULL_RTX, copy_mode, copy_mode,
2186 false),
2187 false);
2191 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2192 register if it contains any data, otherwise return null.
2194 This is used on targets that return BLKmode values in registers. */
2197 copy_blkmode_to_reg (machine_mode mode, tree src)
2199 int i, n_regs;
2200 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2201 unsigned int bitsize;
2202 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2203 machine_mode dst_mode;
2205 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2207 x = expand_normal (src);
2209 bytes = int_size_in_bytes (TREE_TYPE (src));
2210 if (bytes == 0)
2211 return NULL_RTX;
2213 /* If the structure doesn't take up a whole number of words, see
2214 whether the register value should be padded on the left or on
2215 the right. Set PADDING_CORRECTION to the number of padding
2216 bits needed on the left side.
2218 In most ABIs, the structure will be returned at the least significant end of
2219 the register, which translates to right padding on little-endian
2220 targets and left padding on big-endian targets. The opposite
2221 holds if the structure is returned at the most significant
2222 end of the register. */
2223 if (bytes % UNITS_PER_WORD != 0
2224 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2225 ? !BYTES_BIG_ENDIAN
2226 : BYTES_BIG_ENDIAN))
2227 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2228 * BITS_PER_UNIT));
2230 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2231 dst_words = XALLOCAVEC (rtx, n_regs);
2232 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2234 /* Copy the structure BITSIZE bits at a time. */
2235 for (bitpos = 0, xbitpos = padding_correction;
2236 bitpos < bytes * BITS_PER_UNIT;
2237 bitpos += bitsize, xbitpos += bitsize)
2239 /* We need a new destination pseudo each time xbitpos is
2240 on a word boundary and when xbitpos == padding_correction
2241 (the first time through). */
2242 if (xbitpos % BITS_PER_WORD == 0
2243 || xbitpos == padding_correction)
2245 /* Generate an appropriate register. */
2246 dst_word = gen_reg_rtx (word_mode);
2247 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2249 /* Clear the destination before we move anything into it. */
2250 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2253 /* We need a new source operand each time bitpos is on a word
2254 boundary. */
2255 if (bitpos % BITS_PER_WORD == 0)
2256 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2258 /* Use bitpos for the source extraction (left justified) and
2259 xbitpos for the destination store (right justified). */
2260 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2261 0, 0, word_mode,
2262 extract_bit_field (src_word, bitsize,
2263 bitpos % BITS_PER_WORD, 1,
2264 NULL_RTX, word_mode, word_mode,
2265 false),
2266 false);
2269 if (mode == BLKmode)
2271 /* Find the smallest integer mode large enough to hold the
2272 entire structure. */
2273 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 mode != VOIDmode;
2275 mode = GET_MODE_WIDER_MODE (mode))
2276 /* Have we found a large enough mode? */
2277 if (GET_MODE_SIZE (mode) >= bytes)
2278 break;
2280 /* A suitable mode should have been found. */
2281 gcc_assert (mode != VOIDmode);
2284 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2285 dst_mode = word_mode;
2286 else
2287 dst_mode = mode;
2288 dst = gen_reg_rtx (dst_mode);
2290 for (i = 0; i < n_regs; i++)
2291 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2293 if (mode != dst_mode)
2294 dst = gen_lowpart (mode, dst);
2296 return dst;
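/* Illustrative sketch (hypothetical names): a caller expanding a
   `return' of a small BLKmode value on a target that returns it in
   registers might do:

     rtx reg = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_expr);
     if (reg)
       emit_move_insn (result_rtl, reg);

   A null result means the value occupies zero bytes and there is
   nothing to move.  */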
2299 /* Add a USE expression for REG to the (possibly empty) list pointed
2300 to by CALL_FUSAGE. REG must denote a hard register. */
2302 void
2303 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2305 gcc_assert (REG_P (reg));
2307 if (!HARD_REGISTER_P (reg))
2308 return;
2310 *call_fusage
2311 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2314 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2315 to by CALL_FUSAGE. REG must denote a hard register. */
2317 void
2318 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2320 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2322 *call_fusage
2323 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2326 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2327 starting at REGNO. All of these registers must be hard registers. */
2329 void
2330 use_regs (rtx *call_fusage, int regno, int nregs)
2332 int i;
2334 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2336 for (i = 0; i < nregs; i++)
2337 use_reg (call_fusage, regno_reg_rtx[regno + i]);
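/* Illustrative sketch: a call expander passing an argument in two
   consecutive hard registers starting at hard register 4 (a purely
   hypothetical register number) would record both in the call's
   usage list with

     use_regs (&call_fusage, 4, 2);  */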
2340 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2341 PARALLEL REGS. This is for calls that pass values in multiple
2342 non-contiguous locations. The Irix 6 ABI has examples of this. */
2344 void
2345 use_group_regs (rtx *call_fusage, rtx regs)
2347 int i;
2349 for (i = 0; i < XVECLEN (regs, 0); i++)
2351 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2353 /* A NULL entry means the parameter goes both on the stack and in
2354 registers. This can also be a MEM for targets that pass values
2355 partially on the stack and partially in registers. */
2356 if (reg != 0 && REG_P (reg))
2357 use_reg (call_fusage, reg);
2361 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2362 assignment and the code of the expression on the RHS is CODE. Return
2363 NULL otherwise. */
2365 static gimple *
2366 get_def_for_expr (tree name, enum tree_code code)
2368 gimple *def_stmt;
2370 if (TREE_CODE (name) != SSA_NAME)
2371 return NULL;
2373 def_stmt = get_gimple_for_ssa_name (name);
2374 if (!def_stmt
2375 || gimple_assign_rhs_code (def_stmt) != code)
2376 return NULL;
2378 return def_stmt;
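/* Illustrative sketch (hypothetical names): expansion code typically
   uses this to peek through an SSA name and recognize a feeding
   operation, e.g. a multiplication whose result could be fused into
   the current statement:

     gimple *def = get_def_for_expr (treeop0, MULT_EXPR);
     if (def)
       {
         tree op0 = gimple_assign_rhs1 (def);
         tree op1 = gimple_assign_rhs2 (def);
         ...
       }

   The lookup only succeeds when NAME is an SSA name whose defining
   assignment is visible to the expander.  */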
2381 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2382 assignment and the class of the expression on the RHS is CLASS. Return
2383 NULL otherwise. */
2385 static gimple *
2386 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2388 gimple *def_stmt;
2390 if (TREE_CODE (name) != SSA_NAME)
2391 return NULL;
2393 def_stmt = get_gimple_for_ssa_name (name);
2394 if (!def_stmt
2395 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2396 return NULL;
2398 return def_stmt;
2402 /* Determine whether the LEN bytes generated by CONSTFUN can be
2403 stored to memory using several move instructions. CONSTFUNDATA is
2404 a pointer which will be passed as argument in every CONSTFUN call.
2405 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2406 a memset operation and false if it's a copy of a constant string.
2407 Return nonzero if a call to store_by_pieces should succeed. */
2410 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2411 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2412 void *constfundata, unsigned int align, bool memsetp)
2414 unsigned HOST_WIDE_INT l;
2415 unsigned int max_size;
2416 HOST_WIDE_INT offset = 0;
2417 machine_mode mode;
2418 enum insn_code icode;
2419 int reverse;
2420 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2421 rtx cst ATTRIBUTE_UNUSED;
2423 if (len == 0)
2424 return 1;
2426 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2427 memsetp
2428 ? SET_BY_PIECES
2429 : STORE_BY_PIECES,
2430 optimize_insn_for_speed_p ()))
2431 return 0;
2433 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2435 /* We would first store what we can in the largest integer mode, then go to
2436 successively smaller modes. */
2438 for (reverse = 0;
2439 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2440 reverse++)
2442 l = len;
2443 max_size = STORE_MAX_PIECES + 1;
2444 while (max_size > 1 && l > 0)
2446 mode = widest_int_mode_for_size (max_size);
2448 if (mode == VOIDmode)
2449 break;
2451 icode = optab_handler (mov_optab, mode);
2452 if (icode != CODE_FOR_nothing
2453 && align >= GET_MODE_ALIGNMENT (mode))
2455 unsigned int size = GET_MODE_SIZE (mode);
2457 while (l >= size)
2459 if (reverse)
2460 offset -= size;
2462 cst = (*constfun) (constfundata, offset, mode);
2463 if (!targetm.legitimate_constant_p (mode, cst))
2464 return 0;
2466 if (!reverse)
2467 offset += size;
2469 l -= size;
2473 max_size = GET_MODE_SIZE (mode);
2476 /* The code above should have handled everything. */
2477 gcc_assert (!l);
2480 return 1;
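/* Illustrative sketch: a minimal CONSTFUN callback of the shape
   expected above.  It must return an rtx of MODE describing the
   bytes at OFFSET; this one yields zeros, exactly as clear_by_pieces_1
   does further below.  A caller first asks can_store_by_pieces and
   only then commits to store_by_pieces:

     static rtx
     zeros_constfun (void *data, HOST_WIDE_INT offset, machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zeros_constfun, NULL, align, true))
       store_by_pieces (to, len, zeros_constfun, NULL, align, true, 0);  */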
2483 /* Generate several move instructions to store LEN bytes generated by
2484 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2485 pointer which will be passed as argument in every CONSTFUN call.
2486 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2487 a memset operation and false if it's a copy of a constant string.
2488 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2489 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2490 stpcpy. */
2493 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2494 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2495 void *constfundata, unsigned int align, bool memsetp, int endp)
2497 machine_mode to_addr_mode = get_address_mode (to);
2498 struct store_by_pieces_d data;
2500 if (len == 0)
2502 gcc_assert (endp != 2);
2503 return to;
2506 gcc_assert (targetm.use_by_pieces_infrastructure_p
2507 (len, align,
2508 memsetp
2509 ? SET_BY_PIECES
2510 : STORE_BY_PIECES,
2511 optimize_insn_for_speed_p ()));
2513 data.constfun = constfun;
2514 data.constfundata = constfundata;
2515 data.len = len;
2516 data.to = to;
2517 store_by_pieces_1 (&data, align);
2518 if (endp)
2520 rtx to1;
2522 gcc_assert (!data.reverse);
2523 if (data.autinc_to)
2525 if (endp == 2)
2527 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2528 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2529 else
2530 data.to_addr = copy_to_mode_reg (to_addr_mode,
2531 plus_constant (to_addr_mode,
2532 data.to_addr,
2533 -1));
2535 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2536 data.offset);
2538 else
2540 if (endp == 2)
2541 --data.offset;
2542 to1 = adjust_address (data.to, QImode, data.offset);
2544 return to1;
2546 else
2547 return data.to;
2550 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2551 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2553 static void
2554 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2556 struct store_by_pieces_d data;
2558 if (len == 0)
2559 return;
2561 data.constfun = clear_by_pieces_1;
2562 data.constfundata = NULL;
2563 data.len = len;
2564 data.to = to;
2565 store_by_pieces_1 (&data, align);
2568 /* Callback routine for clear_by_pieces.
2569 Return const0_rtx unconditionally. */
2571 static rtx
2572 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2573 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2574 machine_mode mode ATTRIBUTE_UNUSED)
2576 return const0_rtx;
2579 /* Subroutine of clear_by_pieces and store_by_pieces.
2580 Generate several move instructions to store LEN bytes of block TO. (A MEM
2581 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2583 static void
2584 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2585 unsigned int align ATTRIBUTE_UNUSED)
2587 machine_mode to_addr_mode = get_address_mode (data->to);
2588 rtx to_addr = XEXP (data->to, 0);
2589 unsigned int max_size = STORE_MAX_PIECES + 1;
2590 enum insn_code icode;
2592 data->offset = 0;
2593 data->to_addr = to_addr;
2594 data->autinc_to
2595 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2596 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2598 data->explicit_inc_to = 0;
2599 data->reverse
2600 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2601 if (data->reverse)
2602 data->offset = data->len;
2604 /* If storing requires more than two move insns,
2605 copy addresses to registers (to make displacements shorter)
2606 and use post-increment if available. */
2607 if (!data->autinc_to
2608 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2610 /* Determine the main mode we'll be using.
2611 MODE might not be used depending on the definitions of the
2612 USE_* macros below. */
2613 machine_mode mode ATTRIBUTE_UNUSED
2614 = widest_int_mode_for_size (max_size);
2616 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2618 data->to_addr = copy_to_mode_reg (to_addr_mode,
2619 plus_constant (to_addr_mode,
2620 to_addr,
2621 data->len));
2622 data->autinc_to = 1;
2623 data->explicit_inc_to = -1;
2626 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2627 && ! data->autinc_to)
2629 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2630 data->autinc_to = 1;
2631 data->explicit_inc_to = 1;
2634 if ( !data->autinc_to && CONSTANT_P (to_addr))
2635 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2638 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2640 /* First store what we can in the largest integer mode, then go to
2641 successively smaller modes. */
2643 while (max_size > 1 && data->len > 0)
2645 machine_mode mode = widest_int_mode_for_size (max_size);
2647 if (mode == VOIDmode)
2648 break;
2650 icode = optab_handler (mov_optab, mode);
2651 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2652 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2654 max_size = GET_MODE_SIZE (mode);
2657 /* The code above should have handled everything. */
2658 gcc_assert (!data->len);
2661 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2662 with move instructions for mode MODE. GENFUN is the gen_... function
2663 to make a move insn for that mode. DATA has all the other info. */
2665 static void
2666 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2667 struct store_by_pieces_d *data)
2669 unsigned int size = GET_MODE_SIZE (mode);
2670 rtx to1, cst;
2672 while (data->len >= size)
2674 if (data->reverse)
2675 data->offset -= size;
2677 if (data->autinc_to)
2678 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2679 data->offset);
2680 else
2681 to1 = adjust_address (data->to, mode, data->offset);
2683 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2684 emit_insn (gen_add2_insn (data->to_addr,
2685 gen_int_mode (-(HOST_WIDE_INT) size,
2686 GET_MODE (data->to_addr))));
2688 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2689 emit_insn ((*genfun) (to1, cst));
2691 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2692 emit_insn (gen_add2_insn (data->to_addr,
2693 gen_int_mode (size,
2694 GET_MODE (data->to_addr))));
2696 if (! data->reverse)
2697 data->offset += size;
2699 data->len -= size;
2703 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2704 its length in bytes. */
2707 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2708 unsigned int expected_align, HOST_WIDE_INT expected_size,
2709 unsigned HOST_WIDE_INT min_size,
2710 unsigned HOST_WIDE_INT max_size,
2711 unsigned HOST_WIDE_INT probable_max_size)
2713 machine_mode mode = GET_MODE (object);
2714 unsigned int align;
2716 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2718 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2719 just move a zero. Otherwise, do this a piece at a time. */
2720 if (mode != BLKmode
2721 && CONST_INT_P (size)
2722 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2724 rtx zero = CONST0_RTX (mode);
2725 if (zero != NULL)
2727 emit_move_insn (object, zero);
2728 return NULL;
2731 if (COMPLEX_MODE_P (mode))
2733 zero = CONST0_RTX (GET_MODE_INNER (mode));
2734 if (zero != NULL)
2736 write_complex_part (object, zero, 0);
2737 write_complex_part (object, zero, 1);
2738 return NULL;
2743 if (size == const0_rtx)
2744 return NULL;
2746 align = MEM_ALIGN (object);
2748 if (CONST_INT_P (size)
2749 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2750 CLEAR_BY_PIECES,
2751 optimize_insn_for_speed_p ()))
2752 clear_by_pieces (object, INTVAL (size), align);
2753 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2754 expected_align, expected_size,
2755 min_size, max_size, probable_max_size))
2757 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2758 return set_storage_via_libcall (object, size, const0_rtx,
2759 method == BLOCK_OP_TAILCALL);
2760 else
2761 gcc_unreachable ();
2763 return NULL;
2767 clear_storage (rtx object, rtx size, enum block_op_methods method)
2769 unsigned HOST_WIDE_INT max, min = 0;
2770 if (GET_CODE (size) == CONST_INT)
2771 min = max = UINTVAL (size);
2772 else
2773 max = GET_MODE_MASK (GET_MODE (size));
2774 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
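/* Illustrative sketch (sizes hypothetical): zeroing a 32-byte BLKmode
   stack temporary goes through clear_storage, which chooses between
   clear_by_pieces, a setmem pattern and a memset libcall:

     rtx mem = assign_stack_temp (BLKmode, 32);
     clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);  */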
2778 /* A subroutine of clear_storage. Expand a call to memset.
2779 Return the return value of memset, 0 otherwise. */
2782 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2784 tree call_expr, fn, object_tree, size_tree, val_tree;
2785 machine_mode size_mode;
2786 rtx retval;
2788 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2789 place those new pseudos into a VAR_DECL and use them later. */
2791 object = copy_addr_to_reg (XEXP (object, 0));
2793 size_mode = TYPE_MODE (sizetype);
2794 size = convert_to_mode (size_mode, size, 1);
2795 size = copy_to_mode_reg (size_mode, size);
2797 /* It is incorrect to use the libcall calling conventions to call
2798 memset in this context. This could be a user call to memset and
2799 the user may wish to examine the return value from memset. For
2800 targets where libcalls and normal calls have different conventions
2801 for returning pointers, we could end up generating incorrect code. */
2803 object_tree = make_tree (ptr_type_node, object);
2804 if (!CONST_INT_P (val))
2805 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2806 size_tree = make_tree (sizetype, size);
2807 val_tree = make_tree (integer_type_node, val);
2809 fn = clear_storage_libcall_fn (true);
2810 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2811 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2813 retval = expand_normal (call_expr);
2815 return retval;
2818 /* A subroutine of set_storage_via_libcall. Create the tree node
2819 for the function we use for block clears. */
2821 tree block_clear_fn;
2823 void
2824 init_block_clear_fn (const char *asmspec)
2826 if (!block_clear_fn)
2828 tree fn, args;
2830 fn = get_identifier ("memset");
2831 args = build_function_type_list (ptr_type_node, ptr_type_node,
2832 integer_type_node, sizetype,
2833 NULL_TREE);
2835 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2836 DECL_EXTERNAL (fn) = 1;
2837 TREE_PUBLIC (fn) = 1;
2838 DECL_ARTIFICIAL (fn) = 1;
2839 TREE_NOTHROW (fn) = 1;
2840 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2841 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2843 block_clear_fn = fn;
2846 if (asmspec)
2847 set_user_assembler_name (block_clear_fn, asmspec);
2850 static tree
2851 clear_storage_libcall_fn (int for_call)
2853 static bool emitted_extern;
2855 if (!block_clear_fn)
2856 init_block_clear_fn (NULL);
2858 if (for_call && !emitted_extern)
2860 emitted_extern = true;
2861 make_decl_rtl (block_clear_fn);
2864 return block_clear_fn;
2867 /* Expand a setmem pattern; return true if successful. */
2869 bool
2870 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2871 unsigned int expected_align, HOST_WIDE_INT expected_size,
2872 unsigned HOST_WIDE_INT min_size,
2873 unsigned HOST_WIDE_INT max_size,
2874 unsigned HOST_WIDE_INT probable_max_size)
2876 /* Try the most limited insn first, because there's no point
2877 including more than one in the machine description unless
2878 the more limited one has some advantage. */
2880 machine_mode mode;
2882 if (expected_align < align)
2883 expected_align = align;
2884 if (expected_size != -1)
2886 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2887 expected_size = max_size;
2888 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2889 expected_size = min_size;
2892 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2893 mode = GET_MODE_WIDER_MODE (mode))
2895 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2897 if (code != CODE_FOR_nothing
2898 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2899 here because if SIZE is less than the mode mask, as it is
2900 returned by the macro, it will definitely be less than the
2901 actual mode mask. Since SIZE is within the Pmode address
2902 space, we limit MODE to Pmode. */
2903 && ((CONST_INT_P (size)
2904 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2905 <= (GET_MODE_MASK (mode) >> 1)))
2906 || max_size <= (GET_MODE_MASK (mode) >> 1)
2907 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2909 struct expand_operand ops[9];
2910 unsigned int nops;
2912 nops = insn_data[(int) code].n_generator_args;
2913 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2915 create_fixed_operand (&ops[0], object);
2916 /* The check above guarantees that this size conversion is valid. */
2917 create_convert_operand_to (&ops[1], size, mode, true);
2918 create_convert_operand_from (&ops[2], val, byte_mode, true);
2919 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2920 if (nops >= 6)
2922 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2923 create_integer_operand (&ops[5], expected_size);
2925 if (nops >= 8)
2927 create_integer_operand (&ops[6], min_size);
2928 /* If we cannot represent the maximal size,
2929 make the parameter NULL. */
2930 if ((HOST_WIDE_INT) max_size != -1)
2931 create_integer_operand (&ops[7], max_size);
2932 else
2933 create_fixed_operand (&ops[7], NULL);
2935 if (nops == 9)
2937 /* If we cannot represent the maximal size,
2938 make the parameter NULL. */
2939 if ((HOST_WIDE_INT) probable_max_size != -1)
2940 create_integer_operand (&ops[8], probable_max_size);
2941 else
2942 create_fixed_operand (&ops[8], NULL);
2944 if (maybe_expand_insn (code, nops, ops))
2945 return true;
2949 return false;
2953 /* Write to one of the components of the complex value CPLX. Write VAL to
2954 the real part if IMAG_P is false, and the imaginary part if it's true. */
2956 void
2957 write_complex_part (rtx cplx, rtx val, bool imag_p)
2959 machine_mode cmode;
2960 machine_mode imode;
2961 unsigned ibitsize;
2963 if (GET_CODE (cplx) == CONCAT)
2965 emit_move_insn (XEXP (cplx, imag_p), val);
2966 return;
2969 cmode = GET_MODE (cplx);
2970 imode = GET_MODE_INNER (cmode);
2971 ibitsize = GET_MODE_BITSIZE (imode);
2973 /* For MEMs simplify_gen_subreg may generate an invalid new address
2974 because, e.g., the original address is considered mode-dependent
2975 by the target, which restricts simplify_subreg from invoking
2976 adjust_address_nv. Instead of preparing fallback support for an
2977 invalid address, we call adjust_address_nv directly. */
2978 if (MEM_P (cplx))
2980 emit_move_insn (adjust_address_nv (cplx, imode,
2981 imag_p ? GET_MODE_SIZE (imode) : 0),
2982 val);
2983 return;
2986 /* If the sub-object is at least word sized, then we know that subregging
2987 will work. This special case is important, since store_bit_field
2988 wants to operate on integer modes, and there's rarely an OImode to
2989 correspond to TCmode. */
2990 if (ibitsize >= BITS_PER_WORD
2991 /* For hard regs we have exact predicates. Assume we can split
2992 the original object if it spans an even number of hard regs.
2993 This special case is important for SCmode on 64-bit platforms
2994 where the natural size of floating-point regs is 32-bit. */
2995 || (REG_P (cplx)
2996 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2997 && REG_NREGS (cplx) % 2 == 0))
2999 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3000 imag_p ? GET_MODE_SIZE (imode) : 0);
3001 if (part)
3003 emit_move_insn (part, val);
3004 return;
3006 else
3007 /* simplify_gen_subreg may fail for sub-word MEMs. */
3008 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3011 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3012 false);
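/* Illustrative sketch: assembling a complex value from two scalar
   parts RE and IM (hypothetical pseudos of the inner mode), the dual
   of the reads done by read_complex_part below:

     rtx c = gen_reg_rtx (SCmode);
     write_complex_part (c, re, false);
     write_complex_part (c, im, true);

   The first call stores the real part, the second the imaginary
   part.  */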
3015 /* Extract one of the components of the complex value CPLX. Extract the
3016 real part if IMAG_P is false, and the imaginary part if it's true. */
3019 read_complex_part (rtx cplx, bool imag_p)
3021 machine_mode cmode, imode;
3022 unsigned ibitsize;
3024 if (GET_CODE (cplx) == CONCAT)
3025 return XEXP (cplx, imag_p);
3027 cmode = GET_MODE (cplx);
3028 imode = GET_MODE_INNER (cmode);
3029 ibitsize = GET_MODE_BITSIZE (imode);
3031 /* Special case reads from complex constants that got spilled to memory. */
3032 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3034 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3035 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3037 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3038 if (CONSTANT_CLASS_P (part))
3039 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3043 /* For MEMs simplify_gen_subreg may generate an invalid new address
3044 because, e.g., the original address is considered mode-dependent
3045 by the target, which restricts simplify_subreg from invoking
3046 adjust_address_nv. Instead of preparing fallback support for an
3047 invalid address, we call adjust_address_nv directly. */
3048 if (MEM_P (cplx))
3049 return adjust_address_nv (cplx, imode,
3050 imag_p ? GET_MODE_SIZE (imode) : 0);
3052 /* If the sub-object is at least word sized, then we know that subregging
3053 will work. This special case is important, since extract_bit_field
3054 wants to operate on integer modes, and there's rarely an OImode to
3055 correspond to TCmode. */
3056 if (ibitsize >= BITS_PER_WORD
3057 /* For hard regs we have exact predicates. Assume we can split
3058 the original object if it spans an even number of hard regs.
3059 This special case is important for SCmode on 64-bit platforms
3060 where the natural size of floating-point regs is 32-bit. */
3061 || (REG_P (cplx)
3062 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3063 && REG_NREGS (cplx) % 2 == 0))
3065 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3066 imag_p ? GET_MODE_SIZE (imode) : 0);
3067 if (ret)
3068 return ret;
3069 else
3070 /* simplify_gen_subreg may fail for sub-word MEMs. */
3071 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3074 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3075 true, NULL_RTX, imode, imode, false);
3078 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3079 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3080 represented in NEW_MODE. If FORCE is true, this will never happen, as
3081 we'll force-create a SUBREG if needed. */
3083 static rtx
3084 emit_move_change_mode (machine_mode new_mode,
3085 machine_mode old_mode, rtx x, bool force)
3087 rtx ret;
3089 if (push_operand (x, GET_MODE (x)))
3091 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3092 MEM_COPY_ATTRIBUTES (ret, x);
3094 else if (MEM_P (x))
3096 /* We don't have to worry about changing the address since the
3097 size in bytes is supposed to be the same. */
3098 if (reload_in_progress)
3100 /* Copy the MEM to change the mode and move any
3101 substitutions from the old MEM to the new one. */
3102 ret = adjust_address_nv (x, new_mode, 0);
3103 copy_replacements (x, ret);
3105 else
3106 ret = adjust_address (x, new_mode, 0);
3108 else
3110 /* Note that we do want simplify_subreg's behavior of validating
3111 that the new mode is ok for a hard register. If we were to use
3112 simplify_gen_subreg, we would create the subreg, but would
3113 probably run into the target not being able to implement it. */
3114 /* Except, of course, when FORCE is true, when this is exactly what
3115 we want. Which is needed for CCmodes on some targets. */
3116 if (force)
3117 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3118 else
3119 ret = simplify_subreg (new_mode, x, old_mode, 0);
3122 return ret;
3125 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3126 an integer mode of the same size as MODE. Returns the instruction
3127 emitted, or NULL if such a move could not be generated. */
3129 static rtx_insn *
3130 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3132 machine_mode imode;
3133 enum insn_code code;
3135 /* There must exist a mode of the exact size we require. */
3136 imode = int_mode_for_mode (mode);
3137 if (imode == BLKmode)
3138 return NULL;
3140 /* The target must support moves in this mode. */
3141 code = optab_handler (mov_optab, imode);
3142 if (code == CODE_FOR_nothing)
3143 return NULL;
3145 x = emit_move_change_mode (imode, mode, x, force);
3146 if (x == NULL_RTX)
3147 return NULL;
3148 y = emit_move_change_mode (imode, mode, y, force);
3149 if (y == NULL_RTX)
3150 return NULL;
3151 return emit_insn (GEN_FCN (code) (x, y));
3154 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3155 Return an equivalent MEM that does not use an auto-increment. */
3158 emit_move_resolve_push (machine_mode mode, rtx x)
3160 enum rtx_code code = GET_CODE (XEXP (x, 0));
3161 HOST_WIDE_INT adjust;
3162 rtx temp;
3164 adjust = GET_MODE_SIZE (mode);
3165 #ifdef PUSH_ROUNDING
3166 adjust = PUSH_ROUNDING (adjust);
3167 #endif
3168 if (code == PRE_DEC || code == POST_DEC)
3169 adjust = -adjust;
3170 else if (code == PRE_MODIFY || code == POST_MODIFY)
3172 rtx expr = XEXP (XEXP (x, 0), 1);
3173 HOST_WIDE_INT val;
3175 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3176 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3177 val = INTVAL (XEXP (expr, 1));
3178 if (GET_CODE (expr) == MINUS)
3179 val = -val;
3180 gcc_assert (adjust == val || adjust == -val);
3181 adjust = val;
3184 /* Do not use anti_adjust_stack, since we don't want to update
3185 stack_pointer_delta. */
3186 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3187 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3188 0, OPTAB_LIB_WIDEN);
3189 if (temp != stack_pointer_rtx)
3190 emit_move_insn (stack_pointer_rtx, temp);
3192 switch (code)
3194 case PRE_INC:
3195 case PRE_DEC:
3196 case PRE_MODIFY:
3197 temp = stack_pointer_rtx;
3198 break;
3199 case POST_INC:
3200 case POST_DEC:
3201 case POST_MODIFY:
3202 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3203 break;
3204 default:
3205 gcc_unreachable ();
3208 return replace_equiv_address (x, temp);
3211 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3212 X is known to satisfy push_operand, and MODE is known to be complex.
3213 Returns the last instruction emitted. */
3215 rtx_insn *
3216 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3218 machine_mode submode = GET_MODE_INNER (mode);
3219 bool imag_first;
3221 #ifdef PUSH_ROUNDING
3222 unsigned int submodesize = GET_MODE_SIZE (submode);
3224 /* In case we output to the stack, but the size is not a quantity the
3225 machine can push exactly, we need to use move instructions. */
3226 if (PUSH_ROUNDING (submodesize) != submodesize)
3228 x = emit_move_resolve_push (mode, x);
3229 return emit_move_insn (x, y);
3231 #endif
3233 /* Note that the real part always precedes the imag part in memory
3234 regardless of the machine's endianness. */
3235 switch (GET_CODE (XEXP (x, 0)))
3237 case PRE_DEC:
3238 case POST_DEC:
3239 imag_first = true;
3240 break;
3241 case PRE_INC:
3242 case POST_INC:
3243 imag_first = false;
3244 break;
3245 default:
3246 gcc_unreachable ();
3249 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3250 read_complex_part (y, imag_first));
3251 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3252 read_complex_part (y, !imag_first));
3255 /* A subroutine of emit_move_complex. Perform the move from Y to X
3256 via two moves of the parts. Returns the last instruction emitted. */
3258 rtx_insn *
3259 emit_move_complex_parts (rtx x, rtx y)
3261 /* Show the output dies here. This is necessary for SUBREGs
3262 of pseudos since we cannot track their lifetimes correctly;
3263 hard regs shouldn't appear here except as return values. */
3264 if (!reload_completed && !reload_in_progress
3265 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3266 emit_clobber (x);
3268 write_complex_part (x, read_complex_part (y, false), false);
3269 write_complex_part (x, read_complex_part (y, true), true);
3271 return get_last_insn ();
3274 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3275 MODE is known to be complex. Returns the last instruction emitted. */
3277 static rtx_insn *
3278 emit_move_complex (machine_mode mode, rtx x, rtx y)
3280 bool try_int;
3282 /* Need to take special care for pushes, to maintain proper ordering
3283 of the data, and possibly extra padding. */
3284 if (push_operand (x, mode))
3285 return emit_move_complex_push (mode, x, y);
3287 /* See if we can coerce the target into moving both values at once, except
3288 for floating point where we favor moving as parts if this is easy. */
3289 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3290 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3291 && !(REG_P (x)
3292 && HARD_REGISTER_P (x)
3293 && REG_NREGS (x) == 1)
3294 && !(REG_P (y)
3295 && HARD_REGISTER_P (y)
3296 && REG_NREGS (y) == 1))
3297 try_int = false;
3298 /* Not possible if the values are inherently not adjacent. */
3299 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3300 try_int = false;
3301 /* Is possible if both are registers (or subregs of registers). */
3302 else if (register_operand (x, mode) && register_operand (y, mode))
3303 try_int = true;
3304 /* If one of the operands is a memory, and alignment constraints
3305 are friendly enough, we may be able to do combined memory operations.
3306 We do not attempt this if Y is a constant because that combination is
3307 usually better with the by-parts thing below. */
3308 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3309 && (!STRICT_ALIGNMENT
3310 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3311 try_int = true;
3312 else
3313 try_int = false;
3315 if (try_int)
3317 rtx_insn *ret;
3319 /* For memory to memory moves, optimal behavior can be had with the
3320 existing block move logic. */
3321 if (MEM_P (x) && MEM_P (y))
3323 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3324 BLOCK_OP_NO_LIBCALL);
3325 return get_last_insn ();
3328 ret = emit_move_via_integer (mode, x, y, true);
3329 if (ret)
3330 return ret;
3333 return emit_move_complex_parts (x, y);
3336 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3337 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3339 static rtx_insn *
3340 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3342 rtx_insn *ret;
3344 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3345 if (mode != CCmode)
3347 enum insn_code code = optab_handler (mov_optab, CCmode);
3348 if (code != CODE_FOR_nothing)
3350 x = emit_move_change_mode (CCmode, mode, x, true);
3351 y = emit_move_change_mode (CCmode, mode, y, true);
3352 return emit_insn (GEN_FCN (code) (x, y));
3356 /* Otherwise, find the MODE_INT mode of the same width. */
3357 ret = emit_move_via_integer (mode, x, y, false);
3358 gcc_assert (ret != NULL);
3359 return ret;
3362 /* Return true if word I of OP lies entirely in the
3363 undefined bits of a paradoxical subreg. */
3365 static bool
3366 undefined_operand_subword_p (const_rtx op, int i)
3368 machine_mode innermode, innermostmode;
3369 int offset;
3370 if (GET_CODE (op) != SUBREG)
3371 return false;
3372 innermode = GET_MODE (op);
3373 innermostmode = GET_MODE (SUBREG_REG (op));
3374 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3375 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3376 memory, except for a paradoxical subreg where we define
3377 SUBREG_BYTE to be 0; undo this exception as in
3378 simplify_subreg. */
3379 if (SUBREG_BYTE (op) == 0
3380 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3382 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3383 if (WORDS_BIG_ENDIAN)
3384 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3385 if (BYTES_BIG_ENDIAN)
3386 offset += difference % UNITS_PER_WORD;
3388 if (offset >= GET_MODE_SIZE (innermostmode)
3389 || offset <= -GET_MODE_SIZE (word_mode))
3390 return true;
3391 return false;
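/* Worked example, assuming a little-endian target with
   UNITS_PER_WORD == 4: for (subreg:TI (reg:DI d) 0), words 0 and 1
   come from D, while words 2 and 3 lie at offsets of 8 bytes or more,
   past GET_MODE_SIZE (DImode) == 8; the function returns true for
   I == 2 and I == 3, so emit_move_multi_word can skip those words.  */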
3394 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3395 MODE is any multi-word or full-word mode that lacks a move_insn
3396 pattern. Note that you will get better code if you define such
3397 patterns, even if they must turn into multiple assembler instructions. */
3399 static rtx_insn *
3400 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3402 rtx_insn *last_insn = 0;
3403 rtx_insn *seq;
3404 rtx inner;
3405 bool need_clobber;
3406 int i;
3408 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3410 /* If X is a push on the stack, do the push now and replace
3411 X with a reference to the stack pointer. */
3412 if (push_operand (x, mode))
3413 x = emit_move_resolve_push (mode, x);
3415 /* If we are in reload, see if either operand is a MEM whose address
3416 is scheduled for replacement. */
3417 if (reload_in_progress && MEM_P (x)
3418 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3419 x = replace_equiv_address_nv (x, inner);
3420 if (reload_in_progress && MEM_P (y)
3421 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3422 y = replace_equiv_address_nv (y, inner);
3424 start_sequence ();
3426 need_clobber = false;
3427 for (i = 0;
3428 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3429 i++)
3431 rtx xpart = operand_subword (x, i, 1, mode);
3432 rtx ypart;
3434 /* Do not generate code for a move if it would come entirely
3435 from the undefined bits of a paradoxical subreg. */
3436 if (undefined_operand_subword_p (y, i))
3437 continue;
3439 ypart = operand_subword (y, i, 1, mode);
3441 /* If we can't get a part of Y, put Y into memory if it is a
3442 constant. Otherwise, force it into a register. Then we must
3443 be able to get a part of Y. */
3444 if (ypart == 0 && CONSTANT_P (y))
3446 y = use_anchored_address (force_const_mem (mode, y));
3447 ypart = operand_subword (y, i, 1, mode);
3449 else if (ypart == 0)
3450 ypart = operand_subword_force (y, i, mode);
3452 gcc_assert (xpart && ypart);
3454 need_clobber |= (GET_CODE (xpart) == SUBREG);
3456 last_insn = emit_move_insn (xpart, ypart);
3459 seq = get_insns ();
3460 end_sequence ();
3462 /* Show the output dies here. This is necessary for SUBREGs
3463 of pseudos since we cannot track their lifetimes correctly;
3464 hard regs shouldn't appear here except as return values.
3465 We never want to emit such a clobber after reload. */
3466 if (x != y
3467 && ! (reload_in_progress || reload_completed)
3468 && need_clobber != 0)
3469 emit_clobber (x);
3471 emit_insn (seq);
3473 return last_insn;
3476 /* Low level part of emit_move_insn.
3477 Called just like emit_move_insn, but assumes X and Y
3478 are basically valid. */
3480 rtx_insn *
3481 emit_move_insn_1 (rtx x, rtx y)
3483 machine_mode mode = GET_MODE (x);
3484 enum insn_code code;
3486 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3488 code = optab_handler (mov_optab, mode);
3489 if (code != CODE_FOR_nothing)
3490 return emit_insn (GEN_FCN (code) (x, y));
3492 /* Expand complex moves by moving real part and imag part. */
3493 if (COMPLEX_MODE_P (mode))
3494 return emit_move_complex (mode, x, y);
3496 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3497 || ALL_FIXED_POINT_MODE_P (mode))
3499 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3501 /* If we can't find an integer mode, use multi words. */
3502 if (result)
3503 return result;
3504 else
3505 return emit_move_multi_word (mode, x, y);
3508 if (GET_MODE_CLASS (mode) == MODE_CC)
3509 return emit_move_ccmode (mode, x, y);
3511 /* Try using a move pattern for the corresponding integer mode. This is
3512 only safe when simplify_subreg can convert MODE constants into integer
3513 constants. At present, it can only do this reliably if the value
3514 fits within a HOST_WIDE_INT. */
3515 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3517 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3519 if (ret)
3521 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3522 return ret;
3526 return emit_move_multi_word (mode, x, y);
3529 /* Generate code to copy Y into X.
3530 Both Y and X must have the same mode, except that
3531 Y can be a constant with VOIDmode.
3532 This mode cannot be BLKmode; use emit_block_move for that.
3534 Return the last instruction emitted. */
3536 rtx_insn *
3537 emit_move_insn (rtx x, rtx y)
3539 machine_mode mode = GET_MODE (x);
3540 rtx y_cst = NULL_RTX;
3541 rtx_insn *last_insn;
3542 rtx set;
3544 gcc_assert (mode != BLKmode
3545 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3547 if (CONSTANT_P (y))
3549 if (optimize
3550 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3551 && (last_insn = compress_float_constant (x, y)))
3552 return last_insn;
3554 y_cst = y;
3556 if (!targetm.legitimate_constant_p (mode, y))
3558 y = force_const_mem (mode, y);
3560 /* If the target's cannot_force_const_mem prevented the spill,
3561 assume that the target's move expanders will also take care
3562 of the non-legitimate constant. */
3563 if (!y)
3564 y = y_cst;
3565 else
3566 y = use_anchored_address (y);
3570 /* If X or Y are memory references, verify that their addresses are valid
3571 for the machine. */
3572 if (MEM_P (x)
3573 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3574 MEM_ADDR_SPACE (x))
3575 && ! push_operand (x, GET_MODE (x))))
3576 x = validize_mem (x);
3578 if (MEM_P (y)
3579 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3580 MEM_ADDR_SPACE (y)))
3581 y = validize_mem (y);
3583 gcc_assert (mode != BLKmode);
3585 last_insn = emit_move_insn_1 (x, y);
3587 if (y_cst && REG_P (x)
3588 && (set = single_set (last_insn)) != NULL_RTX
3589 && SET_DEST (set) == x
3590 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3591 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3593 return last_insn;
3596 /* Generate the body of an instruction to copy Y into X.
3597 It may be a list of insns, if one insn isn't enough. */
3599 rtx_insn *
3600 gen_move_insn (rtx x, rtx y)
3602 rtx_insn *seq;
3604 start_sequence ();
3605 emit_move_insn_1 (x, y);
3606 seq = get_insns ();
3607 end_sequence ();
3608 return seq;
3611 /* If Y is representable exactly in a narrower mode, and the target can
3612 perform the extension directly from constant or memory, then emit the
3613 move as an extension. */
3615 static rtx_insn *
3616 compress_float_constant (rtx x, rtx y)
3618 machine_mode dstmode = GET_MODE (x);
3619 machine_mode orig_srcmode = GET_MODE (y);
3620 machine_mode srcmode;
3621 const REAL_VALUE_TYPE *r;
3622 int oldcost, newcost;
3623 bool speed = optimize_insn_for_speed_p ();
3625 r = CONST_DOUBLE_REAL_VALUE (y);
3627 if (targetm.legitimate_constant_p (dstmode, y))
3628 oldcost = set_src_cost (y, orig_srcmode, speed);
3629 else
3630 oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3632 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3633 srcmode != orig_srcmode;
3634 srcmode = GET_MODE_WIDER_MODE (srcmode))
3636 enum insn_code ic;
3637 rtx trunc_y;
3638 rtx_insn *last_insn;
3640 /* Skip if the target can't extend this way. */
3641 ic = can_extend_p (dstmode, srcmode, 0);
3642 if (ic == CODE_FOR_nothing)
3643 continue;
3645 /* Skip if the narrowed value isn't exact. */
3646 if (! exact_real_truncate (srcmode, r))
3647 continue;
3649 trunc_y = const_double_from_real_value (*r, srcmode);
3651 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3653 /* Skip if the target needs extra instructions to perform
3654 the extension. */
3655 if (!insn_operand_matches (ic, 1, trunc_y))
3656 continue;
3657 /* This is valid, but may not be cheaper than the original. */
3658 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3659 dstmode, speed);
3660 if (oldcost < newcost)
3661 continue;
3663 else if (float_extend_from_mem[dstmode][srcmode])
3665 trunc_y = force_const_mem (srcmode, trunc_y);
3666 /* This is valid, but may not be cheaper than the original. */
3667 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3668 dstmode, speed);
3669 if (oldcost < newcost)
3670 continue;
3671 trunc_y = validize_mem (trunc_y);
3673 else
3674 continue;
3676 /* For CSE's benefit, force the compressed constant pool entry
3677 into a new pseudo. This constant may be used in different modes,
3678 and if not, combine will put things back together for us. */
3679 trunc_y = force_reg (srcmode, trunc_y);
3681 /* If x is a hard register, perform the extension into a pseudo,
3682 so that e.g. stack realignment code is aware of it. */
3683 rtx target = x;
3684 if (REG_P (x) && HARD_REGISTER_P (x))
3685 target = gen_reg_rtx (dstmode);
3687 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3688 last_insn = get_last_insn ();
3690 if (REG_P (target))
3691 set_unique_reg_note (last_insn, REG_EQUAL, y);
3693 if (target != x)
3694 return emit_move_insn (x, target);
3695 return last_insn;
3698 return NULL;
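/* Illustrative example: on a target with a (float_extend:DF (mem:SF))
   pattern, the DFmode constant 1.0 truncates exactly to SFmode, so
   the move into a DFmode register can be emitted as an SFmode
   constant-pool load plus an extension, halving the pool entry.  An
   inexact constant such as 0.1 fails exact_real_truncate and is left
   alone.  */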
3701 /* Pushing data onto the stack. */
3703 /* Push a block of length SIZE (perhaps variable)
3704 and return an rtx to address the beginning of the block.
3705 The value may be virtual_outgoing_args_rtx.
3707 EXTRA is the number of bytes of padding to push in addition to SIZE.
3708 BELOW nonzero means this padding comes at low addresses;
3709 otherwise, the padding comes at high addresses. */
3712 push_block (rtx size, int extra, int below)
3714 rtx temp;
3716 size = convert_modes (Pmode, ptr_mode, size, 1);
3717 if (CONSTANT_P (size))
3718 anti_adjust_stack (plus_constant (Pmode, size, extra));
3719 else if (REG_P (size) && extra == 0)
3720 anti_adjust_stack (size);
3721 else
3723 temp = copy_to_mode_reg (Pmode, size);
3724 if (extra != 0)
3725 temp = expand_binop (Pmode, add_optab, temp,
3726 gen_int_mode (extra, Pmode),
3727 temp, 0, OPTAB_LIB_WIDEN);
3728 anti_adjust_stack (temp);
3731 if (STACK_GROWS_DOWNWARD)
3733 temp = virtual_outgoing_args_rtx;
3734 if (extra != 0 && below)
3735 temp = plus_constant (Pmode, temp, extra);
3737 else
3739 if (CONST_INT_P (size))
3740 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3741 -INTVAL (size) - (below ? 0 : extra));
3742 else if (extra != 0 && !below)
3743 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3744 negate_rtx (Pmode, plus_constant (Pmode, size,
3745 extra)));
3746 else
3747 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3748 negate_rtx (Pmode, size));
3751 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
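/* Worked example, assuming STACK_GROWS_DOWNWARD and constant
   operands: push_block (GEN_INT (16), 8, 1) anti-adjusts the stack
   pointer by 24 bytes and returns an address 8 bytes above
   virtual_outgoing_args_rtx, so the padding lies below the block.  */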
3754 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3756 static rtx
3757 mem_autoinc_base (rtx mem)
3759 if (MEM_P (mem))
3761 rtx addr = XEXP (mem, 0);
3762 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3763 return XEXP (addr, 0);
3765 return NULL;
3768 /* A utility routine used here, in reload, and in try_split. The insns
3769 after PREV up to and including LAST are known to adjust the stack,
3770 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3771 placing notes as appropriate. PREV may be NULL, indicating the
3772 entire insn sequence prior to LAST should be scanned.
3774 The set of allowed stack pointer modifications is small:
3775 (1) One or more auto-inc style memory references (aka pushes),
3776 (2) One or more addition/subtraction with the SP as destination,
3777 (3) A single move insn with the SP as destination,
3778 (4) A call_pop insn,
3779 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3781 Insns in the sequence that do not modify the SP are ignored,
3782 except for noreturn calls.
3784 The return value is the amount of adjustment that can be trivially
3785 verified, via immediate operand or auto-inc. If the adjustment
3786 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3788 HOST_WIDE_INT
3789 find_args_size_adjust (rtx_insn *insn)
3791 rtx dest, set, pat;
3792 int i;
3794 pat = PATTERN (insn);
3795 set = NULL;
3797 /* Look for a call_pop pattern. */
3798 if (CALL_P (insn))
3800 /* We have to allow non-call_pop patterns for the case
3801 of emit_single_push_insn of a TLS address. */
3802 if (GET_CODE (pat) != PARALLEL)
3803 return 0;
3805 /* All call_pop patterns have a stack pointer adjust in the parallel.
3806 The call itself is always first, and the stack adjust is
3807 usually last, so search from the end. */
3808 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3810 set = XVECEXP (pat, 0, i);
3811 if (GET_CODE (set) != SET)
3812 continue;
3813 dest = SET_DEST (set);
3814 if (dest == stack_pointer_rtx)
3815 break;
3817 /* We'd better have found the stack pointer adjust. */
3818 if (i == 0)
3819 return 0;
3820 /* Fall through to process the extracted SET and DEST
3821 as if it was a standalone insn. */
3823 else if (GET_CODE (pat) == SET)
3824 set = pat;
3825 else if ((set = single_set (insn)) != NULL)
3827 else if (GET_CODE (pat) == PARALLEL)
3829 /* ??? Some older ports use a parallel with a stack adjust
3830 and a store for a PUSH_ROUNDING pattern, rather than a
3831 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3832 /* ??? See h8300 and m68k, pushqi1. */
3833 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3835 set = XVECEXP (pat, 0, i);
3836 if (GET_CODE (set) != SET)
3837 continue;
3838 dest = SET_DEST (set);
3839 if (dest == stack_pointer_rtx)
3840 break;
3842 /* We do not expect an auto-inc of the sp in the parallel. */
3843 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3844 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3845 != stack_pointer_rtx);
3847 if (i < 0)
3848 return 0;
3850 else
3851 return 0;
3853 dest = SET_DEST (set);
3855 /* Look for direct modifications of the stack pointer. */
3856 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3858 /* Look for a trivial adjustment, otherwise assume nothing. */
3859 /* Note that the SPU restore_stack_block pattern refers to
3860 the stack pointer in V4SImode. Consider that non-trivial. */
3861 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3862 && GET_CODE (SET_SRC (set)) == PLUS
3863 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3864 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3865 return INTVAL (XEXP (SET_SRC (set), 1));
3866 /* ??? Reload can generate no-op moves, which will be cleaned
3867 up later. Recognize it and continue searching. */
3868 else if (rtx_equal_p (dest, SET_SRC (set)))
3869 return 0;
3870 else
3871 return HOST_WIDE_INT_MIN;
3873 else
3875 rtx mem, addr;
3877 /* Otherwise only think about autoinc patterns. */
3878 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3880 mem = dest;
3881 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3882 != stack_pointer_rtx);
3884 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3885 mem = SET_SRC (set);
3886 else
3887 return 0;
3889 addr = XEXP (mem, 0);
3890 switch (GET_CODE (addr))
3892 case PRE_INC:
3893 case POST_INC:
3894 return GET_MODE_SIZE (GET_MODE (mem));
3895 case PRE_DEC:
3896 case POST_DEC:
3897 return -GET_MODE_SIZE (GET_MODE (mem));
3898 case PRE_MODIFY:
3899 case POST_MODIFY:
3900 addr = XEXP (addr, 1);
3901 gcc_assert (GET_CODE (addr) == PLUS);
3902 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3903 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3904 return INTVAL (XEXP (addr, 1));
3905 default:
3906 gcc_unreachable ();
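/* Two illustrative inputs (sketches, assuming a 4-byte SImode):

     (set (reg sp) (plus (reg sp) (const_int -16)))
       => returns -16 (case (2) above, a trivially visible adjustment);

     (set (mem:SI (pre_dec (reg sp))) (reg r0))
       => returns -4, the negated GET_MODE_SIZE of the pushed value.

   A move of the stack pointer in a non-integer mode, or an addition
   whose second operand is not a CONST_INT, yields HOST_WIDE_INT_MIN.  */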
3911 int
3912 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3914 int args_size = end_args_size;
3915 bool saw_unknown = false;
3916 rtx_insn *insn;
3918 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3920 HOST_WIDE_INT this_delta;
3922 if (!NONDEBUG_INSN_P (insn))
3923 continue;
3925 this_delta = find_args_size_adjust (insn);
3926 if (this_delta == 0)
3928 if (!CALL_P (insn)
3929 || ACCUMULATE_OUTGOING_ARGS
3930 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3931 continue;
3934 gcc_assert (!saw_unknown);
3935 if (this_delta == HOST_WIDE_INT_MIN)
3936 saw_unknown = true;
3938 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3939 if (STACK_GROWS_DOWNWARD)
3940 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3942 args_size -= this_delta;
3945 return saw_unknown ? INT_MIN : args_size;
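/* Example (a sketch): on a downward-growing stack, after two SImode
   pushes with END_ARGS_SIZE == 8 the backward walk attaches

     push insn #2: REG_ARGS_SIZE 8
     push insn #1: REG_ARGS_SIZE 4

   so each note records the outgoing-argument size in effect after its
   insn executes, and 0 is returned for the state before the sequence.  */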
3948 #ifdef PUSH_ROUNDING
3949 /* Emit single push insn. */
3951 static void
3952 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3954 rtx dest_addr;
3955 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3956 rtx dest;
3957 enum insn_code icode;
3959 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3960 /* If there is a push pattern, use it. Otherwise try the old way of
3961 throwing a MEM representing the push operation to the move expander. */
3962 icode = optab_handler (push_optab, mode);
3963 if (icode != CODE_FOR_nothing)
3965 struct expand_operand ops[1];
3967 create_input_operand (&ops[0], x, mode);
3968 if (maybe_expand_insn (icode, 1, ops))
3969 return;
3971 if (GET_MODE_SIZE (mode) == rounded_size)
3972 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3973 /* If we are to pad downward, adjust the stack pointer first and
3974 then store X into the stack location using an offset. This is
3975 because emit_move_insn does not know how to pad; it does not have
3976 access to the type. */
3977 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3979 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3980 HOST_WIDE_INT offset;
3982 emit_move_insn (stack_pointer_rtx,
3983 expand_binop (Pmode,
3984 STACK_GROWS_DOWNWARD ? sub_optab
3985 : add_optab,
3986 stack_pointer_rtx,
3987 gen_int_mode (rounded_size, Pmode),
3988 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3990 offset = (HOST_WIDE_INT) padding_size;
3991 if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3992 /* We have already decremented the stack pointer, so get the
3993 previous value. */
3994 offset += (HOST_WIDE_INT) rounded_size;
3996 if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3997 /* We have already incremented the stack pointer, so get the
3998 previous value. */
3999 offset -= (HOST_WIDE_INT) rounded_size;
4001 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4002 gen_int_mode (offset, Pmode));
4004 else
4006 if (STACK_GROWS_DOWNWARD)
4007 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4008 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4009 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4010 Pmode));
4011 else
4012 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4013 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4014 gen_int_mode (rounded_size, Pmode));
4016 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4019 dest = gen_rtx_MEM (mode, dest_addr);
4021 if (type != 0)
4023 set_mem_attributes (dest, type, 1);
4025 if (cfun->tail_call_marked)
4026 /* Function incoming arguments may overlap with sibling call
4027 outgoing arguments and we cannot allow reordering of reads
4028 from function arguments with stores to outgoing arguments
4029 of sibling calls. */
4030 set_mem_alias_set (dest, 0);
4032 emit_move_insn (dest, x);
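/* Worked example of the downward-padding path above (a sketch under
   assumed settings): pushing an HImode value where PUSH_ROUNDING rounds
   2 bytes up to 4, on a downward-growing stack with STACK_PUSH_CODE ==
   PRE_DEC: padding_size = 2, the stack pointer is first dropped by 4,
   and the value is then stored at sp + 2, leaving the two pad bytes
   below it.  */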
4035 /* Emit and annotate a single push insn. */
4037 static void
4038 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4040 int delta, old_delta = stack_pointer_delta;
4041 rtx_insn *prev = get_last_insn ();
4042 rtx_insn *last;
4044 emit_single_push_insn_1 (mode, x, type);
4046 last = get_last_insn ();
4048 /* Notice the common case where we emitted exactly one insn. */
4049 if (PREV_INSN (last) == prev)
4051 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4052 return;
4055 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4056 gcc_assert (delta == INT_MIN || delta == old_delta);
4058 #endif
4060 /* If reading SIZE bytes from X will end up reading from
4061 Y, return the number of bytes that overlap. Return -1
4062 if there is no overlap, or -2 if we cannot determine this
4063 (for example when X and Y have different base registers). */
4065 static int
4066 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4068 rtx tmp = plus_constant (Pmode, x, size);
4069 rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4071 if (!CONST_INT_P (sub))
4072 return -2;
4074 HOST_WIDE_INT val = INTVAL (sub);
4076 return IN_RANGE (val, 1, size) ? val : -1;
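/* For example (a sketch): with X = sp, Y = sp + 8 and SIZE = 16, the
   difference (X + SIZE) - Y folds to (const_int 8), which lies in
   [1, 16], so 8 of the bytes read from X land on or after Y.  If the
   two addresses use different base registers, the MINUS does not fold
   to a CONST_INT and -2 is returned.  */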
4079 /* Generate code to push X onto the stack, assuming it has mode MODE and
4080 type TYPE.
4081 MODE is redundant except when X is a CONST_INT (since they don't
4082 carry mode info).
4083 SIZE is an rtx for the size of data to be copied (in bytes),
4084 needed only if X is BLKmode.
4085 Return true if successful. May return false if asked to push a
4086 partial argument during a sibcall optimization (as specified by
4087 SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4088 to not overlap.
4090 ALIGN (in bits) is maximum alignment we can assume.
4092 If PARTIAL and REG are both nonzero, then copy that many of the first
4093 bytes of X into registers starting with REG, and push the rest of X.
4094 The amount of space pushed is decreased by PARTIAL bytes.
4095 REG must be a hard register in this case.
4096 If REG is zero but PARTIAL is not, take all other actions for an
4097 argument partially in registers, but do not actually load any
4098 registers.
4100 EXTRA is the amount in bytes of extra space to leave next to this arg.
4101 This is ignored if an argument block has already been allocated.
4103 On a machine that lacks real push insns, ARGS_ADDR is the address of
4104 the bottom of the argument block for this call. We use indexing off there
4105 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4106 argument block has not been preallocated.
4108 ARGS_SO_FAR is the size of args previously pushed for this call.
4110 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4111 for arguments passed in registers. If nonzero, it will be the number
4112 of bytes required. */
4114 bool
4115 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4116 unsigned int align, int partial, rtx reg, int extra,
4117 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4118 rtx alignment_pad, bool sibcall_p)
4120 rtx xinner;
4121 enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4123 /* Decide where to pad the argument: `downward' for below,
4124 `upward' for above, or `none' for no padding.
4125 Default is below for small data on big-endian machines; else above. */
4126 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4128 /* Invert direction if stack is post-decrement.
4129 FIXME: why? */
4130 if (STACK_PUSH_CODE == POST_DEC)
4131 if (where_pad != none)
4132 where_pad = (where_pad == downward ? upward : downward);
4134 xinner = x;
4136 int nregs = partial / UNITS_PER_WORD;
4137 rtx *tmp_regs = NULL;
4138 int overlapping = 0;
4140 if (mode == BLKmode
4141 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4143 /* Copy a block into the stack, entirely or partially. */
4145 rtx temp;
4146 int used;
4147 int offset;
4148 int skip;
4150 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4151 used = partial - offset;
4153 if (mode != BLKmode)
4155 /* A value is to be stored in an insufficiently aligned
4156 stack slot; copy via a suitably aligned slot if
4157 necessary. */
4158 size = GEN_INT (GET_MODE_SIZE (mode));
4159 if (!MEM_P (xinner))
4161 temp = assign_temp (type, 1, 1);
4162 emit_move_insn (temp, xinner);
4163 xinner = temp;
4167 gcc_assert (size);
4169 /* USED is now the # of bytes we need not copy to the stack
4170 because registers will take care of them. */
4172 if (partial != 0)
4173 xinner = adjust_address (xinner, BLKmode, used);
4175 /* If the partial register-part of the arg counts in its stack size,
4176 skip the part of stack space corresponding to the registers.
4177 Otherwise, start copying to the beginning of the stack space,
4178 by setting SKIP to 0. */
4179 skip = (reg_parm_stack_space == 0) ? 0 : used;
4181 #ifdef PUSH_ROUNDING
4182 /* Do it with several push insns if that doesn't take lots of insns
4183 and if there is no difficulty with push insns that skip bytes
4184 on the stack for alignment purposes. */
4185 if (args_addr == 0
4186 && PUSH_ARGS
4187 && CONST_INT_P (size)
4188 && skip == 0
4189 && MEM_ALIGN (xinner) >= align
4190 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4191 /* Here we avoid the case of a structure whose weak alignment
4192 forces many pushes of a small amount of data,
4193 and such small pushes do rounding that causes trouble. */
4194 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4195 || align >= BIGGEST_ALIGNMENT
4196 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4197 == (align / BITS_PER_UNIT)))
4198 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4200 /* Push padding now if padding above and stack grows down,
4201 or if padding below and stack grows up.
4202 But if space already allocated, this has already been done. */
4203 if (extra && args_addr == 0
4204 && where_pad != none && where_pad != stack_direction)
4205 anti_adjust_stack (GEN_INT (extra));
4207 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4209 else
4210 #endif /* PUSH_ROUNDING */
4212 rtx target;
4214 /* Otherwise make space on the stack and copy the data
4215 to the address of that space. */
4217 /* Deduct words put into registers from the size we must copy. */
4218 if (partial != 0)
4220 if (CONST_INT_P (size))
4221 size = GEN_INT (INTVAL (size) - used);
4222 else
4223 size = expand_binop (GET_MODE (size), sub_optab, size,
4224 gen_int_mode (used, GET_MODE (size)),
4225 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4228 /* Get the address of the stack space.
4229 In this case, we do not deal with EXTRA separately.
4230 A single stack adjust will do. */
4231 if (! args_addr)
4233 temp = push_block (size, extra, where_pad == downward);
4234 extra = 0;
4236 else if (CONST_INT_P (args_so_far))
4237 temp = memory_address (BLKmode,
4238 plus_constant (Pmode, args_addr,
4239 skip + INTVAL (args_so_far)));
4240 else
4241 temp = memory_address (BLKmode,
4242 plus_constant (Pmode,
4243 gen_rtx_PLUS (Pmode,
4244 args_addr,
4245 args_so_far),
4246 skip));
4248 if (!ACCUMULATE_OUTGOING_ARGS)
4250 /* If the source is referenced relative to the stack pointer,
4251 copy it to another register to stabilize it. We do not need
4252 to do this if we know that we won't be changing sp. */
4254 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4255 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4256 temp = copy_to_reg (temp);
4259 target = gen_rtx_MEM (BLKmode, temp);
4261 /* We do *not* set_mem_attributes here, because incoming arguments
4262 may overlap with sibling call outgoing arguments and we cannot
4263 allow reordering of reads from function arguments with stores
4264 to outgoing arguments of sibling calls. We do, however, want
4265 to record the alignment of the stack slot. */
4266 /* ALIGN may well be better aligned than TYPE, e.g. due to
4267 PARM_BOUNDARY. Assume the caller isn't lying. */
4268 set_mem_align (target, align);
4270 /* If part should go in registers and pushing to that part would
4271 overwrite some of the values that need to go into regs, load the
4272 overlapping values into temporary pseudos to be moved into the hard
4273 regs at the end after the stack pushing has completed.
4274 We cannot load them directly into the hard regs here because
4275 they can be clobbered by the block move expansions.
4276 See PR 65358. */
4278 if (partial > 0 && reg != 0 && mode == BLKmode
4279 && GET_CODE (reg) != PARALLEL)
4281 overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4282 if (overlapping > 0)
4284 gcc_assert (overlapping % UNITS_PER_WORD == 0);
4285 overlapping /= UNITS_PER_WORD;
4287 tmp_regs = XALLOCAVEC (rtx, overlapping);
4289 for (int i = 0; i < overlapping; i++)
4290 tmp_regs[i] = gen_reg_rtx (word_mode);
4292 for (int i = 0; i < overlapping; i++)
4293 emit_move_insn (tmp_regs[i],
4294 operand_subword_force (target, i, mode));
4296 else if (overlapping == -1)
4297 overlapping = 0;
4298 /* Could not determine whether there is overlap.
4299 Fail the sibcall. */
4300 else
4302 overlapping = 0;
4303 if (sibcall_p)
4304 return false;
4307 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4310 else if (partial > 0)
4312 /* Scalar partly in registers. */
4314 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4315 int i;
4316 int not_stack;
4317 /* # bytes of start of argument
4318 that we must make space for but need not store. */
4319 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4320 int args_offset = INTVAL (args_so_far);
4321 int skip;
4323 /* Push padding now if padding above and stack grows down,
4324 or if padding below and stack grows up.
4325 But if space already allocated, this has already been done. */
4326 if (extra && args_addr == 0
4327 && where_pad != none && where_pad != stack_direction)
4328 anti_adjust_stack (GEN_INT (extra));
4330 /* If we make space by pushing it, we might as well push
4331 the real data. Otherwise, we can leave OFFSET nonzero
4332 and leave the space uninitialized. */
4333 if (args_addr == 0)
4334 offset = 0;
4336 /* Now NOT_STACK gets the number of words that we don't need to
4337 allocate on the stack. Convert OFFSET to words too. */
4338 not_stack = (partial - offset) / UNITS_PER_WORD;
4339 offset /= UNITS_PER_WORD;
4341 /* If the partial register-part of the arg counts in its stack size,
4342 skip the part of stack space corresponding to the registers.
4343 Otherwise, start copying to the beginning of the stack space,
4344 by setting SKIP to 0. */
4345 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4347 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4348 x = validize_mem (force_const_mem (mode, x));
4350 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4351 SUBREGs of such registers are not allowed. */
4352 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4353 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4354 x = copy_to_reg (x);
4356 /* Loop over all the words allocated on the stack for this arg. */
4357 /* We can do it by words, because any scalar bigger than a word
4358 has a size that is a multiple of a word. */
4359 for (i = size - 1; i >= not_stack; i--)
4360 if (i >= not_stack + offset)
4361 if (!emit_push_insn (operand_subword_force (x, i, mode),
4362 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4363 0, args_addr,
4364 GEN_INT (args_offset + ((i - not_stack + skip)
4365 * UNITS_PER_WORD)),
4366 reg_parm_stack_space, alignment_pad, sibcall_p))
4367 return false;
4369 else
4371 rtx addr;
4372 rtx dest;
4374 /* Push padding now if padding above and stack grows down,
4375 or if padding below and stack grows up.
4376 But if space already allocated, this has already been done. */
4377 if (extra && args_addr == 0
4378 && where_pad != none && where_pad != stack_direction)
4379 anti_adjust_stack (GEN_INT (extra));
4381 #ifdef PUSH_ROUNDING
4382 if (args_addr == 0 && PUSH_ARGS)
4383 emit_single_push_insn (mode, x, type);
4384 else
4385 #endif
4387 if (CONST_INT_P (args_so_far))
4388 addr
4389 = memory_address (mode,
4390 plus_constant (Pmode, args_addr,
4391 INTVAL (args_so_far)));
4392 else
4393 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4394 args_so_far));
4395 dest = gen_rtx_MEM (mode, addr);
4397 /* We do *not* set_mem_attributes here, because incoming arguments
4398 may overlap with sibling call outgoing arguments and we cannot
4399 allow reordering of reads from function arguments with stores
4400 to outgoing arguments of sibling calls. We do, however, want
4401 to record the alignment of the stack slot. */
4402 /* ALIGN may well be better aligned than TYPE, e.g. due to
4403 PARM_BOUNDARY. Assume the caller isn't lying. */
4404 set_mem_align (dest, align);
4406 emit_move_insn (dest, x);
4410 /* Move the partial arguments into the registers and any overlapping
4411 values that we moved into the pseudos in tmp_regs. */
4412 if (partial > 0 && reg != 0)
4414 /* Handle calls that pass values in multiple non-contiguous locations.
4415 The Irix 6 ABI has examples of this. */
4416 if (GET_CODE (reg) == PARALLEL)
4417 emit_group_load (reg, x, type, -1);
4418 else
4420 gcc_assert (partial % UNITS_PER_WORD == 0);
4421 move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4423 for (int i = 0; i < overlapping; i++)
4424 emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4425 + nregs - overlapping + i),
4426 tmp_regs[i]);
4431 if (extra && args_addr == 0 && where_pad == stack_direction)
4432 anti_adjust_stack (GEN_INT (extra));
4434 if (alignment_pad && args_addr == 0)
4435 anti_adjust_stack (alignment_pad);
4437 return true;
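/* Illustration of the overlap handling above (a sketch; cf. PR 65358):
   for a sibcall passing a BLKmode argument partially in registers, the
   outgoing stack slot may overlap the incoming argument area that X
   still points into.  When memory_load_overlap can prove how many bytes
   collide, those words are parked in the TMP_REGS pseudos before the
   block move clobbers them and only copied into the hard registers at
   the end; when it cannot prove anything (-2) and this is a sibcall,
   the push is abandoned and the caller falls back to a normal call.  */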
4440 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4441 operations. */
4443 static rtx
4444 get_subtarget (rtx x)
4446 return (optimize
4447 || x == 0
4448 /* Only registers can be subtargets. */
4449 || !REG_P (x)
4450 /* Don't use hard regs to avoid extending their life. */
4451 || REGNO (x) < FIRST_PSEUDO_REGISTER
4452 ? 0 : x);
4455 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4456 FIELD is a bitfield. Returns true if the optimization was successful,
4457 and there's nothing else to do. */
4459 static bool
4460 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4461 unsigned HOST_WIDE_INT bitpos,
4462 unsigned HOST_WIDE_INT bitregion_start,
4463 unsigned HOST_WIDE_INT bitregion_end,
4464 machine_mode mode1, rtx str_rtx,
4465 tree to, tree src, bool reverse)
4467 machine_mode str_mode = GET_MODE (str_rtx);
4468 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4469 tree op0, op1;
4470 rtx value, result;
4471 optab binop;
4472 gimple *srcstmt;
4473 enum tree_code code;
4475 if (mode1 != VOIDmode
4476 || bitsize >= BITS_PER_WORD
4477 || str_bitsize > BITS_PER_WORD
4478 || TREE_SIDE_EFFECTS (to)
4479 || TREE_THIS_VOLATILE (to))
4480 return false;
4482 STRIP_NOPS (src);
4483 if (TREE_CODE (src) != SSA_NAME)
4484 return false;
4485 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4486 return false;
4488 srcstmt = get_gimple_for_ssa_name (src);
4489 if (!srcstmt
4490 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4491 return false;
4493 code = gimple_assign_rhs_code (srcstmt);
4495 op0 = gimple_assign_rhs1 (srcstmt);
4497 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4498 to find its initialization. Hopefully the initialization will
4499 be from a bitfield load. */
4500 if (TREE_CODE (op0) == SSA_NAME)
4502 gimple *op0stmt = get_gimple_for_ssa_name (op0);
4504 /* We want to eventually have OP0 be the same as TO, which
4505 should be a bitfield. */
4506 if (!op0stmt
4507 || !is_gimple_assign (op0stmt)
4508 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4509 return false;
4510 op0 = gimple_assign_rhs1 (op0stmt);
4513 op1 = gimple_assign_rhs2 (srcstmt);
4515 if (!operand_equal_p (to, op0, 0))
4516 return false;
4518 if (MEM_P (str_rtx))
4520 unsigned HOST_WIDE_INT offset1;
4522 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4523 str_mode = word_mode;
4524 str_mode = get_best_mode (bitsize, bitpos,
4525 bitregion_start, bitregion_end,
4526 MEM_ALIGN (str_rtx), str_mode, 0);
4527 if (str_mode == VOIDmode)
4528 return false;
4529 str_bitsize = GET_MODE_BITSIZE (str_mode);
4531 offset1 = bitpos;
4532 bitpos %= str_bitsize;
4533 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4534 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4536 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4537 return false;
4538 else
4539 gcc_assert (!reverse);
4541 /* If the bit field covers the whole REG/MEM, store_field
4542 will likely generate better code. */
4543 if (bitsize >= str_bitsize)
4544 return false;
4546 /* We can't handle fields split across multiple entities. */
4547 if (bitpos + bitsize > str_bitsize)
4548 return false;
4550 if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4551 bitpos = str_bitsize - bitpos - bitsize;
4553 switch (code)
4555 case PLUS_EXPR:
4556 case MINUS_EXPR:
4557 /* For now, just optimize the case of the topmost bitfield
4558 where we don't need to do any masking and also
4559 1 bit bitfields where xor can be used.
4560 We might win by one instruction for the other bitfields
4561 too if insv/extv instructions aren't used, so that
4562 can be added later. */
4563 if ((reverse || bitpos + bitsize != str_bitsize)
4564 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4565 break;
4567 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4568 value = convert_modes (str_mode,
4569 TYPE_MODE (TREE_TYPE (op1)), value,
4570 TYPE_UNSIGNED (TREE_TYPE (op1)));
4572 /* We may be accessing data outside the field, which means
4573 we can alias adjacent data. */
4574 if (MEM_P (str_rtx))
4576 str_rtx = shallow_copy_rtx (str_rtx);
4577 set_mem_alias_set (str_rtx, 0);
4578 set_mem_expr (str_rtx, 0);
4581 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4583 value = expand_and (str_mode, value, const1_rtx, NULL);
4584 binop = xor_optab;
4586 else
4587 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4589 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4590 if (reverse)
4591 value = flip_storage_order (str_mode, value);
4592 result = expand_binop (str_mode, binop, str_rtx,
4593 value, str_rtx, 1, OPTAB_WIDEN);
4594 if (result != str_rtx)
4595 emit_move_insn (str_rtx, result);
4596 return true;
4598 case BIT_IOR_EXPR:
4599 case BIT_XOR_EXPR:
4600 if (TREE_CODE (op1) != INTEGER_CST)
4601 break;
4602 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4603 value = convert_modes (str_mode,
4604 TYPE_MODE (TREE_TYPE (op1)), value,
4605 TYPE_UNSIGNED (TREE_TYPE (op1)));
4607 /* We may be accessing data outside the field, which means
4608 we can alias adjacent data. */
4609 if (MEM_P (str_rtx))
4611 str_rtx = shallow_copy_rtx (str_rtx);
4612 set_mem_alias_set (str_rtx, 0);
4613 set_mem_expr (str_rtx, 0);
4616 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4617 if (bitpos + bitsize != str_bitsize)
4619 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4620 str_mode);
4621 value = expand_and (str_mode, value, mask, NULL_RTX);
4623 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4624 if (reverse)
4625 value = flip_storage_order (str_mode, value);
4626 result = expand_binop (str_mode, binop, str_rtx,
4627 value, str_rtx, 1, OPTAB_WIDEN);
4628 if (result != str_rtx)
4629 emit_move_insn (str_rtx, result);
4630 return true;
4632 default:
4633 break;
4636 return false;
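/* Source-level examples this fast path targets (illustrative only,
   assuming a little-endian layout):

     struct S { unsigned lo : 28; unsigned hi : 4; } s;
     s.hi += 3;     // topmost field: plus of (3 << 28), no masking
     s.lo |= 0x10;  // ior/xor of a constant works at any position

     struct T { unsigned b : 1; unsigned a : 7; } t;
     t.b += 1;      // 1-bit field: becomes an xor of a shifted 1

   In each case the RHS must be a read-modify-write of the same field,
   so the operation can be done directly on the containing word.  */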
4639 /* In the C++ memory model, consecutive bit fields in a structure are
4640 considered one memory location.
4642 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4643 returns the bit range of consecutive bits in which this COMPONENT_REF
4644 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4645 and *OFFSET may be adjusted in the process.
4647 If the access does not need to be restricted, 0 is returned in both
4648 *BITSTART and *BITEND. */
4650 static void
4651 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4652 unsigned HOST_WIDE_INT *bitend,
4653 tree exp,
4654 HOST_WIDE_INT *bitpos,
4655 tree *offset)
4657 HOST_WIDE_INT bitoffset;
4658 tree field, repr;
4660 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4662 field = TREE_OPERAND (exp, 1);
4663 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4664 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4665 need to limit the range we can access. */
4666 if (!repr)
4668 *bitstart = *bitend = 0;
4669 return;
4672 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4673 part of a larger bit field, then the representative does not serve any
4674 useful purpose. This can occur in Ada. */
4675 if (handled_component_p (TREE_OPERAND (exp, 0)))
4677 machine_mode rmode;
4678 HOST_WIDE_INT rbitsize, rbitpos;
4679 tree roffset;
4680 int unsignedp, reversep, volatilep = 0;
4681 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4682 &roffset, &rmode, &unsignedp, &reversep,
4683 &volatilep, false);
4684 if ((rbitpos % BITS_PER_UNIT) != 0)
4686 *bitstart = *bitend = 0;
4687 return;
4691 /* Compute the adjustment to bitpos from the offset of the field
4692 relative to the representative. DECL_FIELD_OFFSET of field and
4693 repr are the same by construction if they are not constants,
4694 see finish_bitfield_layout. */
4695 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4696 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4697 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4698 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4699 else
4700 bitoffset = 0;
4701 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4702 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4704 /* If the adjustment is larger than bitpos, we would have a negative bit
4705 position for the lower bound and this may wreak havoc later. Adjust
4706 offset and bitpos to make the lower bound non-negative in that case. */
4707 if (bitoffset > *bitpos)
4709 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4710 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4712 *bitpos += adjust;
4713 if (*offset == NULL_TREE)
4714 *offset = size_int (-adjust / BITS_PER_UNIT);
4715 else
4716 *offset
4717 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4718 *bitstart = 0;
4720 else
4721 *bitstart = *bitpos - bitoffset;
4723 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
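/* Illustration (a sketch, assuming the usual layout):

     struct S { char c; int f1 : 3; int f2 : 10; short s; };

   f1 and f2 share one "memory location" in the C++11 sense, and their
   DECL_BIT_FIELD_REPRESENTATIVE covers both.  A store to s.f1 therefore
   gets a bit range spanning f1 and f2, so the generated code may
   rewrite f2's bits (restoring their value) but must not touch c or s,
   which another thread may access concurrently.  */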
4726 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4727 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4728 DECL_RTL was not set yet, return NORTL. */
4730 static inline bool
4731 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4733 if (TREE_CODE (addr) != ADDR_EXPR)
4734 return false;
4736 tree base = TREE_OPERAND (addr, 0);
4738 if (!DECL_P (base)
4739 || TREE_ADDRESSABLE (base)
4740 || DECL_MODE (base) == BLKmode)
4741 return false;
4743 if (!DECL_RTL_SET_P (base))
4744 return nortl;
4746 return (!MEM_P (DECL_RTL (base)));
4749 /* Returns true if the MEM_REF REF refers to an object that does not
4750 reside in memory and has non-BLKmode. */
4752 static inline bool
4753 mem_ref_refers_to_non_mem_p (tree ref)
4755 tree base = TREE_OPERAND (ref, 0);
4756 return addr_expr_of_non_mem_decl_p_1 (base, false);
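/* For example (illustrative): a MEM_REF like MEM[(int *)&x], where x is
   a non-addressable local that lives in a pseudo register, satisfies
   this predicate, and accesses through it are expanded as register
   operations rather than through memory.  */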
4759 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4760 is true, try generating a nontemporal store. */
4762 void
4763 expand_assignment (tree to, tree from, bool nontemporal)
4765 rtx to_rtx = 0;
4766 rtx result;
4767 machine_mode mode;
4768 unsigned int align;
4769 enum insn_code icode;
4771 /* Don't crash if the lhs of the assignment was erroneous. */
4772 if (TREE_CODE (to) == ERROR_MARK)
4774 expand_normal (from);
4775 return;
4778 /* Optimize away no-op moves without side-effects. */
4779 if (operand_equal_p (to, from, 0))
4780 return;
4782 /* Handle misaligned stores. */
4783 mode = TYPE_MODE (TREE_TYPE (to));
4784 if ((TREE_CODE (to) == MEM_REF
4785 || TREE_CODE (to) == TARGET_MEM_REF)
4786 && mode != BLKmode
4787 && !mem_ref_refers_to_non_mem_p (to)
4788 && ((align = get_object_alignment (to))
4789 < GET_MODE_ALIGNMENT (mode))
4790 && (((icode = optab_handler (movmisalign_optab, mode))
4791 != CODE_FOR_nothing)
4792 || SLOW_UNALIGNED_ACCESS (mode, align)))
4794 rtx reg, mem;
4796 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4797 reg = force_not_mem (reg);
4798 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4799 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4800 reg = flip_storage_order (mode, reg);
4802 if (icode != CODE_FOR_nothing)
4804 struct expand_operand ops[2];
4806 create_fixed_operand (&ops[0], mem);
4807 create_input_operand (&ops[1], reg, mode);
4808 /* The movmisalign<mode> pattern cannot fail, else the assignment
4809 would silently be omitted. */
4810 expand_insn (icode, 2, ops);
4812 else
4813 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4814 false);
4815 return;
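/* Illustration of the misaligned-store path above (a sketch):

     char buf[6];  int v;
     __builtin_memcpy (&buf[1], &v, sizeof v);

   can be folded into a single SImode MEM_REF store with byte alignment.
   On a STRICT_ALIGNMENT (or slow-unaligned-access) target that store
   goes through the movmisalign<mode> pattern when the target provides
   one, and through store_bit_field otherwise.  */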
4818 /* Assignment of a structure component needs special treatment
4819 if the structure component's rtx is not simply a MEM.
4820 Assignment of an array element at a constant index, and assignment of
4821 an array element in an unaligned packed structure field, has the same
4822 problem. Same for (partially) storing into a non-memory object. */
4823 if (handled_component_p (to)
4824 || (TREE_CODE (to) == MEM_REF
4825 && (REF_REVERSE_STORAGE_ORDER (to)
4826 || mem_ref_refers_to_non_mem_p (to)))
4827 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4829 machine_mode mode1;
4830 HOST_WIDE_INT bitsize, bitpos;
4831 unsigned HOST_WIDE_INT bitregion_start = 0;
4832 unsigned HOST_WIDE_INT bitregion_end = 0;
4833 tree offset;
4834 int unsignedp, reversep, volatilep = 0;
4835 tree tem;
4837 push_temp_slots ();
4838 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4839 &unsignedp, &reversep, &volatilep, true);
4841 /* Make sure bitpos is not negative, it can wreak havoc later. */
4842 if (bitpos < 0)
4844 gcc_assert (offset == NULL_TREE);
4845 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4846 ? 3 : exact_log2 (BITS_PER_UNIT)));
4847 bitpos &= BITS_PER_UNIT - 1;
4850 if (TREE_CODE (to) == COMPONENT_REF
4851 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4852 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4853 /* The C++ memory model naturally applies to byte-aligned fields.
4854 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4855 BITSIZE are not byte-aligned, there is no need to limit the range
4856 we can access. This can occur with packed structures in Ada. */
4857 else if (bitsize > 0
4858 && bitsize % BITS_PER_UNIT == 0
4859 && bitpos % BITS_PER_UNIT == 0)
4861 bitregion_start = bitpos;
4862 bitregion_end = bitpos + bitsize - 1;
4865 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4867 /* If the field has a mode, we want to access it in the
4868 field's mode, not the computed mode.
4869 If a MEM has VOIDmode (external with incomplete type),
4870 use BLKmode for it instead. */
4871 if (MEM_P (to_rtx))
4873 if (mode1 != VOIDmode)
4874 to_rtx = adjust_address (to_rtx, mode1, 0);
4875 else if (GET_MODE (to_rtx) == VOIDmode)
4876 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4879 if (offset != 0)
4881 machine_mode address_mode;
4882 rtx offset_rtx;
4884 if (!MEM_P (to_rtx))
4886 /* We can get constant negative offsets into arrays with broken
4887 user code. Translate this to a trap instead of ICEing. */
4888 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4889 expand_builtin_trap ();
4890 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4893 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4894 address_mode = get_address_mode (to_rtx);
4895 if (GET_MODE (offset_rtx) != address_mode)
4897 /* We cannot be sure that the RTL in offset_rtx is valid outside
4898 of a memory address context, so force it into a register
4899 before attempting to convert it to the desired mode. */
4900 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4901 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4904 /* If we have an expression in OFFSET_RTX and a non-zero
4905 byte offset in BITPOS, adding the byte offset before the
4906 OFFSET_RTX results in better intermediate code, which makes
4907 later rtl optimization passes perform better.
4909 We prefer intermediate code like this:
4911 r124:DI=r123:DI+0x18
4912 [r124:DI]=r121:DI
4914 ... instead of ...
4916 r124:DI=r123:DI+0x10
4917 [r124:DI+0x8]=r121:DI
4919 This is only done for aligned data values, as these can
4920 be expected to result in single move instructions. */
4921 if (mode1 != VOIDmode
4922 && bitpos != 0
4923 && bitsize > 0
4924 && (bitpos % bitsize) == 0
4925 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4926 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4928 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4929 bitregion_start = 0;
4930 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4931 bitregion_end -= bitpos;
4932 bitpos = 0;
4935 to_rtx = offset_address (to_rtx, offset_rtx,
4936 highest_pow2_factor_for_target (to,
4937 offset));
4940 /* No action is needed if the target is not a memory and the field
4941 lies completely outside that target. This can occur if the source
4942 code contains an out-of-bounds access to a small array. */
4943 if (!MEM_P (to_rtx)
4944 && GET_MODE (to_rtx) != BLKmode
4945 && (unsigned HOST_WIDE_INT) bitpos
4946 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4948 expand_normal (from);
4949 result = NULL;
4951 /* Handle expand_expr of a complex value returning a CONCAT. */
4952 else if (GET_CODE (to_rtx) == CONCAT)
4954 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4955 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4956 && bitpos == 0
4957 && bitsize == mode_bitsize)
4958 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4959 else if (bitsize == mode_bitsize / 2
4960 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4961 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4962 nontemporal, reversep);
4963 else if (bitpos + bitsize <= mode_bitsize / 2)
4964 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4965 bitregion_start, bitregion_end,
4966 mode1, from, get_alias_set (to),
4967 nontemporal, reversep);
4968 else if (bitpos >= mode_bitsize / 2)
4969 result = store_field (XEXP (to_rtx, 1), bitsize,
4970 bitpos - mode_bitsize / 2,
4971 bitregion_start, bitregion_end,
4972 mode1, from, get_alias_set (to),
4973 nontemporal, reversep);
4974 else if (bitpos == 0 && bitsize == mode_bitsize)
4976 rtx from_rtx;
4977 result = expand_normal (from);
4978 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4979 TYPE_MODE (TREE_TYPE (from)), 0);
4980 emit_move_insn (XEXP (to_rtx, 0),
4981 read_complex_part (from_rtx, false));
4982 emit_move_insn (XEXP (to_rtx, 1),
4983 read_complex_part (from_rtx, true));
4985 else
4987 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4988 GET_MODE_SIZE (GET_MODE (to_rtx)));
4989 write_complex_part (temp, XEXP (to_rtx, 0), false);
4990 write_complex_part (temp, XEXP (to_rtx, 1), true);
4991 result = store_field (temp, bitsize, bitpos,
4992 bitregion_start, bitregion_end,
4993 mode1, from, get_alias_set (to),
4994 nontemporal, reversep);
4995 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4996 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4999 else
5001 if (MEM_P (to_rtx))
5003 /* If the field is at offset zero, we could have been given the
5004 DECL_RTX of the parent struct. Don't munge it. */
5005 to_rtx = shallow_copy_rtx (to_rtx);
5006 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5007 if (volatilep)
5008 MEM_VOLATILE_P (to_rtx) = 1;
5011 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5012 bitregion_start, bitregion_end,
5013 mode1, to_rtx, to, from,
5014 reversep))
5015 result = NULL;
5016 else
5017 result = store_field (to_rtx, bitsize, bitpos,
5018 bitregion_start, bitregion_end,
5019 mode1, from, get_alias_set (to),
5020 nontemporal, reversep);
5023 if (result)
5024 preserve_temp_slots (result);
5025 pop_temp_slots ();
5026 return;
5029 /* If the rhs is a function call and its value is not an aggregate,
5030 call the function before we start to compute the lhs.
5031 This is needed for correct code for cases such as
5032 val = setjmp (buf) on machines where reference to val
5033 requires loading up part of an address in a separate insn.
5035 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5036 since it might be a promoted variable where the zero- or sign- extension
5037 needs to be done. Handling this in the normal way is safe because no
5038 computation is done before the call. The same is true for SSA names. */
5039 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5040 && COMPLETE_TYPE_P (TREE_TYPE (from))
5041 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5042 && ! (((TREE_CODE (to) == VAR_DECL
5043 || TREE_CODE (to) == PARM_DECL
5044 || TREE_CODE (to) == RESULT_DECL)
5045 && REG_P (DECL_RTL (to)))
5046 || TREE_CODE (to) == SSA_NAME))
5048 rtx value;
5049 rtx bounds;
5051 push_temp_slots ();
5052 value = expand_normal (from);
5054 /* Split value and bounds to store them separately. */
5055 chkp_split_slot (value, &value, &bounds);
5057 if (to_rtx == 0)
5058 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5060 /* Handle calls that return values in multiple non-contiguous locations.
5061 The Irix 6 ABI has examples of this. */
5062 if (GET_CODE (to_rtx) == PARALLEL)
5064 if (GET_CODE (value) == PARALLEL)
5065 emit_group_move (to_rtx, value);
5066 else
5067 emit_group_load (to_rtx, value, TREE_TYPE (from),
5068 int_size_in_bytes (TREE_TYPE (from)));
5070 else if (GET_CODE (value) == PARALLEL)
5071 emit_group_store (to_rtx, value, TREE_TYPE (from),
5072 int_size_in_bytes (TREE_TYPE (from)));
5073 else if (GET_MODE (to_rtx) == BLKmode)
5075 /* Handle calls that return BLKmode values in registers. */
5076 if (REG_P (value))
5077 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5078 else
5079 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5081 else
5083 if (POINTER_TYPE_P (TREE_TYPE (to)))
5084 value = convert_memory_address_addr_space
5085 (GET_MODE (to_rtx), value,
5086 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5088 emit_move_insn (to_rtx, value);
5091 /* Store bounds if required. */
5092 if (bounds
5093 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5095 gcc_assert (MEM_P (to_rtx));
5096 chkp_emit_bounds_store (bounds, value, to_rtx);
5099 preserve_temp_slots (to_rtx);
5100 pop_temp_slots ();
5101 return;
5104 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5105 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5107 /* Don't move directly into a return register. */
5108 if (TREE_CODE (to) == RESULT_DECL
5109 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5111 rtx temp;
5113 push_temp_slots ();
5115 /* If the source is itself a return value, it still is in a pseudo at
5116 this point so we can move it back to the return register directly. */
5117 if (REG_P (to_rtx)
5118 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5119 && TREE_CODE (from) != CALL_EXPR)
5120 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5121 else
5122 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5124 /* Handle calls that return values in multiple non-contiguous locations.
5125 The Irix 6 ABI has examples of this. */
5126 if (GET_CODE (to_rtx) == PARALLEL)
5128 if (GET_CODE (temp) == PARALLEL)
5129 emit_group_move (to_rtx, temp);
5130 else
5131 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5132 int_size_in_bytes (TREE_TYPE (from)));
5134 else if (temp)
5135 emit_move_insn (to_rtx, temp);
5137 preserve_temp_slots (to_rtx);
5138 pop_temp_slots ();
5139 return;
5142 /* In case we are returning the contents of an object which overlaps
5143 the place the value is being stored, use a safe function when copying
5144 a value through a pointer into a structure value return block. */
5145 if (TREE_CODE (to) == RESULT_DECL
5146 && TREE_CODE (from) == INDIRECT_REF
5147 && ADDR_SPACE_GENERIC_P
5148 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5149 && refs_may_alias_p (to, from)
5150 && cfun->returns_struct
5151 && !cfun->returns_pcc_struct)
5153 rtx from_rtx, size;
5155 push_temp_slots ();
5156 size = expr_size (from);
5157 from_rtx = expand_normal (from);
5159 emit_library_call (memmove_libfunc, LCT_NORMAL,
5160 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5161 XEXP (from_rtx, 0), Pmode,
5162 convert_to_mode (TYPE_MODE (sizetype),
5163 size, TYPE_UNSIGNED (sizetype)),
5164 TYPE_MODE (sizetype));
5166 preserve_temp_slots (to_rtx);
5167 pop_temp_slots ();
5168 return;
5171 /* Compute FROM and store the value in the rtx we got. */
5173 push_temp_slots ();
5174 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5175 preserve_temp_slots (result);
5176 pop_temp_slots ();
5177 return;
5180 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5181 succeeded, false otherwise. */
5183 bool
5184 emit_storent_insn (rtx to, rtx from)
5186 struct expand_operand ops[2];
5187 machine_mode mode = GET_MODE (to);
5188 enum insn_code code = optab_handler (storent_optab, mode);
5190 if (code == CODE_FOR_nothing)
5191 return false;
5193 create_fixed_operand (&ops[0], to);
5194 create_input_operand (&ops[1], from, mode);
5195 return maybe_expand_insn (code, 2, ops);
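/* Usage sketch: store_expr_with_bounds tries this first for stores
   flagged nontemporal; if the target defines storent<mode> (for
   instance via x86's movnti family) the data bypasses the cache
   hierarchy, and if not, the caller simply falls back to an ordinary
   move.  */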
5198 /* Generate code for computing expression EXP,
5199 and storing the value into TARGET.
5201 If the mode is BLKmode then we may return TARGET itself.
5202 It turns out that in BLKmode it doesn't cause a problem,
5203 because C has no operators that could combine two different
5204 assignments into the same BLKmode object with different values
5205 with no sequence point. Will other languages need this to
5206 be more thorough?
5208 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5209 stack, and block moves may need to be treated specially.
5211 If NONTEMPORAL is true, try using a nontemporal store instruction.
5213 If REVERSE is true, the store is to be done in reverse order.
5215 If BTARGET is not NULL then computed bounds of EXP are
5216 associated with BTARGET. */
5218 rtx
5219 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5220 bool nontemporal, bool reverse, tree btarget)
5222 rtx temp;
5223 rtx alt_rtl = NULL_RTX;
5224 location_t loc = curr_insn_location ();
5226 if (VOID_TYPE_P (TREE_TYPE (exp)))
5228 /* C++ can generate ?: expressions with a throw expression in one
5229 branch and an rvalue in the other. Here, we resolve attempts to
5230 store the throw expression's nonexistent result. */
5231 gcc_assert (!call_param_p);
5232 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5233 return NULL_RTX;
5235 if (TREE_CODE (exp) == COMPOUND_EXPR)
5237 /* Perform first part of compound expression, then assign from second
5238 part. */
5239 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5240 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5241 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5242 call_param_p, nontemporal, reverse,
5243 btarget);
5245 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5247 /* For conditional expression, get safe form of the target. Then
5248 test the condition, doing the appropriate assignment on either
5249 side. This avoids the creation of unnecessary temporaries.
5250 For non-BLKmode, it is more efficient not to do this. */
5252 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5254 do_pending_stack_adjust ();
5255 NO_DEFER_POP;
5256 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5257 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5258 nontemporal, reverse, btarget);
5259 emit_jump_insn (targetm.gen_jump (lab2));
5260 emit_barrier ();
5261 emit_label (lab1);
5262 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5263 nontemporal, reverse, btarget);
5264 emit_label (lab2);
5265 OK_DEFER_POP;
5267 return NULL_RTX;
5269 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5270 /* If this is a scalar in a register that is stored in a wider mode
5271 than the declared mode, compute the result into its declared mode
5272 and then convert to the wider mode. Our value is the computed
5273 expression. */
5275 rtx inner_target = 0;
5277 /* We can do the conversion inside EXP, which will often result
5278 in some optimizations. Do the conversion in two steps: first
5279 change the signedness, if needed, then the extend. But don't
5280 do this if the type of EXP is a subtype of something else
5281 since then the conversion might involve more than just
5282 converting modes. */
5283 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5284 && TREE_TYPE (TREE_TYPE (exp)) == 0
5285 && GET_MODE_PRECISION (GET_MODE (target))
5286 == TYPE_PRECISION (TREE_TYPE (exp)))
5288 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5289 TYPE_UNSIGNED (TREE_TYPE (exp))))
5291 /* Some types, e.g. Fortran's logical*4, won't have a signed
5292 version, so use the mode instead. */
5293 tree ntype
5294 = (signed_or_unsigned_type_for
5295 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5296 if (ntype == NULL)
5297 ntype = lang_hooks.types.type_for_mode
5298 (TYPE_MODE (TREE_TYPE (exp)),
5299 SUBREG_PROMOTED_SIGN (target));
5301 exp = fold_convert_loc (loc, ntype, exp);
5304 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5305 (GET_MODE (SUBREG_REG (target)),
5306 SUBREG_PROMOTED_SIGN (target)),
5307 exp);
5309 inner_target = SUBREG_REG (target);
5312 temp = expand_expr (exp, inner_target, VOIDmode,
5313 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5315 /* Handle bounds returned by call. */
5316 if (TREE_CODE (exp) == CALL_EXPR)
5318 rtx bounds;
5319 chkp_split_slot (temp, &temp, &bounds);
5320 if (bounds && btarget)
5322 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5323 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5324 chkp_set_rtl_bounds (btarget, tmp);
5328 /* If TEMP is a VOIDmode constant, use convert_modes to make
5329 sure that we properly convert it. */
5330 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5332 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5333 temp, SUBREG_PROMOTED_SIGN (target));
5334 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5335 GET_MODE (target), temp,
5336 SUBREG_PROMOTED_SIGN (target));
5339 convert_move (SUBREG_REG (target), temp,
5340 SUBREG_PROMOTED_SIGN (target));
5342 return NULL_RTX;
5344 else if ((TREE_CODE (exp) == STRING_CST
5345 || (TREE_CODE (exp) == MEM_REF
5346 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5347 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5348 == STRING_CST
5349 && integer_zerop (TREE_OPERAND (exp, 1))))
5350 && !nontemporal && !call_param_p
5351 && MEM_P (target))
5353 /* Optimize initialization of an array with a STRING_CST. */
5354 HOST_WIDE_INT exp_len, str_copy_len;
5355 rtx dest_mem;
5356 tree str = TREE_CODE (exp) == STRING_CST
5357 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5359 exp_len = int_expr_size (exp);
5360 if (exp_len <= 0)
5361 goto normal_expr;
5363 if (TREE_STRING_LENGTH (str) <= 0)
5364 goto normal_expr;
5366 str_copy_len = strlen (TREE_STRING_POINTER (str));
5367 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5368 goto normal_expr;
5370 str_copy_len = TREE_STRING_LENGTH (str);
5371 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5372 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5374 str_copy_len += STORE_MAX_PIECES - 1;
5375 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5377 str_copy_len = MIN (str_copy_len, exp_len);
5378 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5379 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5380 MEM_ALIGN (target), false))
5381 goto normal_expr;
5383 dest_mem = target;
5385 dest_mem = store_by_pieces (dest_mem,
5386 str_copy_len, builtin_strncpy_read_str,
5387 CONST_CAST (char *,
5388 TREE_STRING_POINTER (str)),
5389 MEM_ALIGN (target), false,
5390 exp_len > str_copy_len ? 1 : 0);
5391 if (exp_len > str_copy_len)
5392 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5393 GEN_INT (exp_len - str_copy_len),
5394 BLOCK_OP_NORMAL);
5395 return NULL_RTX;
5397 else
5399 rtx tmp_target;
5401 normal_expr:
5402 /* If we want to use a nontemporal or a reverse order store, force the
5403 value into a register first. */
5404 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5405 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5406 (call_param_p
5407 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5408 &alt_rtl, false);
5410 /* Handle bounds returned by call. */
5411 if (TREE_CODE (exp) == CALL_EXPR)
5413 rtx bounds;
5414 chkp_split_slot (temp, &temp, &bounds);
5415 if (bounds && btarget)
5417 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5418 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5419 chkp_set_rtl_bounds (btarget, tmp);
5424 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5425 the same as that of TARGET, adjust the constant. This is needed, for
5426 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5427 only a word-sized value. */
5428 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5429 && TREE_CODE (exp) != ERROR_MARK
5430 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5431 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5432 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5434 /* If value was not generated in the target, store it there.
5435 Convert the value to TARGET's type first if necessary and emit the
5436 pending incrementations that have been queued when expanding EXP.
5437 Note that we cannot emit the whole queue blindly because this will
5438 effectively disable the POST_INC optimization later.
5440 If TEMP and TARGET compare equal according to rtx_equal_p, but
5441 one or both of them are volatile memory refs, we have to distinguish
5442 two cases:
5443 - expand_expr has used TARGET. In this case, we must not generate
5444 another copy. This can be detected by TARGET being equal according
5445 to == .
5446 - expand_expr has not used TARGET - that means that the source just
5447 happens to have the same RTX form. Since temp will have been created
5448 by expand_expr, it will compare unequal according to == .
5449 We must generate a copy in this case, to reach the correct number
5450 of volatile memory references. */
5452 if ((! rtx_equal_p (temp, target)
5453 || (temp != target && (side_effects_p (temp)
5454 || side_effects_p (target))))
5455 && TREE_CODE (exp) != ERROR_MARK
5456 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5457 but TARGET is not valid memory reference, TEMP will differ
5458 from TARGET although it is really the same location. */
5459 && !(alt_rtl
5460 && rtx_equal_p (alt_rtl, target)
5461 && !side_effects_p (alt_rtl)
5462 && !side_effects_p (target))
5463 /* If there's nothing to copy, don't bother. Don't call
5464 expr_size unless necessary, because some front-ends (C++)
5465 expr_size-hook must not be given objects that are not
5466 supposed to be bit-copied or bit-initialized. */
5467 && expr_size (exp) != const0_rtx)
5469 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5471 if (GET_MODE (target) == BLKmode)
5473 /* Handle calls that return BLKmode values in registers. */
5474 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5475 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5476 else
5477 store_bit_field (target,
5478 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5479 0, 0, 0, GET_MODE (temp), temp, reverse);
5481 else
5482 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5485 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5487 /* Handle copying a string constant into an array. The string
5488 constant may be shorter than the array. So copy just the string's
5489 actual length, and clear the rest. First get the size of the data
5490 type of the string, which is actually the size of the target. */
5491 rtx size = expr_size (exp);
5493 if (CONST_INT_P (size)
5494 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5495 emit_block_move (target, temp, size,
5496 (call_param_p
5497 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5498 else
5500 machine_mode pointer_mode
5501 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5502 machine_mode address_mode = get_address_mode (target);
5504 /* Compute the size of the data to copy from the string. */
5505 tree copy_size
5506 = size_binop_loc (loc, MIN_EXPR,
5507 make_tree (sizetype, size),
5508 size_int (TREE_STRING_LENGTH (exp)));
5509 rtx copy_size_rtx
5510 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5511 (call_param_p
5512 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5513 rtx_code_label *label = 0;
5515 /* Copy that much. */
5516 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5517 TYPE_UNSIGNED (sizetype));
5518 emit_block_move (target, temp, copy_size_rtx,
5519 (call_param_p
5520 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5522 /* Figure out how much is left in TARGET that we have to clear.
5523 Do all calculations in pointer_mode. */
5524 if (CONST_INT_P (copy_size_rtx))
5526 size = plus_constant (address_mode, size,
5527 -INTVAL (copy_size_rtx));
5528 target = adjust_address (target, BLKmode,
5529 INTVAL (copy_size_rtx));
5531 else
5533 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5534 copy_size_rtx, NULL_RTX, 0,
5535 OPTAB_LIB_WIDEN);
5537 if (GET_MODE (copy_size_rtx) != address_mode)
5538 copy_size_rtx = convert_to_mode (address_mode,
5539 copy_size_rtx,
5540 TYPE_UNSIGNED (sizetype));
5542 target = offset_address (target, copy_size_rtx,
5543 highest_pow2_factor (copy_size));
5544 label = gen_label_rtx ();
5545 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5546 GET_MODE (size), 0, label);
5549 if (size != const0_rtx)
5550 clear_storage (target, size, BLOCK_OP_NORMAL);
5552 if (label)
5553 emit_label (label);
5556 /* Handle calls that return values in multiple non-contiguous locations.
5557 The Irix 6 ABI has examples of this. */
5558 else if (GET_CODE (target) == PARALLEL)
5560 if (GET_CODE (temp) == PARALLEL)
5561 emit_group_move (target, temp);
5562 else
5563 emit_group_load (target, temp, TREE_TYPE (exp),
5564 int_size_in_bytes (TREE_TYPE (exp)));
5566 else if (GET_CODE (temp) == PARALLEL)
5567 emit_group_store (target, temp, TREE_TYPE (exp),
5568 int_size_in_bytes (TREE_TYPE (exp)));
5569 else if (GET_MODE (temp) == BLKmode)
5570 emit_block_move (target, temp, expr_size (exp),
5571 (call_param_p
5572 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5573 /* If we emit a nontemporal store, there is nothing else to do. */
5574 else if (nontemporal && emit_storent_insn (target, temp))
5576 else
5578 if (reverse)
5579 temp = flip_storage_order (GET_MODE (target), temp);
5580 temp = force_operand (temp, target);
5581 if (temp != target)
5582 emit_move_insn (target, temp);
5586 return NULL_RTX;
5589 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5591 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5592 bool reverse)
5594 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5595 reverse, NULL);
5598 /* Return true if field F of structure TYPE is a flexible array. */
5600 static bool
5601 flexible_array_member_p (const_tree f, const_tree type)
5603 const_tree tf;
5605 tf = TREE_TYPE (f);
5606 return (DECL_CHAIN (f) == NULL
5607 && TREE_CODE (tf) == ARRAY_TYPE
5608 && TYPE_DOMAIN (tf)
5609 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5610 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5611 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5612 && int_size_in_bytes (type) >= 0);
5615 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5616 must have in order for it to completely initialize a value of type TYPE.
5617 Return -1 if the number isn't known.
5619 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
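/* As an illustrative example (not from the original sources), for
       struct s { int a; int b[3]; };
   this returns 2 when FOR_CTOR_P (the two top-level fields) and 4
   otherwise (the scalar A plus the three elements of B).  */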
5621 static HOST_WIDE_INT
5622 count_type_elements (const_tree type, bool for_ctor_p)
5624 switch (TREE_CODE (type))
5626 case ARRAY_TYPE:
5628 tree nelts;
5630 nelts = array_type_nelts (type);
5631 if (nelts && tree_fits_uhwi_p (nelts))
5633 unsigned HOST_WIDE_INT n;
5635 n = tree_to_uhwi (nelts) + 1;
5636 if (n == 0 || for_ctor_p)
5637 return n;
5638 else
5639 return n * count_type_elements (TREE_TYPE (type), false);
5641 return for_ctor_p ? -1 : 1;
5644 case RECORD_TYPE:
5646 unsigned HOST_WIDE_INT n;
5647 tree f;
5649 n = 0;
5650 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5651 if (TREE_CODE (f) == FIELD_DECL)
5653 if (!for_ctor_p)
5654 n += count_type_elements (TREE_TYPE (f), false);
5655 else if (!flexible_array_member_p (f, type))
5656 /* Don't count flexible arrays, which are not supposed
5657 to be initialized. */
5658 n += 1;
5661 return n;
5664 case UNION_TYPE:
5665 case QUAL_UNION_TYPE:
5667 tree f;
5668 HOST_WIDE_INT n, m;
5670 gcc_assert (!for_ctor_p);
5671 /* Estimate the number of scalars in each field and pick the
5672 maximum. Other estimates would do instead; the idea is simply
5673 to make sure that the estimate is not sensitive to the ordering
5674 of the fields. */
5675 n = 1;
5676 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5677 if (TREE_CODE (f) == FIELD_DECL)
5679 m = count_type_elements (TREE_TYPE (f), false);
5680 /* If the field doesn't span the whole union, add an extra
5681 scalar for the rest. */
5682 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5683 TYPE_SIZE (type)) != 1)
5684 m++;
5685 if (n < m)
5686 n = m;
5688 return n;
5691 case COMPLEX_TYPE:
5692 return 2;
5694 case VECTOR_TYPE:
5695 return TYPE_VECTOR_SUBPARTS (type);
5697 case INTEGER_TYPE:
5698 case REAL_TYPE:
5699 case FIXED_POINT_TYPE:
5700 case ENUMERAL_TYPE:
5701 case BOOLEAN_TYPE:
5702 case POINTER_TYPE:
5703 case OFFSET_TYPE:
5704 case REFERENCE_TYPE:
5705 case NULLPTR_TYPE:
5706 return 1;
5708 case ERROR_MARK:
5709 return 0;
5711 case VOID_TYPE:
5712 case METHOD_TYPE:
5713 case FUNCTION_TYPE:
5714 case LANG_TYPE:
5715 default:
5716 gcc_unreachable ();
5720 /* Helper for categorize_ctor_elements. Identical interface. */
5722 static bool
5723 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5724 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5726 unsigned HOST_WIDE_INT idx;
5727 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5728 tree value, purpose, elt_type;
5730 /* Whether CTOR is a valid constant initializer, in accordance with what
5731 initializer_constant_valid_p does. If inferred from the constructor
5732 elements, true until proven otherwise. */
5733 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5734 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5736 nz_elts = 0;
5737 init_elts = 0;
5738 num_fields = 0;
5739 elt_type = NULL_TREE;
5741 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5743 HOST_WIDE_INT mult = 1;
5745 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5747 tree lo_index = TREE_OPERAND (purpose, 0);
5748 tree hi_index = TREE_OPERAND (purpose, 1);
5750 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5751 mult = (tree_to_uhwi (hi_index)
5752 - tree_to_uhwi (lo_index) + 1);
5754 num_fields += mult;
5755 elt_type = TREE_TYPE (value);
5757 switch (TREE_CODE (value))
5759 case CONSTRUCTOR:
5761 HOST_WIDE_INT nz = 0, ic = 0;
5763 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5764 p_complete);
5766 nz_elts += mult * nz;
5767 init_elts += mult * ic;
5769 if (const_from_elts_p && const_p)
5770 const_p = const_elt_p;
5772 break;
5774 case INTEGER_CST:
5775 case REAL_CST:
5776 case FIXED_CST:
5777 if (!initializer_zerop (value))
5778 nz_elts += mult;
5779 init_elts += mult;
5780 break;
5782 case STRING_CST:
5783 nz_elts += mult * TREE_STRING_LENGTH (value);
5784 init_elts += mult * TREE_STRING_LENGTH (value);
5785 break;
5787 case COMPLEX_CST:
5788 if (!initializer_zerop (TREE_REALPART (value)))
5789 nz_elts += mult;
5790 if (!initializer_zerop (TREE_IMAGPART (value)))
5791 nz_elts += mult;
5792 init_elts += mult;
5793 break;
5795 case VECTOR_CST:
5797 unsigned i;
5798 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5800 tree v = VECTOR_CST_ELT (value, i);
5801 if (!initializer_zerop (v))
5802 nz_elts += mult;
5803 init_elts += mult;
5806 break;
5808 default:
5810 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5811 nz_elts += mult * tc;
5812 init_elts += mult * tc;
5814 if (const_from_elts_p && const_p)
5815 const_p
5816 = initializer_constant_valid_p (value,
5817 elt_type,
5818 TYPE_REVERSE_STORAGE_ORDER
5819 (TREE_TYPE (ctor)))
5820 != NULL_TREE;
5822 break;
5826 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5827 num_fields, elt_type))
5828 *p_complete = false;
5830 *p_nz_elts += nz_elts;
5831 *p_init_elts += init_elts;
5833 return const_p;
5836 /* Examine CTOR to discover:
5837 * how many scalar fields are set to nonzero values,
5838 and place it in *P_NZ_ELTS;
5839 * how many scalar fields in total are in CTOR,
5840 and place it in *P_INIT_ELTS.
5841 * whether the constructor is complete -- in the sense that every
5842 meaningful byte is explicitly given a value --
5843 and place it in *P_COMPLETE.
5845 Return whether or not CTOR is a valid static constant initializer, the same
5846 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
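/* As an illustrative example (not from the original sources), for the
   initializer { 0, 5 } of a structure with two int fields, *P_NZ_ELTS
   is 1, *P_INIT_ELTS is 2 and *P_COMPLETE is true, since every field
   is explicitly given a value.  */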
5848 bool
5849 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5850 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5852 *p_nz_elts = 0;
5853 *p_init_elts = 0;
5854 *p_complete = true;
5856 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5859 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5860 of which had type LAST_TYPE. Each element was itself a complete
5861 initializer, in the sense that every meaningful byte was explicitly
5862 given a value. Return true if the same is true for the constructor
5863 as a whole. */
5865 bool
5866 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5867 const_tree last_type)
5869 if (TREE_CODE (type) == UNION_TYPE
5870 || TREE_CODE (type) == QUAL_UNION_TYPE)
5872 if (num_elts == 0)
5873 return false;
5875 gcc_assert (num_elts == 1 && last_type);
5877 /* ??? We could look at each element of the union, and find the
5878 largest element. Which would avoid comparing the size of the
5879 initialized element against any tail padding in the union.
5880 Doesn't seem worth the effort... */
5881 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5884 return count_type_elements (type, true) == num_elts;
5887 /* Return 1 if EXP contains mostly (3/4) zeros. */
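/* As an illustrative example (not from the original sources),
       int a[100] = { 1 };
   is mostly zeros: the constructor initializes a single element, so it
   is incomplete and the remaining 99 elements are implicitly zero.  */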
5889 static int
5890 mostly_zeros_p (const_tree exp)
5892 if (TREE_CODE (exp) == CONSTRUCTOR)
5894 HOST_WIDE_INT nz_elts, init_elts;
5895 bool complete_p;
5897 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5898 return !complete_p || nz_elts < init_elts / 4;
5901 return initializer_zerop (exp);
5904 /* Return 1 if EXP contains all zeros. */
5906 static int
5907 all_zeros_p (const_tree exp)
5909 if (TREE_CODE (exp) == CONSTRUCTOR)
5911 HOST_WIDE_INT nz_elts, init_elts;
5912 bool complete_p;
5914 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5915 return nz_elts == 0;
5918 return initializer_zerop (exp);
5921 /* Helper function for store_constructor.
5922 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5923 CLEARED is as for store_constructor.
5924 ALIAS_SET is the alias set to use for any stores.
5925 If REVERSE is true, the store is to be done in reverse order.
5927 This provides a recursive shortcut back to store_constructor when it isn't
5928 necessary to go through store_field. This is so that we can pass through
5929 the cleared field to let store_constructor know that we may not have to
5930 clear a substructure if the outer structure has already been cleared. */
5932 static void
5933 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5934 HOST_WIDE_INT bitpos, machine_mode mode,
5935 tree exp, int cleared,
5936 alias_set_type alias_set, bool reverse)
5938 if (TREE_CODE (exp) == CONSTRUCTOR
5939 /* We can only call store_constructor recursively if the size and
5940 bit position are on a byte boundary. */
5941 && bitpos % BITS_PER_UNIT == 0
5942 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5943 /* If we have a nonzero bitpos for a register target, then we just
5944 let store_field do the bitfield handling. This is unlikely to
5945 generate unnecessary clear instructions anyway. */
5946 && (bitpos == 0 || MEM_P (target)))
5948 if (MEM_P (target))
5949 target
5950 = adjust_address (target,
5951 GET_MODE (target) == BLKmode
5952 || 0 != (bitpos
5953 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5954 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5957 /* Update the alias set, if required. */
5958 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5959 && MEM_ALIAS_SET (target) != 0)
5961 target = copy_rtx (target);
5962 set_mem_alias_set (target, alias_set);
5965 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5966 reverse);
5968 else
5969 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
5970 reverse);
5974 /* Returns the number of FIELD_DECLs in TYPE. */
5976 static int
5977 fields_length (const_tree type)
5979 tree t = TYPE_FIELDS (type);
5980 int count = 0;
5982 for (; t; t = DECL_CHAIN (t))
5983 if (TREE_CODE (t) == FIELD_DECL)
5984 ++count;
5986 return count;
5990 /* Store the value of constructor EXP into the rtx TARGET.
5991 TARGET is either a REG or a MEM; we know it cannot conflict, since
5992 safe_from_p has been called.
5993 CLEARED is true if TARGET is known to have been zero'd.
5994 SIZE is the number of bytes of TARGET we are allowed to modify: this
5995 may not be the same as the size of EXP if we are assigning to a field
5996 which has been packed to exclude padding bits.
5997 If REVERSE is true, the store is to be done in reverse order. */
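/* As an illustrative example (not from the original sources), expanding
       struct s { int a, b, c; } x = { 1 };
   takes the RECORD_TYPE path below: the constructor has fewer elements
   than the structure has fields, so the whole of X is cleared first and
   only the store of 1 into A is emitted afterwards.  */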
5999 static void
6000 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6001 bool reverse)
6003 tree type = TREE_TYPE (exp);
6004 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6006 switch (TREE_CODE (type))
6008 case RECORD_TYPE:
6009 case UNION_TYPE:
6010 case QUAL_UNION_TYPE:
6012 unsigned HOST_WIDE_INT idx;
6013 tree field, value;
6015 /* The storage order is specified for every aggregate type. */
6016 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6018 /* If size is zero or the target is already cleared, do nothing. */
6019 if (size == 0 || cleared)
6020 cleared = 1;
6021 /* We either clear the aggregate or indicate the value is dead. */
6022 else if ((TREE_CODE (type) == UNION_TYPE
6023 || TREE_CODE (type) == QUAL_UNION_TYPE)
6024 && ! CONSTRUCTOR_ELTS (exp))
6025 /* If the constructor is empty, clear the union. */
6027 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6028 cleared = 1;
6031 /* If we are building a static constructor into a register,
6032 set the initial value as zero so we can fold the value into
6033 a constant. But if more than one register is involved,
6034 this probably loses. */
6035 else if (REG_P (target) && TREE_STATIC (exp)
6036 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6038 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6039 cleared = 1;
6042 /* If the constructor has fewer fields than the structure or
6043 if we are initializing the structure to mostly zeros, clear
6044 the whole structure first. Don't do this if TARGET is a
6045 register whose mode size isn't equal to SIZE since
6046 clear_storage can't handle this case. */
6047 else if (size > 0
6048 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6049 != fields_length (type))
6050 || mostly_zeros_p (exp))
6051 && (!REG_P (target)
6052 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6053 == size)))
6055 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6056 cleared = 1;
6059 if (REG_P (target) && !cleared)
6060 emit_clobber (target);
6062 /* Store each element of the constructor into the
6063 corresponding field of TARGET. */
6064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6066 machine_mode mode;
6067 HOST_WIDE_INT bitsize;
6068 HOST_WIDE_INT bitpos = 0;
6069 tree offset;
6070 rtx to_rtx = target;
6072 /* Just ignore missing fields. We cleared the whole
6073 structure, above, if any fields are missing. */
6074 if (field == 0)
6075 continue;
6077 if (cleared && initializer_zerop (value))
6078 continue;
6080 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6081 bitsize = tree_to_uhwi (DECL_SIZE (field));
6082 else
6083 bitsize = -1;
6085 mode = DECL_MODE (field);
6086 if (DECL_BIT_FIELD (field))
6087 mode = VOIDmode;
6089 offset = DECL_FIELD_OFFSET (field);
6090 if (tree_fits_shwi_p (offset)
6091 && tree_fits_shwi_p (bit_position (field)))
6093 bitpos = int_bit_position (field);
6094 offset = 0;
6096 else
6097 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6099 if (offset)
6101 machine_mode address_mode;
6102 rtx offset_rtx;
6104 offset
6105 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6106 make_tree (TREE_TYPE (exp),
6107 target));
6109 offset_rtx = expand_normal (offset);
6110 gcc_assert (MEM_P (to_rtx));
6112 address_mode = get_address_mode (to_rtx);
6113 if (GET_MODE (offset_rtx) != address_mode)
6114 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6116 to_rtx = offset_address (to_rtx, offset_rtx,
6117 highest_pow2_factor (offset));
6120 /* If this initializes a field that is smaller than a
6121 word, at the start of a word, try to widen it to a full
6122 word. This special case allows us to output C++ member
6123 function initializations in a form that the optimizers
6124 can understand. */
6125 if (WORD_REGISTER_OPERATIONS
6126 && REG_P (target)
6127 && bitsize < BITS_PER_WORD
6128 && bitpos % BITS_PER_WORD == 0
6129 && GET_MODE_CLASS (mode) == MODE_INT
6130 && TREE_CODE (value) == INTEGER_CST
6131 && exp_size >= 0
6132 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6134 tree type = TREE_TYPE (value);
6136 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6138 type = lang_hooks.types.type_for_mode
6139 (word_mode, TYPE_UNSIGNED (type));
6140 value = fold_convert (type, value);
6143 if (BYTES_BIG_ENDIAN)
6144 value
6145 = fold_build2 (LSHIFT_EXPR, type, value,
6146 build_int_cst (type,
6147 BITS_PER_WORD - bitsize));
6148 bitsize = BITS_PER_WORD;
6149 mode = word_mode;
6152 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6153 && DECL_NONADDRESSABLE_P (field))
6155 to_rtx = copy_rtx (to_rtx);
6156 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6159 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6160 value, cleared,
6161 get_alias_set (TREE_TYPE (field)),
6162 reverse);
6164 break;
6166 case ARRAY_TYPE:
6168 tree value, index;
6169 unsigned HOST_WIDE_INT i;
6170 int need_to_clear;
6171 tree domain;
6172 tree elttype = TREE_TYPE (type);
6173 int const_bounds_p;
6174 HOST_WIDE_INT minelt = 0;
6175 HOST_WIDE_INT maxelt = 0;
6177 /* The storage order is specified for every aggregate type. */
6178 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6180 domain = TYPE_DOMAIN (type);
6181 const_bounds_p = (TYPE_MIN_VALUE (domain)
6182 && TYPE_MAX_VALUE (domain)
6183 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6184 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6186 /* If we have constant bounds for the range of the type, get them. */
6187 if (const_bounds_p)
6189 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6190 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6193 /* If the constructor has fewer elements than the array, clear
6194 the whole array first. Similarly if this is a static
6195 constructor of a non-BLKmode object. */
6196 if (cleared)
6197 need_to_clear = 0;
6198 else if (REG_P (target) && TREE_STATIC (exp))
6199 need_to_clear = 1;
6200 else
6202 unsigned HOST_WIDE_INT idx;
6203 tree index, value;
6204 HOST_WIDE_INT count = 0, zero_count = 0;
6205 need_to_clear = ! const_bounds_p;
6207 /* This loop is a more accurate version of the loop in
6208 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6209 is also needed to check for missing elements. */
6210 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6212 HOST_WIDE_INT this_node_count;
6214 if (need_to_clear)
6215 break;
6217 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6219 tree lo_index = TREE_OPERAND (index, 0);
6220 tree hi_index = TREE_OPERAND (index, 1);
6222 if (! tree_fits_uhwi_p (lo_index)
6223 || ! tree_fits_uhwi_p (hi_index))
6225 need_to_clear = 1;
6226 break;
6229 this_node_count = (tree_to_uhwi (hi_index)
6230 - tree_to_uhwi (lo_index) + 1);
6232 else
6233 this_node_count = 1;
6235 count += this_node_count;
6236 if (mostly_zeros_p (value))
6237 zero_count += this_node_count;
6240 /* Clear the entire array first if there are any missing
6241 elements, or if the incidence of zero elements is >=
6242 75%. */
6243 if (! need_to_clear
6244 && (count < maxelt - minelt + 1
6245 || 4 * zero_count >= 3 * count))
6246 need_to_clear = 1;
6249 if (need_to_clear && size > 0)
6251 if (REG_P (target))
6252 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6253 else
6254 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6255 cleared = 1;
6258 if (!cleared && REG_P (target))
6259 /* Inform later passes that the old value is dead. */
6260 emit_clobber (target);
6262 /* Store each element of the constructor into the
6263 corresponding element of TARGET, determined by counting the
6264 elements. */
6265 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6267 machine_mode mode;
6268 HOST_WIDE_INT bitsize;
6269 HOST_WIDE_INT bitpos;
6270 rtx xtarget = target;
6272 if (cleared && initializer_zerop (value))
6273 continue;
6275 mode = TYPE_MODE (elttype);
6276 if (mode == BLKmode)
6277 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6278 ? tree_to_uhwi (TYPE_SIZE (elttype))
6279 : -1);
6280 else
6281 bitsize = GET_MODE_BITSIZE (mode);
6283 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6285 tree lo_index = TREE_OPERAND (index, 0);
6286 tree hi_index = TREE_OPERAND (index, 1);
6287 rtx index_r, pos_rtx;
6288 HOST_WIDE_INT lo, hi, count;
6289 tree position;
6291 /* If the range is constant and "small", unroll the loop. */
6292 if (const_bounds_p
6293 && tree_fits_shwi_p (lo_index)
6294 && tree_fits_shwi_p (hi_index)
6295 && (lo = tree_to_shwi (lo_index),
6296 hi = tree_to_shwi (hi_index),
6297 count = hi - lo + 1,
6298 (!MEM_P (target)
6299 || count <= 2
6300 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6301 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6302 <= 40 * 8)))))
6304 lo -= minelt; hi -= minelt;
6305 for (; lo <= hi; lo++)
6307 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6309 if (MEM_P (target)
6310 && !MEM_KEEP_ALIAS_SET_P (target)
6311 && TREE_CODE (type) == ARRAY_TYPE
6312 && TYPE_NONALIASED_COMPONENT (type))
6314 target = copy_rtx (target);
6315 MEM_KEEP_ALIAS_SET_P (target) = 1;
6318 store_constructor_field
6319 (target, bitsize, bitpos, mode, value, cleared,
6320 get_alias_set (elttype), reverse);
6323 else
6325 rtx_code_label *loop_start = gen_label_rtx ();
6326 rtx_code_label *loop_end = gen_label_rtx ();
6327 tree exit_cond;
6329 expand_normal (hi_index);
6331 index = build_decl (EXPR_LOCATION (exp),
6332 VAR_DECL, NULL_TREE, domain);
6333 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6334 SET_DECL_RTL (index, index_r);
6335 store_expr (lo_index, index_r, 0, false, reverse);
6337 /* Build the head of the loop. */
6338 do_pending_stack_adjust ();
6339 emit_label (loop_start);
6341 /* Assign value to element index. */
6342 position =
6343 fold_convert (ssizetype,
6344 fold_build2 (MINUS_EXPR,
6345 TREE_TYPE (index),
6346 index,
6347 TYPE_MIN_VALUE (domain)));
6349 position =
6350 size_binop (MULT_EXPR, position,
6351 fold_convert (ssizetype,
6352 TYPE_SIZE_UNIT (elttype)));
6354 pos_rtx = expand_normal (position);
6355 xtarget = offset_address (target, pos_rtx,
6356 highest_pow2_factor (position));
6357 xtarget = adjust_address (xtarget, mode, 0);
6358 if (TREE_CODE (value) == CONSTRUCTOR)
6359 store_constructor (value, xtarget, cleared,
6360 bitsize / BITS_PER_UNIT, reverse);
6361 else
6362 store_expr (value, xtarget, 0, false, reverse);
6364 /* Generate a conditional jump to exit the loop. */
6365 exit_cond = build2 (LT_EXPR, integer_type_node,
6366 index, hi_index);
6367 jumpif (exit_cond, loop_end, -1);
6369 /* Update the loop counter, and jump to the head of
6370 the loop. */
6371 expand_assignment (index,
6372 build2 (PLUS_EXPR, TREE_TYPE (index),
6373 index, integer_one_node),
6374 false);
6376 emit_jump (loop_start);
6378 /* Build the end of the loop. */
6379 emit_label (loop_end);
6382 else if ((index != 0 && ! tree_fits_shwi_p (index))
6383 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6385 tree position;
6387 if (index == 0)
6388 index = ssize_int (1);
6390 if (minelt)
6391 index = fold_convert (ssizetype,
6392 fold_build2 (MINUS_EXPR,
6393 TREE_TYPE (index),
6394 index,
6395 TYPE_MIN_VALUE (domain)));
6397 position =
6398 size_binop (MULT_EXPR, index,
6399 fold_convert (ssizetype,
6400 TYPE_SIZE_UNIT (elttype)));
6401 xtarget = offset_address (target,
6402 expand_normal (position),
6403 highest_pow2_factor (position));
6404 xtarget = adjust_address (xtarget, mode, 0);
6405 store_expr (value, xtarget, 0, false, reverse);
6407 else
6409 if (index != 0)
6410 bitpos = ((tree_to_shwi (index) - minelt)
6411 * tree_to_uhwi (TYPE_SIZE (elttype)));
6412 else
6413 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6415 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6416 && TREE_CODE (type) == ARRAY_TYPE
6417 && TYPE_NONALIASED_COMPONENT (type))
6419 target = copy_rtx (target);
6420 MEM_KEEP_ALIAS_SET_P (target) = 1;
6422 store_constructor_field (target, bitsize, bitpos, mode, value,
6423 cleared, get_alias_set (elttype),
6424 reverse);
6427 break;
6430 case VECTOR_TYPE:
6432 unsigned HOST_WIDE_INT idx;
6433 constructor_elt *ce;
6434 int i;
6435 int need_to_clear;
6436 int icode = CODE_FOR_nothing;
6437 tree elttype = TREE_TYPE (type);
6438 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6439 machine_mode eltmode = TYPE_MODE (elttype);
6440 HOST_WIDE_INT bitsize;
6441 HOST_WIDE_INT bitpos;
6442 rtvec vector = NULL;
6443 unsigned n_elts;
6444 alias_set_type alias;
6446 gcc_assert (eltmode != BLKmode);
6448 n_elts = TYPE_VECTOR_SUBPARTS (type);
6449 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6451 machine_mode mode = GET_MODE (target);
6453 icode = (int) optab_handler (vec_init_optab, mode);
6454 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6455 if (icode != CODE_FOR_nothing)
6457 tree value;
6459 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6460 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6462 icode = CODE_FOR_nothing;
6463 break;
6466 if (icode != CODE_FOR_nothing)
6468 unsigned int i;
6470 vector = rtvec_alloc (n_elts);
6471 for (i = 0; i < n_elts; i++)
6472 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6476 /* If the constructor has fewer elements than the vector,
6477 clear the whole vector first. Similarly if this is a static
6478 constructor of a non-BLKmode object. */
6479 if (cleared)
6480 need_to_clear = 0;
6481 else if (REG_P (target) && TREE_STATIC (exp))
6482 need_to_clear = 1;
6483 else
6485 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6486 tree value;
6488 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6490 int n_elts_here = tree_to_uhwi
6491 (int_const_binop (TRUNC_DIV_EXPR,
6492 TYPE_SIZE (TREE_TYPE (value)),
6493 TYPE_SIZE (elttype)));
6495 count += n_elts_here;
6496 if (mostly_zeros_p (value))
6497 zero_count += n_elts_here;
6500 /* Clear the entire vector first if there are any missing elements,
6501 or if the incidence of zero elements is >= 75%. */
6502 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6505 if (need_to_clear && size > 0 && !vector)
6507 if (REG_P (target))
6508 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6509 else
6510 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6511 cleared = 1;
6514 /* Inform later passes that the old value is dead. */
6515 if (!cleared && !vector && REG_P (target))
6516 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6518 if (MEM_P (target))
6519 alias = MEM_ALIAS_SET (target);
6520 else
6521 alias = get_alias_set (elttype);
6523 /* Store each element of the constructor into the corresponding
6524 element of TARGET, determined by counting the elements. */
6525 for (idx = 0, i = 0;
6526 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6527 idx++, i += bitsize / elt_size)
6529 HOST_WIDE_INT eltpos;
6530 tree value = ce->value;
6532 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6533 if (cleared && initializer_zerop (value))
6534 continue;
6536 if (ce->index)
6537 eltpos = tree_to_uhwi (ce->index);
6538 else
6539 eltpos = i;
6541 if (vector)
6543 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6544 elements. */
6545 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6546 RTVEC_ELT (vector, eltpos)
6547 = expand_normal (value);
6549 else
6551 machine_mode value_mode =
6552 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6553 ? TYPE_MODE (TREE_TYPE (value))
6554 : eltmode;
6555 bitpos = eltpos * elt_size;
6556 store_constructor_field (target, bitsize, bitpos, value_mode,
6557 value, cleared, alias, reverse);
6561 if (vector)
6562 emit_insn (GEN_FCN (icode)
6563 (target,
6564 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6565 break;
6568 default:
6569 gcc_unreachable ();
6573 /* Store the value of EXP (an expression tree)
6574 into a subfield of TARGET which has mode MODE and occupies
6575 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6576 If MODE is VOIDmode, it means that we are storing into a bit-field.
6578 BITREGION_START is bitpos of the first bitfield in this region.
6579 BITREGION_END is the bitpos of the ending bitfield in this region.
6580 These two fields are 0, if the C++ memory model does not apply,
6581 or we are not interested in keeping track of bitfield regions.
6583 Always return const0_rtx unless we have something particular to
6584 return.
6586 ALIAS_SET is the alias set for the destination. This value will
6587 (in general) be different from that for TARGET, since TARGET is a
6588 reference to the containing structure.
6590 If NONTEMPORAL is true, try generating a nontemporal store.
6592 If REVERSE is true, the store is to be done in reverse order. */
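/* As an illustrative example (not from the original sources), storing
   into B of
       struct s { int a : 3; int b : 5; };
   reaches this function with MODE == VOIDmode, BITSIZE == 5 and, on a
   typical little-endian target, BITPOS == 3, and is handled by the
   store_bit_field path below.  */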
6594 static rtx
6595 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6596 unsigned HOST_WIDE_INT bitregion_start,
6597 unsigned HOST_WIDE_INT bitregion_end,
6598 machine_mode mode, tree exp,
6599 alias_set_type alias_set, bool nontemporal, bool reverse)
6601 if (TREE_CODE (exp) == ERROR_MARK)
6602 return const0_rtx;
6604 /* If we have nothing to store, do nothing unless the expression has
6605 side-effects. */
6606 if (bitsize == 0)
6607 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6609 if (GET_CODE (target) == CONCAT)
6611 /* We're storing into a struct containing a single __complex. */
6613 gcc_assert (!bitpos);
6614 return store_expr (exp, target, 0, nontemporal, reverse);
6617 /* If the structure is in a register or if the component
6618 is a bit field, we cannot use addressing to access it.
6619 Use bit-field techniques or SUBREG to store in it. */
6621 if (mode == VOIDmode
6622 || (mode != BLKmode && ! direct_store[(int) mode]
6623 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6625 || REG_P (target)
6626 || GET_CODE (target) == SUBREG
6627 /* If the field isn't aligned enough to store as an ordinary memref,
6628 store it as a bit field. */
6629 || (mode != BLKmode
6630 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6631 || bitpos % GET_MODE_ALIGNMENT (mode))
6632 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6633 || (bitpos % BITS_PER_UNIT != 0)))
6634 || (bitsize >= 0 && mode != BLKmode
6635 && GET_MODE_BITSIZE (mode) > bitsize)
6636 /* If the RHS and field are a constant size and the size of the
6637 RHS isn't the same size as the bitfield, we must use bitfield
6638 operations. */
6639 || (bitsize >= 0
6640 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6641 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6642 /* Except for initialization of full bytes from a CONSTRUCTOR, which
6643 we will handle specially below. */
6644 && !(TREE_CODE (exp) == CONSTRUCTOR
6645 && bitsize % BITS_PER_UNIT == 0))
6646 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6647 decl we must use bitfield operations. */
6648 || (bitsize >= 0
6649 && TREE_CODE (exp) == MEM_REF
6650 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6651 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6652 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6653 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6655 rtx temp;
6656 gimple *nop_def;
6658 /* Using bitwise copy is not safe for TREE_ADDRESSABLE types. */
6659 gcc_assert (!TREE_ADDRESSABLE (TREE_TYPE (exp)));
6661 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6662 implies a mask operation. If the precision is the same size as
6663 the field we're storing into, that mask is redundant. This is
6664 particularly common with bit field assignments generated by the
6665 C front end. */
6666 nop_def = get_def_for_expr (exp, NOP_EXPR);
6667 if (nop_def)
6669 tree type = TREE_TYPE (exp);
6670 if (INTEGRAL_TYPE_P (type)
6671 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6672 && bitsize == TYPE_PRECISION (type))
6674 tree op = gimple_assign_rhs1 (nop_def);
6675 type = TREE_TYPE (op);
6676 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6677 exp = op;
6681 temp = expand_normal (exp);
6683 /* If the value has a record type and an integral mode then, if BITSIZE
6684 is narrower than this mode and this is for big-endian data, we must
6685 first put the value into the low-order bits. Moreover, the field may
6686 not be aligned on a byte boundary; in this case, if it has reverse
6687 storage order, it needs to be accessed as a scalar field with reverse
6688 storage order and we must first put the value into target order. */
6689 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
6690 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6692 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6694 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6696 if (reverse)
6697 temp = flip_storage_order (GET_MODE (temp), temp);
6699 if (bitsize < size
6700 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6701 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6702 size - bitsize, NULL_RTX, 1);
6705 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6706 if (mode != VOIDmode && mode != BLKmode
6707 && mode != TYPE_MODE (TREE_TYPE (exp)))
6708 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6710 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6711 are both BLKmode, both must be in memory and BITPOS must be aligned
6712 on a byte boundary. If so, we simply do a block copy. Likewise for
6713 a BLKmode-like TARGET. */
6714 if (GET_CODE (temp) != PARALLEL
6715 && GET_MODE (temp) == BLKmode
6716 && (GET_MODE (target) == BLKmode
6717 || (MEM_P (target)
6718 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6719 && (bitpos % BITS_PER_UNIT) == 0
6720 && (bitsize % BITS_PER_UNIT) == 0)))
6722 gcc_assert (MEM_P (target) && MEM_P (temp)
6723 && (bitpos % BITS_PER_UNIT) == 0);
6725 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6726 emit_block_move (target, temp,
6727 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6728 / BITS_PER_UNIT),
6729 BLOCK_OP_NORMAL);
6731 return const0_rtx;
6734 /* Handle calls that return values in multiple non-contiguous locations.
6735 The Irix 6 ABI has examples of this. */
6736 if (GET_CODE (temp) == PARALLEL)
6738 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6739 rtx temp_target;
6740 if (mode == BLKmode || mode == VOIDmode)
6741 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6742 temp_target = gen_reg_rtx (mode);
6743 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6744 temp = temp_target;
6746 else if (mode == BLKmode)
6748 /* Handle calls that return BLKmode values in registers. */
6749 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6751 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6752 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6753 temp = temp_target;
6755 else
6757 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6758 rtx temp_target;
6759 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6760 temp_target = gen_reg_rtx (mode);
6761 temp_target
6762 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6763 temp_target, mode, mode, false);
6764 temp = temp_target;
6768 /* Store the value in the bitfield. */
6769 store_bit_field (target, bitsize, bitpos,
6770 bitregion_start, bitregion_end,
6771 mode, temp, reverse);
6773 return const0_rtx;
6775 else
6777 /* Now build a reference to just the desired component. */
6778 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6780 if (to_rtx == target)
6781 to_rtx = copy_rtx (to_rtx);
6783 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6784 set_mem_alias_set (to_rtx, alias_set);
6786 /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6787 into a target smaller than its type; handle that case now. */
6788 if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6790 gcc_assert (bitsize % BITS_PER_UNIT == 0);
6791 store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6792 return to_rtx;
6795 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6799 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6800 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6801 codes and find the ultimate containing object, which we return.
6803 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6804 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6805 storage order of the field.
6806 If the position of the field is variable, we store a tree
6807 giving the variable offset (in units) in *POFFSET.
6808 This offset is in addition to the bit position.
6809 If the position is not variable, we store 0 in *POFFSET.
6811 If any of the extraction expressions is volatile,
6812 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6814 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6815 Otherwise, it is a mode that can be used to access the field.
6817 If the field describes a variable-sized object, *PMODE is set to
6818 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6819 this case, but the address of the object can be found.
6821 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6822 look through nodes that serve as markers of a greater alignment than
6823 the one that can be deduced from the expression. These nodes make it
6824 possible for front-ends to prevent temporaries from being created by
6825 the middle-end on alignment considerations. For that purpose, the
6826 normal operating mode at high-level is to always pass FALSE so that
6827 the ultimate containing object is really returned; moreover, the
6828 associated predicate handled_component_p will always return TRUE
6829 on these nodes, thus indicating that they are essentially handled
6830 by get_inner_reference. TRUE should only be passed when the caller
6831 is scanning the expression in order to build another representation
6832 and specifically knows how to handle these nodes; as such, this is
6833 the normal operating mode in the RTL expanders. */
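/* As an illustrative example (not from the original sources), for
       struct s { int x; int a[10]; } v;
   the reference v.a[i] hands back V as the containing object, with
   *PBITSIZE == 32, *PBITPOS the constant bit offset of A within S, and
   *POFFSET the tree (sizetype) i * 4, assuming 4-byte ints.  */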
6835 tree
6836 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6837 HOST_WIDE_INT *pbitpos, tree *poffset,
6838 machine_mode *pmode, int *punsignedp,
6839 int *preversep, int *pvolatilep, bool keep_aligning)
6841 tree size_tree = 0;
6842 machine_mode mode = VOIDmode;
6843 bool blkmode_bitfield = false;
6844 tree offset = size_zero_node;
6845 offset_int bit_offset = 0;
6847 /* First get the mode, signedness, storage order and size. We do this from
6848 just the outermost expression. */
6849 *pbitsize = -1;
6850 if (TREE_CODE (exp) == COMPONENT_REF)
6852 tree field = TREE_OPERAND (exp, 1);
6853 size_tree = DECL_SIZE (field);
6854 if (flag_strict_volatile_bitfields > 0
6855 && TREE_THIS_VOLATILE (exp)
6856 && DECL_BIT_FIELD_TYPE (field)
6857 && DECL_MODE (field) != BLKmode)
6858 /* Volatile bitfields should be accessed in the mode of the
6859 field's type, not the mode computed based on the bit
6860 size. */
6861 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6862 else if (!DECL_BIT_FIELD (field))
6863 mode = DECL_MODE (field);
6864 else if (DECL_MODE (field) == BLKmode)
6865 blkmode_bitfield = true;
6867 *punsignedp = DECL_UNSIGNED (field);
6869 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6871 size_tree = TREE_OPERAND (exp, 1);
6872 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6873 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6875 /* For vector types, with the correct size of access, use the mode of
6876 the inner type. */
6877 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6878 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6879 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6880 mode = TYPE_MODE (TREE_TYPE (exp));
6882 else
6884 mode = TYPE_MODE (TREE_TYPE (exp));
6885 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6887 if (mode == BLKmode)
6888 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6889 else
6890 *pbitsize = GET_MODE_BITSIZE (mode);
6893 if (size_tree != 0)
6895 if (! tree_fits_uhwi_p (size_tree))
6896 mode = BLKmode, *pbitsize = -1;
6897 else
6898 *pbitsize = tree_to_uhwi (size_tree);
6901 *preversep = reverse_storage_order_for_component_p (exp);
6903 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6904 and find the ultimate containing object. */
6905 while (1)
6907 switch (TREE_CODE (exp))
6909 case BIT_FIELD_REF:
6910 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6911 break;
6913 case COMPONENT_REF:
6915 tree field = TREE_OPERAND (exp, 1);
6916 tree this_offset = component_ref_field_offset (exp);
6918 /* If this field hasn't been filled in yet, don't go past it.
6919 This should only happen when folding expressions made during
6920 type construction. */
6921 if (this_offset == 0)
6922 break;
6924 offset = size_binop (PLUS_EXPR, offset, this_offset);
6925 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6927 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6929 break;
6931 case ARRAY_REF:
6932 case ARRAY_RANGE_REF:
6934 tree index = TREE_OPERAND (exp, 1);
6935 tree low_bound = array_ref_low_bound (exp);
6936 tree unit_size = array_ref_element_size (exp);
6938 /* We assume all arrays have sizes that are a multiple of a byte.
6939 First subtract the lower bound, if any, in the type of the
6940 index, then convert to sizetype and multiply by the size of
6941 the array element. */
6942 if (! integer_zerop (low_bound))
6943 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6944 index, low_bound);
6946 offset = size_binop (PLUS_EXPR, offset,
6947 size_binop (MULT_EXPR,
6948 fold_convert (sizetype, index),
6949 unit_size));
6951 break;
6953 case REALPART_EXPR:
6954 break;
6956 case IMAGPART_EXPR:
6957 bit_offset += *pbitsize;
6958 break;
6960 case VIEW_CONVERT_EXPR:
6961 if (keep_aligning && STRICT_ALIGNMENT
6962 && (TYPE_ALIGN (TREE_TYPE (exp))
6963 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6964 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6965 < BIGGEST_ALIGNMENT)
6966 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6967 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6968 goto done;
6969 break;
6971 case MEM_REF:
6972 /* Hand back the decl for MEM[&decl, off]. */
6973 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6975 tree off = TREE_OPERAND (exp, 1);
6976 if (!integer_zerop (off))
6978 offset_int boff, coff = mem_ref_offset (exp);
6979 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6980 bit_offset += boff;
6982 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6984 goto done;
6986 default:
6987 goto done;
6990 /* If any reference in the chain is volatile, the effect is volatile. */
6991 if (TREE_THIS_VOLATILE (exp))
6992 *pvolatilep = 1;
6994 exp = TREE_OPERAND (exp, 0);
6996 done:
6998 /* If OFFSET is constant, see if we can return the whole thing as a
6999 constant bit position. Make sure to handle overflow during
7000 this conversion. */
7001 if (TREE_CODE (offset) == INTEGER_CST)
7003 offset_int tem = wi::sext (wi::to_offset (offset),
7004 TYPE_PRECISION (sizetype));
7005 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
7006 tem += bit_offset;
7007 if (wi::fits_shwi_p (tem))
7009 *pbitpos = tem.to_shwi ();
7010 *poffset = offset = NULL_TREE;
7014 /* Otherwise, split it up. */
7015 if (offset)
7017 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7018 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7020 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7021 offset_int tem = bit_offset.and_not (mask);
7022 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7023 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
7024 bit_offset -= tem;
7025 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7026 offset = size_binop (PLUS_EXPR, offset,
7027 wide_int_to_tree (sizetype, tem));
7030 *pbitpos = bit_offset.to_shwi ();
7031 *poffset = offset;
7034 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7035 if (mode == VOIDmode
7036 && blkmode_bitfield
7037 && (*pbitpos % BITS_PER_UNIT) == 0
7038 && (*pbitsize % BITS_PER_UNIT) == 0)
7039 *pmode = BLKmode;
7040 else
7041 *pmode = mode;
7043 return exp;
7046 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7048 static unsigned HOST_WIDE_INT
7049 target_align (const_tree target)
7051 /* We might have a chain of nested references with intermediate misaligning
7052 bitfield components, so we need to recurse to find out. */
7054 unsigned HOST_WIDE_INT this_align, outer_align;
7056 switch (TREE_CODE (target))
7058 case BIT_FIELD_REF:
7059 return 1;
7061 case COMPONENT_REF:
7062 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7063 outer_align = target_align (TREE_OPERAND (target, 0));
7064 return MIN (this_align, outer_align);
7066 case ARRAY_REF:
7067 case ARRAY_RANGE_REF:
7068 this_align = TYPE_ALIGN (TREE_TYPE (target));
7069 outer_align = target_align (TREE_OPERAND (target, 0));
7070 return MIN (this_align, outer_align);
7072 CASE_CONVERT:
7073 case NON_LVALUE_EXPR:
7074 case VIEW_CONVERT_EXPR:
7075 this_align = TYPE_ALIGN (TREE_TYPE (target));
7076 outer_align = target_align (TREE_OPERAND (target, 0));
7077 return MAX (this_align, outer_align);
7079 default:
7080 return TYPE_ALIGN (TREE_TYPE (target));
7085 /* Given an rtx VALUE that may contain additions and multiplications, return
7086 an equivalent value that just refers to a register, memory, or constant.
7087 This is done by generating instructions to perform the arithmetic and
7088 returning a pseudo-register containing the value.
7090 The returned value may be a REG, SUBREG, MEM or constant. */
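/* As an illustrative example (not from the original sources), given
   VALUE == (plus (mult (reg X) (const_int 4)) (reg Y)), the multiply
   and the addition are emitted as insns and a pseudo-register holding
   the final sum is returned.  */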
7093 force_operand (rtx value, rtx target)
7095 rtx op1, op2;
7096 /* Use subtarget as the target for operand 0 of a binary operation. */
7097 rtx subtarget = get_subtarget (target);
7098 enum rtx_code code = GET_CODE (value);
7100 /* Check for subreg applied to an expression produced by the loop optimizer. */
7101 if (code == SUBREG
7102 && !REG_P (SUBREG_REG (value))
7103 && !MEM_P (SUBREG_REG (value)))
7105 value
7106 = simplify_gen_subreg (GET_MODE (value),
7107 force_reg (GET_MODE (SUBREG_REG (value)),
7108 force_operand (SUBREG_REG (value),
7109 NULL_RTX)),
7110 GET_MODE (SUBREG_REG (value)),
7111 SUBREG_BYTE (value));
7112 code = GET_CODE (value);
7115 /* Check for a PIC address load. */
7116 if ((code == PLUS || code == MINUS)
7117 && XEXP (value, 0) == pic_offset_table_rtx
7118 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7119 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7120 || GET_CODE (XEXP (value, 1)) == CONST))
7122 if (!subtarget)
7123 subtarget = gen_reg_rtx (GET_MODE (value));
7124 emit_move_insn (subtarget, value);
7125 return subtarget;
7128 if (ARITHMETIC_P (value))
7130 op2 = XEXP (value, 1);
7131 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7132 subtarget = 0;
7133 if (code == MINUS && CONST_INT_P (op2))
7135 code = PLUS;
7136 op2 = negate_rtx (GET_MODE (value), op2);
7139 /* Check for an addition with OP2 a constant integer and our first
7140 operand a PLUS of a virtual register and something else. In that
7141 case, we want to emit the sum of the virtual register and the
7142 constant first and then add the other value. This allows virtual
7143 register instantiation to simply modify the constant rather than
7144 creating another one around this addition. */
7145 if (code == PLUS && CONST_INT_P (op2)
7146 && GET_CODE (XEXP (value, 0)) == PLUS
7147 && REG_P (XEXP (XEXP (value, 0), 0))
7148 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7149 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7151 rtx temp = expand_simple_binop (GET_MODE (value), code,
7152 XEXP (XEXP (value, 0), 0), op2,
7153 subtarget, 0, OPTAB_LIB_WIDEN);
7154 return expand_simple_binop (GET_MODE (value), code, temp,
7155 force_operand (XEXP (XEXP (value,
7156 0), 1), 0),
7157 target, 0, OPTAB_LIB_WIDEN);
7160 op1 = force_operand (XEXP (value, 0), subtarget);
7161 op2 = force_operand (op2, NULL_RTX);
7162 switch (code)
7164 case MULT:
7165 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7166 case DIV:
7167 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7168 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7169 target, 1, OPTAB_LIB_WIDEN);
7170 else
7171 return expand_divmod (0,
7172 FLOAT_MODE_P (GET_MODE (value))
7173 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7174 GET_MODE (value), op1, op2, target, 0);
7175 case MOD:
7176 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7177 target, 0);
7178 case UDIV:
7179 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7180 target, 1);
7181 case UMOD:
7182 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7183 target, 1);
7184 case ASHIFTRT:
7185 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7186 target, 0, OPTAB_LIB_WIDEN);
7187 default:
7188 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7189 target, 1, OPTAB_LIB_WIDEN);
7192 if (UNARY_P (value))
7194 if (!target)
7195 target = gen_reg_rtx (GET_MODE (value));
7196 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7197 switch (code)
7199 case ZERO_EXTEND:
7200 case SIGN_EXTEND:
7201 case TRUNCATE:
7202 case FLOAT_EXTEND:
7203 case FLOAT_TRUNCATE:
7204 convert_move (target, op1, code == ZERO_EXTEND);
7205 return target;
7207 case FIX:
7208 case UNSIGNED_FIX:
7209 expand_fix (target, op1, code == UNSIGNED_FIX);
7210 return target;
7212 case FLOAT:
7213 case UNSIGNED_FLOAT:
7214 expand_float (target, op1, code == UNSIGNED_FLOAT);
7215 return target;
7217 default:
7218 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7222 #ifdef INSN_SCHEDULING
7223 /* On machines that have insn scheduling, we want all memory references to be
7224 explicit, so we need to deal with such paradoxical SUBREGs. */
7225 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7226 value
7227 = simplify_gen_subreg (GET_MODE (value),
7228 force_reg (GET_MODE (SUBREG_REG (value)),
7229 force_operand (SUBREG_REG (value),
7230 NULL_RTX)),
7231 GET_MODE (SUBREG_REG (value)),
7232 SUBREG_BYTE (value));
7233 #endif
7235 return value;
7238 /* Subroutine of expand_expr: return nonzero iff there is no way that
7239 EXP can reference X, which is being modified. TOP_P is nonzero if this
7240 call is going to be used to determine whether we need a temporary
7241 for EXP, as opposed to a recursive call to this function.
7243 It is always safe for this routine to return zero since it merely
7244 searches for optimization opportunities. */
7247 safe_from_p (const_rtx x, tree exp, int top_p)
7249 rtx exp_rtl = 0;
7250 int i, nops;
7252 if (x == 0
7253 /* If EXP has varying size, we MUST use a target since we currently
7254 have no way of allocating temporaries of variable size
7255 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7256 So we assume here that something at a higher level has prevented a
7257 clash. This is somewhat bogus, but the best we can do. Only
7258 do this when X is BLKmode and when we are at the top level. */
7259 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7260 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7261 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7262 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7263 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7264 != INTEGER_CST)
7265 && GET_MODE (x) == BLKmode)
7266 /* If X is in the outgoing argument area, it is always safe. */
7267 || (MEM_P (x)
7268 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7269 || (GET_CODE (XEXP (x, 0)) == PLUS
7270 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7271 return 1;
7273 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7274 find the underlying pseudo. */
7275 if (GET_CODE (x) == SUBREG)
7277 x = SUBREG_REG (x);
7278 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7279 return 0;
7282 /* Now look at our tree code and possibly recurse. */
7283 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7285 case tcc_declaration:
7286 exp_rtl = DECL_RTL_IF_SET (exp);
7287 break;
7289 case tcc_constant:
7290 return 1;
7292 case tcc_exceptional:
7293 if (TREE_CODE (exp) == TREE_LIST)
7295 while (1)
7297 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7298 return 0;
7299 exp = TREE_CHAIN (exp);
7300 if (!exp)
7301 return 1;
7302 if (TREE_CODE (exp) != TREE_LIST)
7303 return safe_from_p (x, exp, 0);
7306 else if (TREE_CODE (exp) == CONSTRUCTOR)
7308 constructor_elt *ce;
7309 unsigned HOST_WIDE_INT idx;
7311 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7312 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7313 || !safe_from_p (x, ce->value, 0))
7314 return 0;
7315 return 1;
7317 else if (TREE_CODE (exp) == ERROR_MARK)
7318 return 1; /* An already-visited SAVE_EXPR? */
7319 else
7320 return 0;
7322 case tcc_statement:
7323 /* The only case we look at here is the DECL_INITIAL inside a
7324 DECL_EXPR. */
7325 return (TREE_CODE (exp) != DECL_EXPR
7326 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7327 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7328 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7330 case tcc_binary:
7331 case tcc_comparison:
7332 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7333 return 0;
7334 /* Fall through. */
7336 case tcc_unary:
7337 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7339 case tcc_expression:
7340 case tcc_reference:
7341 case tcc_vl_exp:
7342 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7343 the expression. If it is set, we conflict iff we are that rtx or
7344 both are in memory. Otherwise, we check all operands of the
7345 expression recursively. */
7347 switch (TREE_CODE (exp))
7349 case ADDR_EXPR:
7350 /* If the operand is static or we are static, we can't conflict.
7351 Likewise if we don't conflict with the operand at all. */
7352 if (staticp (TREE_OPERAND (exp, 0))
7353 || TREE_STATIC (exp)
7354 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7355 return 1;
7357 /* Otherwise, the only way this can conflict is if we are taking
7358 the address of a DECL and that address is part of X, which is
7359 very rare. */
7360 exp = TREE_OPERAND (exp, 0);
7361 if (DECL_P (exp))
7363 if (!DECL_RTL_SET_P (exp)
7364 || !MEM_P (DECL_RTL (exp)))
7365 return 0;
7366 else
7367 exp_rtl = XEXP (DECL_RTL (exp), 0);
7369 break;
7371 case MEM_REF:
7372 if (MEM_P (x)
7373 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7374 get_alias_set (exp)))
7375 return 0;
7376 break;
7378 case CALL_EXPR:
7379 /* Assume that the call will clobber all hard registers and
7380 all of memory. */
7381 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7382 || MEM_P (x))
7383 return 0;
7384 break;
7386 case WITH_CLEANUP_EXPR:
7387 case CLEANUP_POINT_EXPR:
7388 /* Lowered by gimplify.c. */
7389 gcc_unreachable ();
7391 case SAVE_EXPR:
7392 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7394 default:
7395 break;
7398 /* If we have an rtx, we do not need to scan our operands. */
7399 if (exp_rtl)
7400 break;
7402 nops = TREE_OPERAND_LENGTH (exp);
7403 for (i = 0; i < nops; i++)
7404 if (TREE_OPERAND (exp, i) != 0
7405 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7406 return 0;
7408 break;
7410 case tcc_type:
7411 /* Should never get a type here. */
7412 gcc_unreachable ();
7415 /* If we have an rtl, find any enclosed object. Then see if we conflict
7416 with it. */
7417 if (exp_rtl)
7419 if (GET_CODE (exp_rtl) == SUBREG)
7421 exp_rtl = SUBREG_REG (exp_rtl);
7422 if (REG_P (exp_rtl)
7423 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7424 return 0;
7427 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7428 are memory and they conflict. */
7429 return ! (rtx_equal_p (x, exp_rtl)
7430 || (MEM_P (x) && MEM_P (exp_rtl)
7431 && true_dependence (exp_rtl, VOIDmode, x)));
7434 /* If we reach here, it is safe. */
7435 return 1;
7439 /* Return the highest power of two that EXP is known to be a multiple of.
7440 This is used in updating alignment of MEMs in array references. */
7442 unsigned HOST_WIDE_INT
7443 highest_pow2_factor (const_tree exp)
7445 unsigned HOST_WIDE_INT ret;
7446 int trailing_zeros = tree_ctz (exp);
7447 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7448 return BIGGEST_ALIGNMENT;
7449 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7450 if (ret > BIGGEST_ALIGNMENT)
7451 return BIGGEST_ALIGNMENT;
7452 return ret;
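/* Illustrative sketch (not part of the compiler): for an INTEGER_CST
   of 48, tree_ctz returns 4, so the result is 1 << 4 == 16, the
   largest power of two dividing 48; anything with
   HOST_BITS_PER_WIDE_INT or more trailing zeros (e.g. a literal zero)
   is clamped to BIGGEST_ALIGNMENT, as is any larger factor:

     unsigned HOST_WIDE_INT f
       = highest_pow2_factor (build_int_cst (sizetype, 48));
     gcc_checking_assert (f == 16);
*/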
7455 /* Similar, except that the alignment requirements of TARGET are
7456 taken into account. Assume it is at least as aligned as its
7457 type, unless it is a COMPONENT_REF in which case the layout of
7458 the structure gives the alignment. */
7460 static unsigned HOST_WIDE_INT
7461 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7463 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7464 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7466 return MAX (factor, talign);
7469 /* Convert the tree comparison code TCODE to the rtl one where the
7470 signedness is UNSIGNEDP. */
7472 static enum rtx_code
7473 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7475 enum rtx_code code;
7476 switch (tcode)
7478 case EQ_EXPR:
7479 code = EQ;
7480 break;
7481 case NE_EXPR:
7482 code = NE;
7483 break;
7484 case LT_EXPR:
7485 code = unsignedp ? LTU : LT;
7486 break;
7487 case LE_EXPR:
7488 code = unsignedp ? LEU : LE;
7489 break;
7490 case GT_EXPR:
7491 code = unsignedp ? GTU : GT;
7492 break;
7493 case GE_EXPR:
7494 code = unsignedp ? GEU : GE;
7495 break;
7496 case UNORDERED_EXPR:
7497 code = UNORDERED;
7498 break;
7499 case ORDERED_EXPR:
7500 code = ORDERED;
7501 break;
7502 case UNLT_EXPR:
7503 code = UNLT;
7504 break;
7505 case UNLE_EXPR:
7506 code = UNLE;
7507 break;
7508 case UNGT_EXPR:
7509 code = UNGT;
7510 break;
7511 case UNGE_EXPR:
7512 code = UNGE;
7513 break;
7514 case UNEQ_EXPR:
7515 code = UNEQ;
7516 break;
7517 case LTGT_EXPR:
7518 code = LTGT;
7519 break;
7521 default:
7522 gcc_unreachable ();
7524 return code;
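/* For example (illustrative only): a signed tree comparison maps to
   the signed rtl code and an unsigned one to its unsigned variant,
   while the IEEE unordered codes pass through unchanged:

     enum rtx_code c1 = convert_tree_comp_to_rtx (LT_EXPR, 1);   -- LTU
     enum rtx_code c2 = convert_tree_comp_to_rtx (LT_EXPR, 0);   -- LT
     enum rtx_code c3 = convert_tree_comp_to_rtx (UNGE_EXPR, 0); -- UNGE
*/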
7527 /* Subroutine of expand_expr. Expand the two operands of a binary
7528 expression, EXP0 and EXP1, placing the results in OP0 and OP1.
7529 The value may be stored in TARGET if TARGET is nonzero. The
7530 MODIFIER argument is as documented by expand_expr. */
7532 void
7533 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7534 enum expand_modifier modifier)
7536 if (! safe_from_p (target, exp1, 1))
7537 target = 0;
7538 if (operand_equal_p (exp0, exp1, 0))
7540 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7541 *op1 = copy_rtx (*op0);
7543 else
7545 /* If we need to preserve evaluation order, copy exp0 into its own
7546 temporary variable so that it can't be clobbered by exp1. */
7547 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7548 exp0 = save_expr (exp0);
7549 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7550 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
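/* A typical caller (minimal sketch, mirroring the binop cases in
   expand_expr_real_2 below) expands both operands and then emits the
   operation:

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     return expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);
*/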
7555 /* Return a MEM that contains constant EXP. DEFER is as for
7556 output_constant_def and MODIFIER is as for expand_expr. */
7558 static rtx
7559 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7561 rtx mem;
7563 mem = output_constant_def (exp, defer);
7564 if (modifier != EXPAND_INITIALIZER)
7565 mem = use_anchored_address (mem);
7566 return mem;
7569 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7570 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7572 static rtx
7573 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7574 enum expand_modifier modifier, addr_space_t as)
7576 rtx result, subtarget;
7577 tree inner, offset;
7578 HOST_WIDE_INT bitsize, bitpos;
7579 int unsignedp, reversep, volatilep = 0;
7580 machine_mode mode1;
7582 /* If we are taking the address of a constant and are at the top level,
7583 we have to use output_constant_def since we can't call force_const_mem
7584 at top level. */
7585 /* ??? This should be considered a front-end bug. We should not be
7586 generating ADDR_EXPR of something that isn't an LVALUE. The only
7587 exception here is STRING_CST. */
7588 if (CONSTANT_CLASS_P (exp))
7590 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7591 if (modifier < EXPAND_SUM)
7592 result = force_operand (result, target);
7593 return result;
7596 /* Everything must be something allowed by is_gimple_addressable. */
7597 switch (TREE_CODE (exp))
7599 case INDIRECT_REF:
7600 /* This case will happen via recursion for &a->b. */
7601 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7603 case MEM_REF:
7605 tree tem = TREE_OPERAND (exp, 0);
7606 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7607 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7608 return expand_expr (tem, target, tmode, modifier);
7611 case CONST_DECL:
7612 /* Expand the initializer like constants above. */
7613 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7614 0, modifier), 0);
7615 if (modifier < EXPAND_SUM)
7616 result = force_operand (result, target);
7617 return result;
7619 case REALPART_EXPR:
7620 /* The real part of the complex number is always first, therefore
7621 the address is the same as the address of the parent object. */
7622 offset = 0;
7623 bitpos = 0;
7624 inner = TREE_OPERAND (exp, 0);
7625 break;
7627 case IMAGPART_EXPR:
7628 /* The imaginary part of the complex number is always second.
7629 The expression is therefore always offset by the size of the
7630 scalar type. */
7631 offset = 0;
7632 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7633 inner = TREE_OPERAND (exp, 0);
7634 break;
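/* Illustration (assuming a target where DFmode is 64 bits wide): for
   IMAGPART_EXPR of a _Complex double object C, bitpos becomes 64, so
   the address computed below is the parent's address plus 8 bytes,
   i.e. &__imag C == (char *) &C + sizeof (double).  */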
7636 case COMPOUND_LITERAL_EXPR:
7637 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7638 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7639 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7640 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7641 the initializers aren't gimplified. */
7642 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7643 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7644 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7645 target, tmode, modifier, as);
7646 /* FALLTHRU */
7647 default:
7648 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7649 expand_expr, as that can have various side effects; LABEL_DECLs for
7650 example, may not have their DECL_RTL set yet. Expand the rtl of
7651 CONSTRUCTORs too, which should yield a memory reference for the
7652 constructor's contents. Assume language specific tree nodes can
7653 be expanded in some interesting way. */
7654 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7655 if (DECL_P (exp)
7656 || TREE_CODE (exp) == CONSTRUCTOR
7657 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7659 result = expand_expr (exp, target, tmode,
7660 modifier == EXPAND_INITIALIZER
7661 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7663 /* If the DECL isn't in memory, then the DECL wasn't properly
7664 marked TREE_ADDRESSABLE, which will be either a front-end
7665 or a tree optimizer bug. */
7667 gcc_assert (MEM_P (result));
7668 result = XEXP (result, 0);
7670 /* ??? Is this needed anymore? */
7671 if (DECL_P (exp))
7672 TREE_USED (exp) = 1;
7674 if (modifier != EXPAND_INITIALIZER
7675 && modifier != EXPAND_CONST_ADDRESS
7676 && modifier != EXPAND_SUM)
7677 result = force_operand (result, target);
7678 return result;
7681 /* Pass FALSE as the last argument to get_inner_reference although
7682 we are expanding to RTL. The rationale is that we know how to
7683 handle "aligning nodes" here: we can just bypass them because
7684 they won't change the final object whose address will be returned
7685 (they actually exist only for that purpose). */
7686 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7687 &unsignedp, &reversep, &volatilep, false);
7688 break;
7691 /* We must have made progress. */
7692 gcc_assert (inner != exp);
7694 subtarget = offset || bitpos ? NULL_RTX : target;
7695 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7696 inner alignment, force the inner to be sufficiently aligned. */
7697 if (CONSTANT_CLASS_P (inner)
7698 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7700 inner = copy_node (inner);
7701 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7702 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7703 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7705 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7707 if (offset)
7709 rtx tmp;
7711 if (modifier != EXPAND_NORMAL)
7712 result = force_operand (result, NULL);
7713 tmp = expand_expr (offset, NULL_RTX, tmode,
7714 modifier == EXPAND_INITIALIZER
7715 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7717 /* expand_expr is allowed to return an object in a mode other
7718 than TMODE. If it did, we need to convert. */
7719 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7720 tmp = convert_modes (tmode, GET_MODE (tmp),
7721 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7722 result = convert_memory_address_addr_space (tmode, result, as);
7723 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7725 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7726 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7727 else
7729 subtarget = bitpos ? NULL_RTX : target;
7730 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7731 1, OPTAB_LIB_WIDEN);
7735 if (bitpos)
7737 /* Someone beforehand should have rejected taking the address
7738 of such an object. */
7739 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7741 result = convert_memory_address_addr_space (tmode, result, as);
7742 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7743 if (modifier < EXPAND_SUM)
7744 result = force_operand (result, target);
7747 return result;
7750 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7751 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7753 static rtx
7754 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7755 enum expand_modifier modifier)
7757 addr_space_t as = ADDR_SPACE_GENERIC;
7758 machine_mode address_mode = Pmode;
7759 machine_mode pointer_mode = ptr_mode;
7760 machine_mode rmode;
7761 rtx result;
7763 /* Target mode of VOIDmode says "whatever's natural". */
7764 if (tmode == VOIDmode)
7765 tmode = TYPE_MODE (TREE_TYPE (exp));
7767 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7769 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7770 address_mode = targetm.addr_space.address_mode (as);
7771 pointer_mode = targetm.addr_space.pointer_mode (as);
7774 /* We can get called with some Weird Things if the user does silliness
7775 like "(short) &a". In that case, convert_memory_address won't do
7776 the right thing, so ignore the given target mode. */
7777 if (tmode != address_mode && tmode != pointer_mode)
7778 tmode = address_mode;
7780 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7781 tmode, modifier, as);
7783 /* Despite expand_expr's claims about ignoring TMODE when not
7784 strictly convenient, stuff breaks if we don't honor it. Note
7785 that combined with the above, we only do this for pointer modes. */
7786 rmode = GET_MODE (result);
7787 if (rmode == VOIDmode)
7788 rmode = tmode;
7789 if (rmode != tmode)
7790 result = convert_memory_address_addr_space (tmode, result, as);
7792 return result;
7795 /* Generate code for computing CONSTRUCTOR EXP.
7796 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7797 is TRUE, instead of creating a temporary variable in memory
7798 NULL is returned and the caller needs to handle it differently. */
7800 static rtx
7801 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7802 bool avoid_temp_mem)
7804 tree type = TREE_TYPE (exp);
7805 machine_mode mode = TYPE_MODE (type);
7807 /* Try to avoid creating a temporary at all. This is possible
7808 if all of the initializer is zero.
7809 FIXME: try to handle all [0..255] initializers we can handle
7810 with memset. */
7811 if (TREE_STATIC (exp)
7812 && !TREE_ADDRESSABLE (exp)
7813 && target != 0 && mode == BLKmode
7814 && all_zeros_p (exp))
7816 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7817 return target;
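/* E.g. (illustrative) an aggregate initialized from an all-zero
   constant constructor takes this path and is expanded as a single
   clear_storage call -- typically a memset or an inline clear --
   rather than being materialized in a temporary and copied.  */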
7820 /* All elts simple constants => refer to a constant in memory. But
7821 if this is a non-BLKmode mode, let it store a field at a time
7822 since that should make a CONST_INT, CONST_WIDE_INT or
7823 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7824 use, it is best to store directly into the target unless the type
7825 is large enough that memcpy will be used. If we are making an
7826 initializer and all operands are constant, put it in memory as
7827 well.
7829 FIXME: Avoid trying to fill vector constructors piecemeal.
7830 Output them with output_constant_def below unless we're sure
7831 they're zeros. This should go away when vector initializers
7832 are treated like VECTOR_CST instead of arrays. */
7833 if ((TREE_STATIC (exp)
7834 && ((mode == BLKmode
7835 && ! (target != 0 && safe_from_p (target, exp, 1)))
7836 || TREE_ADDRESSABLE (exp)
7837 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7838 && (! can_move_by_pieces
7839 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7840 TYPE_ALIGN (type)))
7841 && ! mostly_zeros_p (exp))))
7842 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7843 && TREE_CONSTANT (exp)))
7845 rtx constructor;
7847 if (avoid_temp_mem)
7848 return NULL_RTX;
7850 constructor = expand_expr_constant (exp, 1, modifier);
7852 if (modifier != EXPAND_CONST_ADDRESS
7853 && modifier != EXPAND_INITIALIZER
7854 && modifier != EXPAND_SUM)
7855 constructor = validize_mem (constructor);
7857 return constructor;
7860 /* Handle calls that pass values in multiple non-contiguous
7861 locations. The Irix 6 ABI has examples of this. */
7862 if (target == 0 || ! safe_from_p (target, exp, 1)
7863 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7865 if (avoid_temp_mem)
7866 return NULL_RTX;
7868 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7871 store_constructor (exp, target, 0, int_expr_size (exp), false);
7872 return target;
7876 /* expand_expr: generate code for computing expression EXP.
7877 An rtx for the computed value is returned. The value is never null.
7878 In the case of a void EXP, const0_rtx is returned.
7880 The value may be stored in TARGET if TARGET is nonzero.
7881 TARGET is just a suggestion; callers must assume that
7882 the rtx returned may not be the same as TARGET.
7884 If TARGET is CONST0_RTX, it means that the value will be ignored.
7886 If TMODE is not VOIDmode, it suggests generating the
7887 result in mode TMODE. But this is done only when convenient.
7888 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7889 TMODE is just a suggestion; callers must assume that
7890 the rtx returned may not have mode TMODE.
7892 Note that TARGET may have neither TMODE nor MODE. In that case, it
7893 probably will not be used.
7895 If MODIFIER is EXPAND_SUM then when EXP is an addition
7896 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7897 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7898 products as above, or REG or MEM, or constant.
7899 Ordinarily in such cases we would output mul or add instructions
7900 and then return a pseudo reg containing the sum.
7902 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7903 it also marks a label as absolutely required (it can't be dead).
7904 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7905 This is used for outputting expressions used in initializers.
7907 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7908 with a constant address even if that address is not normally legitimate.
7909 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7911 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7912 a call parameter. Such targets require special care as we haven't yet
7913 marked TARGET so that it's safe from being trashed by libcalls. We
7914 don't want to use TARGET for anything but the final result;
7915 intermediate values must go elsewhere. Additionally, calls to
7916 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7918 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7919 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7920 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7921 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7922 recursively.
7924 If INNER_REFERENCE_P is true, we are expanding an inner reference.
7925 In this case, we don't adjust a returned MEM rtx that wouldn't be
7926 sufficiently aligned for its mode; instead, it's up to the caller
7927 to deal with it afterwards. This is used to make sure that unaligned
7928 base objects for which out-of-bounds accesses are supported, for
7929 example record types with trailing arrays, aren't realigned behind
7930 the back of the caller.
7931 The normal operating mode is to pass FALSE for this parameter. */
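/* A common call pattern (sketch): most clients want the plain form,
   which the expand_expr / expand_normal wrappers provide, e.g.

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   while initializer and address contexts pass EXPAND_INITIALIZER,
   EXPAND_SUM or EXPAND_CONST_ADDRESS instead.  */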
7933 rtx
7934 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7935 enum expand_modifier modifier, rtx *alt_rtl,
7936 bool inner_reference_p)
7938 rtx ret;
7940 /* Handle ERROR_MARK before anybody tries to access its type. */
7941 if (TREE_CODE (exp) == ERROR_MARK
7942 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7944 ret = CONST0_RTX (tmode);
7945 return ret ? ret : const0_rtx;
7948 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7949 inner_reference_p);
7950 return ret;
7953 /* Try to expand the conditional expression which is represented by
7954 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds
7955 return the rtl reg which represents the result. Otherwise return
7956 NULL_RTX. */
7958 static rtx
7959 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7960 tree treeop1 ATTRIBUTE_UNUSED,
7961 tree treeop2 ATTRIBUTE_UNUSED)
7963 rtx insn;
7964 rtx op00, op01, op1, op2;
7965 enum rtx_code comparison_code;
7966 machine_mode comparison_mode;
7967 gimple *srcstmt;
7968 rtx temp;
7969 tree type = TREE_TYPE (treeop1);
7970 int unsignedp = TYPE_UNSIGNED (type);
7971 machine_mode mode = TYPE_MODE (type);
7972 machine_mode orig_mode = mode;
7974 /* If we cannot do a conditional move on the mode, try doing it
7975 with the promoted mode. */
7976 if (!can_conditionally_move_p (mode))
7978 mode = promote_mode (type, mode, &unsignedp);
7979 if (!can_conditionally_move_p (mode))
7980 return NULL_RTX;
7981 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7983 else
7984 temp = assign_temp (type, 0, 1);
7986 start_sequence ();
7987 expand_operands (treeop1, treeop2,
7988 temp, &op1, &op2, EXPAND_NORMAL);
7990 if (TREE_CODE (treeop0) == SSA_NAME
7991 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7993 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7994 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7995 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7996 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7997 comparison_mode = TYPE_MODE (type);
7998 unsignedp = TYPE_UNSIGNED (type);
7999 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8001 else if (COMPARISON_CLASS_P (treeop0))
8003 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8004 enum tree_code cmpcode = TREE_CODE (treeop0);
8005 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8006 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8007 unsignedp = TYPE_UNSIGNED (type);
8008 comparison_mode = TYPE_MODE (type);
8009 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8011 else
8013 op00 = expand_normal (treeop0);
8014 op01 = const0_rtx;
8015 comparison_code = NE;
8016 comparison_mode = GET_MODE (op00);
8017 if (comparison_mode == VOIDmode)
8018 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8021 if (GET_MODE (op1) != mode)
8022 op1 = gen_lowpart (mode, op1);
8024 if (GET_MODE (op2) != mode)
8025 op2 = gen_lowpart (mode, op2);
8027 /* Try to emit the conditional move. */
8028 insn = emit_conditional_move (temp, comparison_code,
8029 op00, op01, comparison_mode,
8030 op1, op2, mode,
8031 unsignedp);
8033 /* If we could do the conditional move, emit the sequence,
8034 and return. */
8035 if (insn)
8037 rtx_insn *seq = get_insns ();
8038 end_sequence ();
8039 emit_insn (seq);
8040 return convert_modes (orig_mode, mode, temp, 0);
8043 /* Otherwise discard the sequence and fall back to code with
8044 branches. */
8045 end_sequence ();
8046 return NULL_RTX;
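/* For instance (illustrative): "x = a < b ? c : d" becomes roughly

     (set temp (if_then_else (lt a b) c d))

   when the target provides a conditional move for the mode; otherwise
   NULL_RTX is returned and the caller falls back to compare-and-branch
   code.  */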
8049 rtx
8050 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8051 enum expand_modifier modifier)
8053 rtx op0, op1, op2, temp;
8054 rtx_code_label *lab;
8055 tree type;
8056 int unsignedp;
8057 machine_mode mode;
8058 enum tree_code code = ops->code;
8059 optab this_optab;
8060 rtx subtarget, original_target;
8061 int ignore;
8062 bool reduce_bit_field;
8063 location_t loc = ops->location;
8064 tree treeop0, treeop1, treeop2;
8065 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8066 ? reduce_to_bit_field_precision ((expr), \
8067 target, \
8068 type) \
8069 : (expr))
8071 type = ops->type;
8072 mode = TYPE_MODE (type);
8073 unsignedp = TYPE_UNSIGNED (type);
8075 treeop0 = ops->op0;
8076 treeop1 = ops->op1;
8077 treeop2 = ops->op2;
8079 /* We should be called only on simple (binary or unary) expressions,
8080 exactly those that are valid in gimple expressions that aren't
8081 GIMPLE_SINGLE_RHS (or invalid). */
8082 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8083 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8084 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8086 ignore = (target == const0_rtx
8087 || ((CONVERT_EXPR_CODE_P (code)
8088 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8089 && TREE_CODE (type) == VOID_TYPE));
8091 /* We should be called only if we need the result. */
8092 gcc_assert (!ignore);
8094 /* An operation in what may be a bit-field type needs the
8095 result to be reduced to the precision of the bit-field type,
8096 which is narrower than that of the type's mode. */
8097 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8098 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8100 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8101 target = 0;
8103 /* Use subtarget as the target for operand 0 of a binary operation. */
8104 subtarget = get_subtarget (target);
8105 original_target = target;
8107 switch (code)
8109 case NON_LVALUE_EXPR:
8110 case PAREN_EXPR:
8111 CASE_CONVERT:
8112 if (treeop0 == error_mark_node)
8113 return const0_rtx;
8115 if (TREE_CODE (type) == UNION_TYPE)
8117 tree valtype = TREE_TYPE (treeop0);
8119 /* If both input and output are BLKmode, this conversion isn't doing
8120 anything except possibly changing memory attribute. */
8121 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8123 rtx result = expand_expr (treeop0, target, tmode,
8124 modifier);
8126 result = copy_rtx (result);
8127 set_mem_attributes (result, type, 0);
8128 return result;
8131 if (target == 0)
8133 if (TYPE_MODE (type) != BLKmode)
8134 target = gen_reg_rtx (TYPE_MODE (type));
8135 else
8136 target = assign_temp (type, 1, 1);
8139 if (MEM_P (target))
8140 /* Store data into beginning of memory target. */
8141 store_expr (treeop0,
8142 adjust_address (target, TYPE_MODE (valtype), 0),
8143 modifier == EXPAND_STACK_PARM,
8144 false, TYPE_REVERSE_STORAGE_ORDER (type));
8146 else
8148 gcc_assert (REG_P (target)
8149 && !TYPE_REVERSE_STORAGE_ORDER (type));
8151 /* Store this field into a union of the proper type. */
8152 store_field (target,
8153 MIN ((int_size_in_bytes (TREE_TYPE
8154 (treeop0))
8155 * BITS_PER_UNIT),
8156 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8157 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8158 false, false);
8161 /* Return the entire union. */
8162 return target;
8165 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8167 op0 = expand_expr (treeop0, target, VOIDmode,
8168 modifier);
8170 /* If the signedness of the conversion differs and OP0 is
8171 a promoted SUBREG, clear that indication since we now
8172 have to do the proper extension. */
8173 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8174 && GET_CODE (op0) == SUBREG)
8175 SUBREG_PROMOTED_VAR_P (op0) = 0;
8177 return REDUCE_BIT_FIELD (op0);
8180 op0 = expand_expr (treeop0, NULL_RTX, mode,
8181 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8182 if (GET_MODE (op0) == mode)
8185 /* If OP0 is a constant, just convert it into the proper mode. */
8186 else if (CONSTANT_P (op0))
8188 tree inner_type = TREE_TYPE (treeop0);
8189 machine_mode inner_mode = GET_MODE (op0);
8191 if (inner_mode == VOIDmode)
8192 inner_mode = TYPE_MODE (inner_type);
8194 if (modifier == EXPAND_INITIALIZER)
8195 op0 = lowpart_subreg (mode, op0, inner_mode);
8196 else
8197 op0 = convert_modes (mode, inner_mode, op0,
8198 TYPE_UNSIGNED (inner_type));
8201 else if (modifier == EXPAND_INITIALIZER)
8202 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8204 else if (target == 0)
8205 op0 = convert_to_mode (mode, op0,
8206 TYPE_UNSIGNED (TREE_TYPE
8207 (treeop0)));
8208 else
8210 convert_move (target, op0,
8211 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8212 op0 = target;
8215 return REDUCE_BIT_FIELD (op0);
8217 case ADDR_SPACE_CONVERT_EXPR:
8219 tree treeop0_type = TREE_TYPE (treeop0);
8221 gcc_assert (POINTER_TYPE_P (type));
8222 gcc_assert (POINTER_TYPE_P (treeop0_type));
8224 addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8225 addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8227 /* Conversions between pointers to the same address space should
8228 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8229 gcc_assert (as_to != as_from);
8231 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8233 /* Ask target code to handle conversion between pointers
8234 to overlapping address spaces. */
8235 if (targetm.addr_space.subset_p (as_to, as_from)
8236 || targetm.addr_space.subset_p (as_from, as_to))
8238 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8240 else
8242 /* For disjoint address spaces, converting anything but a null
8243 pointer invokes undefined behaviour. We truncate or extend the
8244 value as if we'd converted via integers, which handles 0 as
8245 required, and all others as the programmer likely expects. */
8246 #ifndef POINTERS_EXTEND_UNSIGNED
8247 const int POINTERS_EXTEND_UNSIGNED = 1;
8248 #endif
8249 op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8250 op0, POINTERS_EXTEND_UNSIGNED);
8252 gcc_assert (op0);
8253 return op0;
8256 case POINTER_PLUS_EXPR:
8257 /* Even though the sizetype mode and the pointer's mode can be different,
8258 expand is able to handle this correctly and get the correct result out
8259 of the PLUS_EXPR code. */
8260 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8261 if sizetype precision is smaller than pointer precision. */
8262 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8263 treeop1 = fold_convert_loc (loc, type,
8264 fold_convert_loc (loc, ssizetype,
8265 treeop1));
8266 /* If sizetype precision is larger than pointer precision, truncate the
8267 offset to have matching modes. */
8268 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8269 treeop1 = fold_convert_loc (loc, type, treeop1);
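/* Example (hypothetical target with 64-bit pointers and 32-bit
   sizetype): for "p + (sizetype) -4" the offset is first viewed as
   ssizetype so it extends to -4 rather than 0xfffffffc, yielding the
   expected "p - 4" after conversion to the pointer type.  */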
8271 case PLUS_EXPR:
8272 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8273 something else, make sure we add the register to the constant and
8274 then to the other thing. This case can occur during strength
8275 reduction and doing it this way will produce better code if the
8276 frame pointer or argument pointer is eliminated.
8278 fold-const.c will ensure that the constant is always in the inner
8279 PLUS_EXPR, so the only case we need to do anything about is if
8280 sp, ap, or fp is our second argument, in which case we must swap
8281 the innermost first argument and our second argument. */
8283 if (TREE_CODE (treeop0) == PLUS_EXPR
8284 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8285 && TREE_CODE (treeop1) == VAR_DECL
8286 && (DECL_RTL (treeop1) == frame_pointer_rtx
8287 || DECL_RTL (treeop1) == stack_pointer_rtx
8288 || DECL_RTL (treeop1) == arg_pointer_rtx))
8290 gcc_unreachable ();
8293 /* If the result is to be ptr_mode and we are adding an integer to
8294 something, we might be forming a constant. So try to use
8295 plus_constant. If it produces a sum and we can't accept it,
8296 use force_operand. This allows P = &ARR[const] to generate
8297 efficient code on machines where a SYMBOL_REF is not a valid
8298 address.
8300 If this is an EXPAND_SUM call, always return the sum. */
8301 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8302 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8304 if (modifier == EXPAND_STACK_PARM)
8305 target = 0;
8306 if (TREE_CODE (treeop0) == INTEGER_CST
8307 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8308 && TREE_CONSTANT (treeop1))
8310 rtx constant_part;
8311 HOST_WIDE_INT wc;
8312 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8314 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8315 EXPAND_SUM);
8316 /* Use wi::shwi to ensure that the constant is
8317 truncated according to the mode of OP1, then sign extended
8318 to a HOST_WIDE_INT. Using the constant directly can result
8319 in non-canonical RTL in a 64x32 cross compile. */
8320 wc = TREE_INT_CST_LOW (treeop0);
8321 constant_part =
8322 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8323 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8324 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8325 op1 = force_operand (op1, target);
8326 return REDUCE_BIT_FIELD (op1);
8329 else if (TREE_CODE (treeop1) == INTEGER_CST
8330 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8331 && TREE_CONSTANT (treeop0))
8333 rtx constant_part;
8334 HOST_WIDE_INT wc;
8335 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8337 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8338 (modifier == EXPAND_INITIALIZER
8339 ? EXPAND_INITIALIZER : EXPAND_SUM));
8340 if (! CONSTANT_P (op0))
8342 op1 = expand_expr (treeop1, NULL_RTX,
8343 VOIDmode, modifier);
8344 /* Return a PLUS if modifier says it's OK. */
8345 if (modifier == EXPAND_SUM
8346 || modifier == EXPAND_INITIALIZER)
8347 return simplify_gen_binary (PLUS, mode, op0, op1);
8348 goto binop2;
8350 /* Use wi::shwi to ensure that the constant is
8351 truncated according to the mode of OP1, then sign extended
8352 to a HOST_WIDE_INT. Using the constant directly can result
8353 in non-canonical RTL in a 64x32 cross compile. */
8354 wc = TREE_INT_CST_LOW (treeop1);
8355 constant_part
8356 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8357 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8358 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8359 op0 = force_operand (op0, target);
8360 return REDUCE_BIT_FIELD (op0);
8364 /* Use TER to expand pointer addition of a negated value
8365 as pointer subtraction. */
8366 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8367 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8368 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8369 && TREE_CODE (treeop1) == SSA_NAME
8370 && TYPE_MODE (TREE_TYPE (treeop0))
8371 == TYPE_MODE (TREE_TYPE (treeop1)))
8373 gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8374 if (def)
8376 treeop1 = gimple_assign_rhs1 (def);
8377 code = MINUS_EXPR;
8378 goto do_minus;
8382 /* No sense saving up arithmetic to be done
8383 if it's all in the wrong mode to form part of an address.
8384 And force_operand won't know whether to sign-extend or
8385 zero-extend. */
8386 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8387 || mode != ptr_mode)
8389 expand_operands (treeop0, treeop1,
8390 subtarget, &op0, &op1, EXPAND_NORMAL);
8391 if (op0 == const0_rtx)
8392 return op1;
8393 if (op1 == const0_rtx)
8394 return op0;
8395 goto binop2;
8398 expand_operands (treeop0, treeop1,
8399 subtarget, &op0, &op1, modifier);
8400 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8402 case MINUS_EXPR:
8403 do_minus:
8404 /* For initializers, we are allowed to return a MINUS of two
8405 symbolic constants. Here we handle all cases when both operands
8406 are constant. */
8407 /* Handle difference of two symbolic constants,
8408 for the sake of an initializer. */
8409 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8410 && really_constant_p (treeop0)
8411 && really_constant_p (treeop1))
8413 expand_operands (treeop0, treeop1,
8414 NULL_RTX, &op0, &op1, modifier);
8416 /* If the last operand is a CONST_INT, use plus_constant of
8417 the negated constant. Else make the MINUS. */
8418 if (CONST_INT_P (op1))
8419 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8420 -INTVAL (op1)));
8421 else
8422 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8425 /* No sense saving up arithmetic to be done
8426 if it's all in the wrong mode to form part of an address.
8427 And force_operand won't know whether to sign-extend or
8428 zero-extend. */
8429 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8430 || mode != ptr_mode)
8431 goto binop;
8433 expand_operands (treeop0, treeop1,
8434 subtarget, &op0, &op1, modifier);
8436 /* Convert A - const to A + (-const). */
8437 if (CONST_INT_P (op1))
8439 op1 = negate_rtx (mode, op1);
8440 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8443 goto binop2;
8445 case WIDEN_MULT_PLUS_EXPR:
8446 case WIDEN_MULT_MINUS_EXPR:
8447 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8448 op2 = expand_normal (treeop2);
8449 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8450 target, unsignedp);
8451 return target;
8453 case WIDEN_MULT_EXPR:
8454 /* If first operand is constant, swap them.
8455 Thus the following special case checks need only
8456 check the second operand. */
8457 if (TREE_CODE (treeop0) == INTEGER_CST)
8458 std::swap (treeop0, treeop1);
8460 /* First, check if we have a multiplication of one signed and one
8461 unsigned operand. */
8462 if (TREE_CODE (treeop1) != INTEGER_CST
8463 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8464 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8466 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8467 this_optab = usmul_widen_optab;
8468 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8469 != CODE_FOR_nothing)
8471 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8472 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8473 EXPAND_NORMAL);
8474 else
8475 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8476 EXPAND_NORMAL);
8477 /* op0 and op1 might still be constant, despite the above
8478 != INTEGER_CST check. Handle it. */
8479 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8481 op0 = convert_modes (innermode, mode, op0, true);
8482 op1 = convert_modes (innermode, mode, op1, false);
8483 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8484 target, unsignedp));
8486 goto binop3;
8489 /* Check for a multiplication with matching signedness. */
8490 else if ((TREE_CODE (treeop1) == INTEGER_CST
8491 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8492 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8493 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8495 tree op0type = TREE_TYPE (treeop0);
8496 machine_mode innermode = TYPE_MODE (op0type);
8497 bool zextend_p = TYPE_UNSIGNED (op0type);
8498 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8499 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8501 if (TREE_CODE (treeop0) != INTEGER_CST)
8503 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8504 != CODE_FOR_nothing)
8506 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8507 EXPAND_NORMAL);
8508 /* op0 and op1 might still be constant, despite the above
8509 != INTEGER_CST check. Handle it. */
8510 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8512 widen_mult_const:
8513 op0 = convert_modes (innermode, mode, op0, zextend_p);
8514 op1
8515 = convert_modes (innermode, mode, op1,
8516 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8517 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8518 target,
8519 unsignedp));
8521 temp = expand_widening_mult (mode, op0, op1, target,
8522 unsignedp, this_optab);
8523 return REDUCE_BIT_FIELD (temp);
8525 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8526 != CODE_FOR_nothing
8527 && innermode == word_mode)
8529 rtx htem, hipart;
8530 op0 = expand_normal (treeop0);
8531 if (TREE_CODE (treeop1) == INTEGER_CST)
8532 op1 = convert_modes (innermode, mode,
8533 expand_normal (treeop1),
8534 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8535 else
8536 op1 = expand_normal (treeop1);
8537 /* op0 and op1 might still be constant, despite the above
8538 != INTEGER_CST check. Handle it. */
8539 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8540 goto widen_mult_const;
8541 temp = expand_binop (mode, other_optab, op0, op1, target,
8542 unsignedp, OPTAB_LIB_WIDEN);
8543 hipart = gen_highpart (innermode, temp);
8544 htem = expand_mult_highpart_adjust (innermode, hipart,
8545 op0, op1, hipart,
8546 zextend_p);
8547 if (htem != hipart)
8548 emit_move_insn (hipart, htem);
8549 return REDUCE_BIT_FIELD (temp);
8553 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8554 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8555 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8556 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8558 case FMA_EXPR:
8560 optab opt = fma_optab;
8561 gimple *def0, *def2;
8563 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8564 call. */
8565 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8567 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8568 tree call_expr;
8570 gcc_assert (fn != NULL_TREE);
8571 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8572 return expand_builtin (call_expr, target, subtarget, mode, false);
8575 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8576 /* The multiplication is commutative - look at its 2nd operand
8577 if the first isn't fed by a negate. */
8578 if (!def0)
8580 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8581 /* Swap operands if the 2nd operand is fed by a negate. */
8582 if (def0)
8583 std::swap (treeop0, treeop1);
8585 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8587 op0 = op2 = NULL;
8589 if (def0 && def2
8590 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8592 opt = fnms_optab;
8593 op0 = expand_normal (gimple_assign_rhs1 (def0));
8594 op2 = expand_normal (gimple_assign_rhs1 (def2));
8596 else if (def0
8597 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8599 opt = fnma_optab;
8600 op0 = expand_normal (gimple_assign_rhs1 (def0));
8602 else if (def2
8603 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8605 opt = fms_optab;
8606 op2 = expand_normal (gimple_assign_rhs1 (def2));
8609 if (op0 == NULL)
8610 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8611 if (op2 == NULL)
8612 op2 = expand_normal (treeop2);
8613 op1 = expand_normal (treeop1);
8615 return expand_ternary_op (TYPE_MODE (type), opt,
8616 op0, op1, op2, target, 0);
8619 case MULT_EXPR:
8620 /* If this is a fixed-point operation, then we cannot use the code
8621 below because "expand_mult" doesn't support sat/no-sat fixed-point
8622 multiplications. */
8623 if (ALL_FIXED_POINT_MODE_P (mode))
8624 goto binop;
8626 /* If first operand is constant, swap them.
8627 Thus the following special case checks need only
8628 check the second operand. */
8629 if (TREE_CODE (treeop0) == INTEGER_CST)
8630 std::swap (treeop0, treeop1);
8632 /* Attempt to return something suitable for generating an
8633 indexed address, for machines that support that. */
8635 if (modifier == EXPAND_SUM && mode == ptr_mode
8636 && tree_fits_shwi_p (treeop1))
8638 tree exp1 = treeop1;
8640 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8641 EXPAND_SUM);
8643 if (!REG_P (op0))
8644 op0 = force_operand (op0, NULL_RTX);
8645 if (!REG_P (op0))
8646 op0 = copy_to_mode_reg (mode, op0);
8648 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8649 gen_int_mode (tree_to_shwi (exp1),
8650 TYPE_MODE (TREE_TYPE (exp1)))));
8653 if (modifier == EXPAND_STACK_PARM)
8654 target = 0;
8656 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8657 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8659 case TRUNC_DIV_EXPR:
8660 case FLOOR_DIV_EXPR:
8661 case CEIL_DIV_EXPR:
8662 case ROUND_DIV_EXPR:
8663 case EXACT_DIV_EXPR:
8664 /* If this is a fixed-point operation, then we cannot use the code
8665 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8666 divisions. */
8667 if (ALL_FIXED_POINT_MODE_P (mode))
8668 goto binop;
8670 if (modifier == EXPAND_STACK_PARM)
8671 target = 0;
8672 /* Possible optimization: compute the dividend with EXPAND_SUM
8673 then if the divisor is constant can optimize the case
8674 where some terms of the dividend have coeffs divisible by it. */
8675 expand_operands (treeop0, treeop1,
8676 subtarget, &op0, &op1, EXPAND_NORMAL);
8677 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8679 case RDIV_EXPR:
8680 goto binop;
8682 case MULT_HIGHPART_EXPR:
8683 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8684 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8685 gcc_assert (temp);
8686 return temp;
8688 case TRUNC_MOD_EXPR:
8689 case FLOOR_MOD_EXPR:
8690 case CEIL_MOD_EXPR:
8691 case ROUND_MOD_EXPR:
8692 if (modifier == EXPAND_STACK_PARM)
8693 target = 0;
8694 expand_operands (treeop0, treeop1,
8695 subtarget, &op0, &op1, EXPAND_NORMAL);
8696 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8698 case FIXED_CONVERT_EXPR:
8699 op0 = expand_normal (treeop0);
8700 if (target == 0 || modifier == EXPAND_STACK_PARM)
8701 target = gen_reg_rtx (mode);
8703 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8704 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8705 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8706 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8707 else
8708 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8709 return target;
8711 case FIX_TRUNC_EXPR:
8712 op0 = expand_normal (treeop0);
8713 if (target == 0 || modifier == EXPAND_STACK_PARM)
8714 target = gen_reg_rtx (mode);
8715 expand_fix (target, op0, unsignedp);
8716 return target;
8718 case FLOAT_EXPR:
8719 op0 = expand_normal (treeop0);
8720 if (target == 0 || modifier == EXPAND_STACK_PARM)
8721 target = gen_reg_rtx (mode);
8722 /* expand_float can't figure out what to do if FROM has VOIDmode.
8723 So give it the correct mode. With -O, cse will optimize this. */
8724 if (GET_MODE (op0) == VOIDmode)
8725 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8726 op0);
8727 expand_float (target, op0,
8728 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8729 return target;
8731 case NEGATE_EXPR:
8732 op0 = expand_expr (treeop0, subtarget,
8733 VOIDmode, EXPAND_NORMAL);
8734 if (modifier == EXPAND_STACK_PARM)
8735 target = 0;
8736 temp = expand_unop (mode,
8737 optab_for_tree_code (NEGATE_EXPR, type,
8738 optab_default),
8739 op0, target, 0);
8740 gcc_assert (temp);
8741 return REDUCE_BIT_FIELD (temp);
8743 case ABS_EXPR:
8744 op0 = expand_expr (treeop0, subtarget,
8745 VOIDmode, EXPAND_NORMAL);
8746 if (modifier == EXPAND_STACK_PARM)
8747 target = 0;
8749 /* ABS_EXPR is not valid for complex arguments. */
8750 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8751 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8753 /* Unsigned abs is simply the operand. Testing here means we don't
8754 risk generating incorrect code below. */
8755 if (TYPE_UNSIGNED (type))
8756 return op0;
8758 return expand_abs (mode, op0, target, unsignedp,
8759 safe_from_p (target, treeop0, 1));
8761 case MAX_EXPR:
8762 case MIN_EXPR:
8763 target = original_target;
8764 if (target == 0
8765 || modifier == EXPAND_STACK_PARM
8766 || (MEM_P (target) && MEM_VOLATILE_P (target))
8767 || GET_MODE (target) != mode
8768 || (REG_P (target)
8769 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8770 target = gen_reg_rtx (mode);
8771 expand_operands (treeop0, treeop1,
8772 target, &op0, &op1, EXPAND_NORMAL);
8774 /* First try to do it with a special MIN or MAX instruction.
8775 If that does not win, use a conditional jump to select the proper
8776 value. */
8777 this_optab = optab_for_tree_code (code, type, optab_default);
8778 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8779 OPTAB_WIDEN);
8780 if (temp != 0)
8781 return temp;
8783 /* At this point, a MEM target is no longer useful; we will get better
8784 code without it. */
8786 if (! REG_P (target))
8787 target = gen_reg_rtx (mode);
8789 /* If op1 was placed in target, swap op0 and op1. */
8790 if (target != op0 && target == op1)
8791 std::swap (op0, op1);
8793 /* We generate better code and avoid problems with op1 mentioning
8794 target by forcing op1 into a pseudo if it isn't a constant. */
8795 if (! CONSTANT_P (op1))
8796 op1 = force_reg (mode, op1);
8799 enum rtx_code comparison_code;
8800 rtx cmpop1 = op1;
8802 if (code == MAX_EXPR)
8803 comparison_code = unsignedp ? GEU : GE;
8804 else
8805 comparison_code = unsignedp ? LEU : LE;
8807 /* Canonicalize to comparisons against 0. */
8808 if (op1 == const1_rtx)
8810 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8811 or (a != 0 ? a : 1) for unsigned.
8812 For MIN we are safe converting (a <= 1 ? a : 1)
8813 into (a <= 0 ? a : 1) */
8814 cmpop1 = const0_rtx;
8815 if (code == MAX_EXPR)
8816 comparison_code = unsignedp ? NE : GT;
8818 if (op1 == constm1_rtx && !unsignedp)
8820 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8821 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8822 cmpop1 = const0_rtx;
8823 if (code == MIN_EXPR)
8824 comparison_code = LT;
8827 /* Use a conditional move if possible. */
8828 if (can_conditionally_move_p (mode))
8830 rtx insn;
8832 start_sequence ();
8834 /* Try to emit the conditional move. */
8835 insn = emit_conditional_move (target, comparison_code,
8836 op0, cmpop1, mode,
8837 op0, op1, mode,
8838 unsignedp);
8840 /* If we could do the conditional move, emit the sequence,
8841 and return. */
8842 if (insn)
8844 rtx_insn *seq = get_insns ();
8845 end_sequence ();
8846 emit_insn (seq);
8847 return target;
8850 /* Otherwise discard the sequence and fall back to code with
8851 branches. */
8852 end_sequence ();
8855 if (target != op0)
8856 emit_move_insn (target, op0);
8858 lab = gen_label_rtx ();
8859 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8860 unsignedp, mode, NULL_RTX, NULL, lab,
8861 -1);
8863 emit_move_insn (target, op1);
8864 emit_label (lab);
8865 return target;
8867 case BIT_NOT_EXPR:
8868 op0 = expand_expr (treeop0, subtarget,
8869 VOIDmode, EXPAND_NORMAL);
8870 if (modifier == EXPAND_STACK_PARM)
8871 target = 0;
8872 /* In case we have to reduce the result to bitfield precision
8873 for unsigned bitfield expand this as XOR with a proper constant
8874 instead. */
8875 if (reduce_bit_field && TYPE_UNSIGNED (type))
8877 wide_int mask = wi::mask (TYPE_PRECISION (type),
8878 false, GET_MODE_PRECISION (mode));
8880 temp = expand_binop (mode, xor_optab, op0,
8881 immed_wide_int_const (mask, mode),
8882 target, 1, OPTAB_LIB_WIDEN);
8884 else
8885 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8886 gcc_assert (temp);
8887 return temp;
8889 /* ??? Can optimize bitwise operations with one arg constant.
8890 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8891 and (a bitwise1 b) bitwise2 b (etc)
8892 but that is probably not worth while. */
8894 case BIT_AND_EXPR:
8895 case BIT_IOR_EXPR:
8896 case BIT_XOR_EXPR:
8897 goto binop;
8899 case LROTATE_EXPR:
8900 case RROTATE_EXPR:
8901 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8902 || (GET_MODE_PRECISION (TYPE_MODE (type))
8903 == TYPE_PRECISION (type)));
8904 /* fall through */
8906 case LSHIFT_EXPR:
8907 case RSHIFT_EXPR:
8909 /* If this is a fixed-point operation, then we cannot use the code
8910 below because "expand_shift" doesn't support sat/no-sat fixed-point
8911 shifts. */
8912 if (ALL_FIXED_POINT_MODE_P (mode))
8913 goto binop;
8915 if (! safe_from_p (subtarget, treeop1, 1))
8916 subtarget = 0;
8917 if (modifier == EXPAND_STACK_PARM)
8918 target = 0;
8919 op0 = expand_expr (treeop0, subtarget,
8920 VOIDmode, EXPAND_NORMAL);
8922 /* Left shift optimization when shifting across word_size boundary.
8924 If mode == GET_MODE_WIDER_MODE (word_mode), then normally there is no
8925 native instruction to support this wide-mode left shift. Given the
8926 scenario below:
8928 Type A = (Type) B << C
8930 |<           T            >|
8931 |  dest_high  |  dest_low  |
8933 |  word_size  |
8935 If the shift amount C causes B to be shifted across the word size
8936 boundary, i.e. part of B is shifted into the high half of the
8937 destination register while part of B remains in the low half, then
8938 GCC uses the following left shift expansion logic:
8940 1. Initialize dest_low to B.
8941 2. Initialize every bit of dest_high to the sign bit of B.
8942 3. Logically left shift dest_low by C bits to finalize dest_low.
8943 The value of dest_low before this shift is kept in a temp D.
8944 4. Logically left shift dest_high by C bits.
8945 5. Logically right shift D by (word_size - C) bits.
8946 6. OR the results of steps 4 and 5 to finalize dest_high.
8948 However, by checking the gimple statements, if operand B comes from a
8949 sign extension, then we can simplify the above expansion to:
8951 1. dest_high = src_low >> (word_size - C).
8952 2. dest_low = src_low << C.
8954 A single arithmetic right shift accomplishes steps 2, 4, 5 and 6,
8955 reducing the number of steps needed from 6 to 2. */
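/* Worked example (illustrative): with a 32-bit word_mode and a 64-bit
   mode, "(long long) b << 8" where b is a 32-bit signed value satisfies
   C + 32 >= word_size, so the expansion below emits only

     dest_high = b >> 24;   -- one arithmetic right shift
     dest_low  = b << 8;    -- one logical left shift
*/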
8957 temp = NULL_RTX;
8958 if (code == LSHIFT_EXPR
8959 && target
8960 && REG_P (target)
8961 && ! unsignedp
8962 && mode == GET_MODE_WIDER_MODE (word_mode)
8963 && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
8964 && TREE_CONSTANT (treeop1)
8965 && TREE_CODE (treeop0) == SSA_NAME)
8967 gimple *def = SSA_NAME_DEF_STMT (treeop0);
8968 if (is_gimple_assign (def)
8969 && gimple_assign_rhs_code (def) == NOP_EXPR)
8971 machine_mode rmode = TYPE_MODE
8972 (TREE_TYPE (gimple_assign_rhs1 (def)));
8974 if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
8975 && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
8976 && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
8977 >= GET_MODE_BITSIZE (word_mode)))
8979 rtx_insn *seq, *seq_old;
8980 unsigned int high_off = subreg_highpart_offset (word_mode,
8981 mode);
8982 rtx low = lowpart_subreg (word_mode, op0, mode);
8983 rtx dest_low = lowpart_subreg (word_mode, target, mode);
8984 rtx dest_high = simplify_gen_subreg (word_mode, target,
8985 mode, high_off);
8986 HOST_WIDE_INT ramount = (BITS_PER_WORD
8987 - TREE_INT_CST_LOW (treeop1));
8988 tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
8990 start_sequence ();
8991 /* dest_high = src_low >> (word_size - C). */
8992 temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
8993 rshift, dest_high, unsignedp);
8994 if (temp != dest_high)
8995 emit_move_insn (dest_high, temp);
8997 /* dest_low = src_low << C. */
8998 temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
8999 treeop1, dest_low, unsignedp);
9000 if (temp != dest_low)
9001 emit_move_insn (dest_low, temp);
9003 seq = get_insns ();
9004 end_sequence ();
9005 temp = target;
9007 if (have_insn_for (ASHIFT, mode))
9009 bool speed_p = optimize_insn_for_speed_p ();
9010 start_sequence ();
9011 rtx ret_old = expand_variable_shift (code, mode, op0,
9012 treeop1, target,
9013 unsignedp);
9015 seq_old = get_insns ();
9016 end_sequence ();
9017 if (seq_cost (seq, speed_p)
9018 >= seq_cost (seq_old, speed_p))
9020 seq = seq_old;
9021 temp = ret_old;
9024 emit_insn (seq);
9029 if (temp == NULL_RTX)
9030 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9031 unsignedp);
9032 if (code == LSHIFT_EXPR)
9033 temp = REDUCE_BIT_FIELD (temp);
9034 return temp;
9037 /* Could determine the answer when only additive constants differ. Also,
9038 the addition of one can be handled by changing the condition. */
9039 case LT_EXPR:
9040 case LE_EXPR:
9041 case GT_EXPR:
9042 case GE_EXPR:
9043 case EQ_EXPR:
9044 case NE_EXPR:
9045 case UNORDERED_EXPR:
9046 case ORDERED_EXPR:
9047 case UNLT_EXPR:
9048 case UNLE_EXPR:
9049 case UNGT_EXPR:
9050 case UNGE_EXPR:
9051 case UNEQ_EXPR:
9052 case LTGT_EXPR:
9054 temp = do_store_flag (ops,
9055 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9056 tmode != VOIDmode ? tmode : mode);
9057 if (temp)
9058 return temp;
9060 /* Use a compare and a jump for BLKmode comparisons, or for function
9061 type comparisons if have_canonicalize_funcptr_for_compare. */
9063 if ((target == 0
9064 || modifier == EXPAND_STACK_PARM
9065 || ! safe_from_p (target, treeop0, 1)
9066 || ! safe_from_p (target, treeop1, 1)
9067 /* Make sure we don't have a hard reg (such as function's return
9068 value) live across basic blocks, if not optimizing. */
9069 || (!optimize && REG_P (target)
9070 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9071 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9073 emit_move_insn (target, const0_rtx);
9075 rtx_code_label *lab1 = gen_label_rtx ();
9076 jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9078 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9079 emit_move_insn (target, constm1_rtx);
9080 else
9081 emit_move_insn (target, const1_rtx);
9083 emit_label (lab1);
9084 return target;
9086 case COMPLEX_EXPR:
9087 /* Get the rtx code of the operands. */
9088 op0 = expand_normal (treeop0);
9089 op1 = expand_normal (treeop1);
9091 if (!target)
9092 target = gen_reg_rtx (TYPE_MODE (type));
9093 else
9094 /* If target overlaps with op1, then either we need to force
9095 op1 into a pseudo (if target also overlaps with op0),
9096 or write the complex parts in reverse order. */
9097 switch (GET_CODE (target))
9099 case CONCAT:
9100 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9102 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9104 complex_expr_force_op1:
9105 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9106 emit_move_insn (temp, op1);
9107 op1 = temp;
9108 break;
9110 complex_expr_swap_order:
9111 /* Move the imaginary (op1) and real (op0) parts to their
9112 location. */
9113 write_complex_part (target, op1, true);
9114 write_complex_part (target, op0, false);
9116 return target;
9118 break;
9119 case MEM:
9120 temp = adjust_address_nv (target,
9121 GET_MODE_INNER (GET_MODE (target)), 0);
9122 if (reg_overlap_mentioned_p (temp, op1))
9124 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9125 temp = adjust_address_nv (target, imode,
9126 GET_MODE_SIZE (imode));
9127 if (reg_overlap_mentioned_p (temp, op0))
9128 goto complex_expr_force_op1;
9129 goto complex_expr_swap_order;
9131 break;
9132 default:
9133 if (reg_overlap_mentioned_p (target, op1))
9135 if (reg_overlap_mentioned_p (target, op0))
9136 goto complex_expr_force_op1;
9137 goto complex_expr_swap_order;
9139 break;
9142 /* Move the real (op0) and imaginary (op1) parts to their location. */
9143 write_complex_part (target, op0, false);
9144 write_complex_part (target, op1, true);
9146 return target;
9148 case WIDEN_SUM_EXPR:
9150 tree oprnd0 = treeop0;
9151 tree oprnd1 = treeop1;
9153 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9154 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9155 target, unsignedp);
9156 return target;
9159 case REDUC_MAX_EXPR:
9160 case REDUC_MIN_EXPR:
9161 case REDUC_PLUS_EXPR:
9163 op0 = expand_normal (treeop0);
9164 this_optab = optab_for_tree_code (code, type, optab_default);
9165 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9167 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9169 struct expand_operand ops[2];
9170 enum insn_code icode = optab_handler (this_optab, vec_mode);
9172 create_output_operand (&ops[0], target, mode);
9173 create_input_operand (&ops[1], op0, vec_mode);
9174 if (maybe_expand_insn (icode, 2, ops))
9176 target = ops[0].value;
9177 if (GET_MODE (target) != mode)
9178 return gen_lowpart (tmode, target);
9179 return target;
9182 /* Fall back to optab with vector result, and then extract scalar. */
9183 this_optab = scalar_reduc_to_vector (this_optab, type);
9184 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9185 gcc_assert (temp);
9186 /* The tree code produces a scalar result, but (somewhat by convention)
9187 the optab produces a vector with the result in element 0 if
9188 little-endian, or element N-1 if big-endian. So pull the scalar
9189 result out of that element. */
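/* A worked example (illustrative, not target-specific): for a
   V4SImode input on a big-endian target, index is 4 - 1 == 3 and
   bitsize is 32, so the scalar is the 32-bit field at bit offset
   3 * 32 == 96; on a little-endian target it is the field at
   offset 0. */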
9190 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9191 int bitsize = GET_MODE_UNIT_BITSIZE (vec_mode);
9192 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9193 target, mode, mode, false);
9194 gcc_assert (temp);
9195 return temp;
9198 case VEC_UNPACK_HI_EXPR:
9199 case VEC_UNPACK_LO_EXPR:
9201 op0 = expand_normal (treeop0);
9202 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9203 target, unsignedp);
9204 gcc_assert (temp);
9205 return temp;
9208 case VEC_UNPACK_FLOAT_HI_EXPR:
9209 case VEC_UNPACK_FLOAT_LO_EXPR:
9211 op0 = expand_normal (treeop0);
9212 /* The signedness is determined from input operand. */
9213 temp = expand_widen_pattern_expr
9214 (ops, op0, NULL_RTX, NULL_RTX,
9215 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9217 gcc_assert (temp);
9218 return temp;
9221 case VEC_WIDEN_MULT_HI_EXPR:
9222 case VEC_WIDEN_MULT_LO_EXPR:
9223 case VEC_WIDEN_MULT_EVEN_EXPR:
9224 case VEC_WIDEN_MULT_ODD_EXPR:
9225 case VEC_WIDEN_LSHIFT_HI_EXPR:
9226 case VEC_WIDEN_LSHIFT_LO_EXPR:
9227 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9228 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9229 target, unsignedp);
9230 gcc_assert (target);
9231 return target;
9233 case VEC_PACK_TRUNC_EXPR:
9234 case VEC_PACK_SAT_EXPR:
9235 case VEC_PACK_FIX_TRUNC_EXPR:
9236 mode = TYPE_MODE (TREE_TYPE (treeop0));
9237 goto binop;
9239 case VEC_PERM_EXPR:
9240 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9241 op2 = expand_normal (treeop2);
9243 /* Careful here: if the target doesn't support integral vector modes,
9244 a constant selection vector could wind up smooshed into a normal
9245 integral constant. */
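/* For instance (an illustrative case): a 4-element QImode selector
   {0, 1, 2, 3} may have been folded into the single SImode constant
   0x03020100 on a little-endian target; the code below rebuilds the
   CONST_VECTOR via simplify_subreg so expand_vec_perm sees a real
   selection vector. */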
9246 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9248 tree sel_type = TREE_TYPE (treeop2);
9249 machine_mode vmode
9250 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9251 TYPE_VECTOR_SUBPARTS (sel_type));
9252 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9253 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9254 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9256 else
9257 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9259 temp = expand_vec_perm (mode, op0, op1, op2, target);
9260 gcc_assert (temp);
9261 return temp;
9263 case DOT_PROD_EXPR:
9265 tree oprnd0 = treeop0;
9266 tree oprnd1 = treeop1;
9267 tree oprnd2 = treeop2;
9268 rtx op2;
9270 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9271 op2 = expand_normal (oprnd2);
9272 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9273 target, unsignedp);
9274 return target;
9277 case SAD_EXPR:
9279 tree oprnd0 = treeop0;
9280 tree oprnd1 = treeop1;
9281 tree oprnd2 = treeop2;
9282 rtx op2;
9284 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9285 op2 = expand_normal (oprnd2);
9286 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9287 target, unsignedp);
9288 return target;
9291 case REALIGN_LOAD_EXPR:
9293 tree oprnd0 = treeop0;
9294 tree oprnd1 = treeop1;
9295 tree oprnd2 = treeop2;
9296 rtx op2;
9298 this_optab = optab_for_tree_code (code, type, optab_default);
9299 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9300 op2 = expand_normal (oprnd2);
9301 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9302 target, unsignedp);
9303 gcc_assert (temp);
9304 return temp;
9307 case COND_EXPR:
9309 /* A COND_EXPR with its type being VOID_TYPE represents a
9310 conditional jump and is handled in
9311 expand_gimple_cond_expr. */
9312 gcc_assert (!VOID_TYPE_P (type));
9314 /* Note that COND_EXPRs whose type is a structure or union
9315 are required to be constructed to contain assignments of
9316 a temporary variable, so that we can evaluate them here
9317 for side effect only. If type is void, we must do likewise. */
9319 gcc_assert (!TREE_ADDRESSABLE (type)
9320 && !ignore
9321 && TREE_TYPE (treeop1) != void_type_node
9322 && TREE_TYPE (treeop2) != void_type_node);
9324 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9325 if (temp)
9326 return temp;
9328 /* If we are not to produce a result, we have no target. Otherwise,
9329 if a target was specified use it; it will not be used as an
9330 intermediate target unless it is safe. If no target, use a
9331 temporary. */
9333 if (modifier != EXPAND_STACK_PARM
9334 && original_target
9335 && safe_from_p (original_target, treeop0, 1)
9336 && GET_MODE (original_target) == mode
9337 && !MEM_P (original_target))
9338 temp = original_target;
9339 else
9340 temp = assign_temp (type, 0, 1);
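/* What follows is the classic diamond; a sketch of the emitted
   sequence (assuming the cmove attempt above failed):

       if (!<treeop0>) goto lab0;
       temp = <treeop1>;
       goto lab1;
     lab0:
       temp = <treeop2>;
     lab1:  */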
9342 do_pending_stack_adjust ();
9343 NO_DEFER_POP;
9344 rtx_code_label *lab0 = gen_label_rtx ();
9345 rtx_code_label *lab1 = gen_label_rtx ();
9346 jumpifnot (treeop0, lab0, -1);
9347 store_expr (treeop1, temp,
9348 modifier == EXPAND_STACK_PARM,
9349 false, false);
9351 emit_jump_insn (targetm.gen_jump (lab1));
9352 emit_barrier ();
9353 emit_label (lab0);
9354 store_expr (treeop2, temp,
9355 modifier == EXPAND_STACK_PARM,
9356 false, false);
9358 emit_label (lab1);
9359 OK_DEFER_POP;
9360 return temp;
9363 case VEC_COND_EXPR:
9364 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9365 return target;
9367 default:
9368 gcc_unreachable ();
9371 /* Here to do an ordinary binary operator. */
9372 binop:
9373 expand_operands (treeop0, treeop1,
9374 subtarget, &op0, &op1, EXPAND_NORMAL);
9375 binop2:
9376 this_optab = optab_for_tree_code (code, type, optab_default);
9377 binop3:
9378 if (modifier == EXPAND_STACK_PARM)
9379 target = 0;
9380 temp = expand_binop (mode, this_optab, op0, op1, target,
9381 unsignedp, OPTAB_LIB_WIDEN);
9382 gcc_assert (temp);
9383 /* Bitwise operations do not need bitfield reduction, as we expect their
9384 operands to be properly truncated. */
9385 if (code == BIT_XOR_EXPR
9386 || code == BIT_AND_EXPR
9387 || code == BIT_IOR_EXPR)
9388 return temp;
9389 return REDUCE_BIT_FIELD (temp);
9391 #undef REDUCE_BIT_FIELD
9394 /* Return TRUE if statement STMT is suitable for replacement.
9395 Never consider memory loads as replaceable, because those don't ever lead
9396 to constant expressions. */
9398 static bool
9399 stmt_is_replaceable_p (gimple *stmt)
9401 if (ssa_is_replaceable_p (stmt))
9403 /* Don't move around loads. */
9404 if (!gimple_assign_single_p (stmt)
9405 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9406 return true;
9408 return false;
9411 rtx
9412 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9413 enum expand_modifier modifier, rtx *alt_rtl,
9414 bool inner_reference_p)
9416 rtx op0, op1, temp, decl_rtl;
9417 tree type;
9418 int unsignedp;
9419 machine_mode mode, dmode;
9420 enum tree_code code = TREE_CODE (exp);
9421 rtx subtarget, original_target;
9422 int ignore;
9423 tree context;
9424 bool reduce_bit_field;
9425 location_t loc = EXPR_LOCATION (exp);
9426 struct separate_ops ops;
9427 tree treeop0, treeop1, treeop2;
9428 tree ssa_name = NULL_TREE;
9429 gimple *g;
9431 type = TREE_TYPE (exp);
9432 mode = TYPE_MODE (type);
9433 unsignedp = TYPE_UNSIGNED (type);
9435 treeop0 = treeop1 = treeop2 = NULL_TREE;
9436 if (!VL_EXP_CLASS_P (exp))
9437 switch (TREE_CODE_LENGTH (code))
9439 default:
9440 case 3: treeop2 = TREE_OPERAND (exp, 2);
9441 case 2: treeop1 = TREE_OPERAND (exp, 1);
9442 case 1: treeop0 = TREE_OPERAND (exp, 0);
9443 case 0: break;
9445 ops.code = code;
9446 ops.type = type;
9447 ops.op0 = treeop0;
9448 ops.op1 = treeop1;
9449 ops.op2 = treeop2;
9450 ops.location = loc;
9452 ignore = (target == const0_rtx
9453 || ((CONVERT_EXPR_CODE_P (code)
9454 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9455 && TREE_CODE (type) == VOID_TYPE));
9457 /* An operation in what may be a bit-field type needs the
9458 result to be reduced to the precision of the bit-field type,
9459 which is narrower than that of the type's mode. */
9460 reduce_bit_field = (!ignore
9461 && INTEGRAL_TYPE_P (type)
9462 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
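/* For example (illustrative): an arithmetic result of a signed 6-bit
   bit-field type computed in QImode (precision 8) must be brought
   back to 6 significant bits; see reduce_to_bit_field_precision
   below. */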
9464 /* If we are going to ignore this result, we need only do something
9465 if there is a side-effect somewhere in the expression. If there
9466 is, short-circuit the most common cases here. Note that we must
9467 not call expand_expr with anything but const0_rtx in case this
9468 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9470 if (ignore)
9472 if (! TREE_SIDE_EFFECTS (exp))
9473 return const0_rtx;
9475 /* Ensure we reference a volatile object even if value is ignored, but
9476 don't do this if all we are doing is taking its address. */
9477 if (TREE_THIS_VOLATILE (exp)
9478 && TREE_CODE (exp) != FUNCTION_DECL
9479 && mode != VOIDmode && mode != BLKmode
9480 && modifier != EXPAND_CONST_ADDRESS)
9482 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9483 if (MEM_P (temp))
9484 copy_to_reg (temp);
9485 return const0_rtx;
9488 if (TREE_CODE_CLASS (code) == tcc_unary
9489 || code == BIT_FIELD_REF
9490 || code == COMPONENT_REF
9491 || code == INDIRECT_REF)
9492 return expand_expr (treeop0, const0_rtx, VOIDmode,
9493 modifier);
9495 else if (TREE_CODE_CLASS (code) == tcc_binary
9496 || TREE_CODE_CLASS (code) == tcc_comparison
9497 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9499 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9500 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9501 return const0_rtx;
9504 target = 0;
9507 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9508 target = 0;
9510 /* Use subtarget as the target for operand 0 of a binary operation. */
9511 subtarget = get_subtarget (target);
9512 original_target = target;
9514 switch (code)
9516 case LABEL_DECL:
9518 tree function = decl_function_context (exp);
9520 temp = label_rtx (exp);
9521 temp = gen_rtx_LABEL_REF (Pmode, temp);
9523 if (function != current_function_decl
9524 && function != 0)
9525 LABEL_REF_NONLOCAL_P (temp) = 1;
9527 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9528 return temp;
9531 case SSA_NAME:
9532 /* ??? ivopts calls the expander without any preparation from
9533 out-of-ssa. So fake instructions as if this were an access to the
9534 base variable. This unnecessarily allocates a pseudo; see how we can
9535 reuse it, if partition base vars have it set already. */
9536 if (!currently_expanding_to_rtl)
9538 tree var = SSA_NAME_VAR (exp);
9539 if (var && DECL_RTL_SET_P (var))
9540 return DECL_RTL (var);
9541 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9542 LAST_VIRTUAL_REGISTER + 1);
9545 g = get_gimple_for_ssa_name (exp);
9546 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9547 if (g == NULL
9548 && modifier == EXPAND_INITIALIZER
9549 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9550 && (optimize || !SSA_NAME_VAR (exp)
9551 || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9552 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9553 g = SSA_NAME_DEF_STMT (exp);
9554 if (g)
9556 rtx r;
9557 location_t saved_loc = curr_insn_location ();
9558 location_t loc = gimple_location (g);
9559 if (loc != UNKNOWN_LOCATION)
9560 set_curr_insn_location (loc);
9561 ops.code = gimple_assign_rhs_code (g);
9562 switch (get_gimple_rhs_class (ops.code))
9564 case GIMPLE_TERNARY_RHS:
9565 ops.op2 = gimple_assign_rhs3 (g);
9566 /* Fallthru */
9567 case GIMPLE_BINARY_RHS:
9568 ops.op1 = gimple_assign_rhs2 (g);
9570 /* Try to expand conditional compare. */
9571 if (targetm.gen_ccmp_first)
9573 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9574 r = expand_ccmp_expr (g);
9575 if (r)
9576 break;
9578 /* Fallthru */
9579 case GIMPLE_UNARY_RHS:
9580 ops.op0 = gimple_assign_rhs1 (g);
9581 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9582 ops.location = loc;
9583 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9584 break;
9585 case GIMPLE_SINGLE_RHS:
9587 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9588 tmode, modifier, NULL, inner_reference_p);
9589 break;
9591 default:
9592 gcc_unreachable ();
9594 set_curr_insn_location (saved_loc);
9595 if (REG_P (r) && !REG_EXPR (r))
9596 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9597 return r;
9600 ssa_name = exp;
9601 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9602 exp = SSA_NAME_VAR (ssa_name);
9603 goto expand_decl_rtl;
9605 case PARM_DECL:
9606 case VAR_DECL:
9607 /* If a static var's type was incomplete when the decl was written,
9608 but the type is complete now, lay out the decl now. */
9609 if (DECL_SIZE (exp) == 0
9610 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9611 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9612 layout_decl (exp, 0);
9614 /* ... fall through ... */
9616 case FUNCTION_DECL:
9617 case RESULT_DECL:
9618 decl_rtl = DECL_RTL (exp);
9619 expand_decl_rtl:
9620 gcc_assert (decl_rtl);
9621 decl_rtl = copy_rtx (decl_rtl);
9622 /* Record writes to register variables. */
9623 if (modifier == EXPAND_WRITE
9624 && REG_P (decl_rtl)
9625 && HARD_REGISTER_P (decl_rtl))
9626 add_to_hard_reg_set (&crtl->asm_clobbers,
9627 GET_MODE (decl_rtl), REGNO (decl_rtl));
9629 /* Ensure the variable is marked as used even if it doesn't go through
9630 a parser. If it hasn't been used yet, write out an external
9631 definition. */
9632 if (exp)
9633 TREE_USED (exp) = 1;
9635 /* Show we haven't gotten RTL for this yet. */
9636 temp = 0;
9638 /* Variables inherited from containing functions should have
9639 been lowered by this point. */
9640 if (exp)
9641 context = decl_function_context (exp);
9642 gcc_assert (!exp
9643 || SCOPE_FILE_SCOPE_P (context)
9644 || context == current_function_decl
9645 || TREE_STATIC (exp)
9646 || DECL_EXTERNAL (exp)
9647 /* ??? C++ creates functions that are not TREE_STATIC. */
9648 || TREE_CODE (exp) == FUNCTION_DECL);
9650 /* This is the case of an array whose size is to be determined
9651 from its initializer, while the initializer is still being parsed.
9652 ??? We aren't parsing while expanding anymore. */
9654 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9655 temp = validize_mem (decl_rtl);
9657 /* If DECL_RTL is memory, we are in the normal case and the
9658 address is not valid, get the address into a register. */
9660 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9662 if (alt_rtl)
9663 *alt_rtl = decl_rtl;
9664 decl_rtl = use_anchored_address (decl_rtl);
9665 if (modifier != EXPAND_CONST_ADDRESS
9666 && modifier != EXPAND_SUM
9667 && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9668 : GET_MODE (decl_rtl),
9669 XEXP (decl_rtl, 0),
9670 MEM_ADDR_SPACE (decl_rtl)))
9671 temp = replace_equiv_address (decl_rtl,
9672 copy_rtx (XEXP (decl_rtl, 0)));
9675 /* If we got something, return it. But first, set the alignment
9676 if the address is a register. */
9677 if (temp != 0)
9679 if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9680 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9682 return temp;
9685 if (exp)
9686 dmode = DECL_MODE (exp);
9687 else
9688 dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9690 /* If the mode of DECL_RTL does not match that of the decl,
9691 there are two cases: we are dealing with a BLKmode value
9692 that is returned in a register, or we are dealing with
9693 a promoted value. In the latter case, return a SUBREG
9694 of the wanted mode, but mark it so that we know that it
9695 was already extended. */
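/* For example, on a target that promotes QImode parameters to SImode
   registers, reading such a variable in QImode yields a
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, so later
   code can omit the redundant extension (illustrative only; the
   subreg byte offset depends on endianness). */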
9696 if (REG_P (decl_rtl)
9697 && dmode != BLKmode
9698 && GET_MODE (decl_rtl) != dmode)
9700 machine_mode pmode;
9702 /* Get the signedness to be used for this variable. Ensure we get
9703 the same mode we got when the variable was declared. */
9704 if (code != SSA_NAME)
9705 pmode = promote_decl_mode (exp, &unsignedp);
9706 else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9707 && gimple_code (g) == GIMPLE_CALL
9708 && !gimple_call_internal_p (g))
9709 pmode = promote_function_mode (type, mode, &unsignedp,
9710 gimple_call_fntype (g),
9711 2);
9712 else
9713 pmode = promote_ssa_mode (ssa_name, &unsignedp);
9714 gcc_assert (GET_MODE (decl_rtl) == pmode);
9716 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9717 SUBREG_PROMOTED_VAR_P (temp) = 1;
9718 SUBREG_PROMOTED_SET (temp, unsignedp);
9719 return temp;
9722 return decl_rtl;
9724 case INTEGER_CST:
9725 /* Given that TYPE_PRECISION (type) is not always equal to
9726 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9727 the former to the latter according to the signedness of the
9728 type. */
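/* E.g. (illustrative): the value -1 of a 1-bit signed bit-field type
   whose TYPE_MODE is QImode is sign-extended from 1 bit to the
   mode's 8 bits, yielding the QImode constant -1 (0xff), whereas a
   1-bit unsigned type would zero-extend the value 1 to 1. */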
9729 temp = immed_wide_int_const (wide_int::from
9730 (exp,
9731 GET_MODE_PRECISION (TYPE_MODE (type)),
9732 TYPE_SIGN (type)),
9733 TYPE_MODE (type));
9734 return temp;
9736 case VECTOR_CST:
9738 tree tmp = NULL_TREE;
9739 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9740 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9741 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9742 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9743 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9744 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9745 return const_vector_from_tree (exp);
9746 if (GET_MODE_CLASS (mode) == MODE_INT)
9748 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9749 if (type_for_mode)
9750 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9752 if (!tmp)
9754 vec<constructor_elt, va_gc> *v;
9755 unsigned i;
9756 vec_alloc (v, VECTOR_CST_NELTS (exp));
9757 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9758 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9759 tmp = build_constructor (type, v);
9761 return expand_expr (tmp, ignore ? const0_rtx : target,
9762 tmode, modifier);
9765 case CONST_DECL:
9766 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9768 case REAL_CST:
9769 /* If optimized, generate immediate CONST_DOUBLE
9770 which will be turned into memory by reload if necessary.
9772 We used to force a register so that loop.c could see it. But
9773 this does not allow gen_* patterns to perform optimizations with
9774 the constants. It also produces two insns in cases like "x = 1.0;".
9775 On most machines, floating-point constants are not permitted in
9776 many insns, so we'd end up copying it to a register in any case.
9778 Now, we do the copying in expand_binop, if appropriate. */
9779 return const_double_from_real_value (TREE_REAL_CST (exp),
9780 TYPE_MODE (TREE_TYPE (exp)));
9782 case FIXED_CST:
9783 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9784 TYPE_MODE (TREE_TYPE (exp)));
9786 case COMPLEX_CST:
9787 /* Handle evaluating a complex constant in a CONCAT target. */
9788 if (original_target && GET_CODE (original_target) == CONCAT)
9790 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9791 rtx rtarg, itarg;
9793 rtarg = XEXP (original_target, 0);
9794 itarg = XEXP (original_target, 1);
9796 /* Move the real and imaginary parts separately. */
9797 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9798 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9800 if (op0 != rtarg)
9801 emit_move_insn (rtarg, op0);
9802 if (op1 != itarg)
9803 emit_move_insn (itarg, op1);
9805 return original_target;
9808 /* ... fall through ... */
9810 case STRING_CST:
9811 temp = expand_expr_constant (exp, 1, modifier);
9813 /* temp contains a constant address.
9814 On RISC machines where a constant address isn't valid,
9815 make some insns to get that address into a register. */
9816 if (modifier != EXPAND_CONST_ADDRESS
9817 && modifier != EXPAND_INITIALIZER
9818 && modifier != EXPAND_SUM
9819 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9820 MEM_ADDR_SPACE (temp)))
9821 return replace_equiv_address (temp,
9822 copy_rtx (XEXP (temp, 0)));
9823 return temp;
9825 case SAVE_EXPR:
9827 tree val = treeop0;
9828 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9829 inner_reference_p);
9831 if (!SAVE_EXPR_RESOLVED_P (exp))
9833 /* We can indeed still hit this case, typically via builtin
9834 expanders calling save_expr immediately before expanding
9835 something. Assume this means that we only have to deal
9836 with non-BLKmode values. */
9837 gcc_assert (GET_MODE (ret) != BLKmode);
9839 val = build_decl (curr_insn_location (),
9840 VAR_DECL, NULL, TREE_TYPE (exp));
9841 DECL_ARTIFICIAL (val) = 1;
9842 DECL_IGNORED_P (val) = 1;
9843 treeop0 = val;
9844 TREE_OPERAND (exp, 0) = treeop0;
9845 SAVE_EXPR_RESOLVED_P (exp) = 1;
9847 if (!CONSTANT_P (ret))
9848 ret = copy_to_reg (ret);
9849 SET_DECL_RTL (val, ret);
9852 return ret;
9856 case CONSTRUCTOR:
9857 /* If we don't need the result, just ensure we evaluate any
9858 subexpressions. */
9859 if (ignore)
9861 unsigned HOST_WIDE_INT idx;
9862 tree value;
9864 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9865 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9867 return const0_rtx;
9870 return expand_constructor (exp, target, modifier, false);
9872 case TARGET_MEM_REF:
9874 addr_space_t as
9875 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9876 enum insn_code icode;
9877 unsigned int align;
9879 op0 = addr_for_mem_ref (exp, as, true);
9880 op0 = memory_address_addr_space (mode, op0, as);
9881 temp = gen_rtx_MEM (mode, op0);
9882 set_mem_attributes (temp, exp, 0);
9883 set_mem_addr_space (temp, as);
9884 align = get_object_alignment (exp);
9885 if (modifier != EXPAND_WRITE
9886 && modifier != EXPAND_MEMORY
9887 && mode != BLKmode
9888 && align < GET_MODE_ALIGNMENT (mode)
9889 /* If the target does not have special handling for unaligned
9890 loads of this mode, then it can use regular moves for them. */
9891 && ((icode = optab_handler (movmisalign_optab, mode))
9892 != CODE_FOR_nothing))
9894 struct expand_operand ops[2];
9896 /* We've already validated the memory, and we're creating a
9897 new pseudo destination. The predicates really can't fail,
9898 nor can the generator. */
9899 create_output_operand (&ops[0], NULL_RTX, mode);
9900 create_fixed_operand (&ops[1], temp);
9901 expand_insn (icode, 2, ops);
9902 temp = ops[0].value;
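/* On targets that provide movmisalign patterns (e.g. unaligned
   vector moves), this emits a single misaligned load into the fresh
   pseudo instead of a bit-field extraction. */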
9904 return temp;
9907 case MEM_REF:
9909 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9910 addr_space_t as
9911 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9912 machine_mode address_mode;
9913 tree base = TREE_OPERAND (exp, 0);
9914 gimple *def_stmt;
9915 enum insn_code icode;
9916 unsigned align;
9917 /* Handle expansion of non-aliased memory with non-BLKmode. That
9918 might end up in a register. */
9919 if (mem_ref_refers_to_non_mem_p (exp))
9921 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9922 base = TREE_OPERAND (base, 0);
9923 if (offset == 0
9924 && !reverse
9925 && tree_fits_uhwi_p (TYPE_SIZE (type))
9926 && (GET_MODE_BITSIZE (DECL_MODE (base))
9927 == tree_to_uhwi (TYPE_SIZE (type))))
9928 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9929 target, tmode, modifier);
9930 if (TYPE_MODE (type) == BLKmode)
9932 temp = assign_stack_temp (DECL_MODE (base),
9933 GET_MODE_SIZE (DECL_MODE (base)));
9934 store_expr (base, temp, 0, false, false);
9935 temp = adjust_address (temp, BLKmode, offset);
9936 set_mem_size (temp, int_size_in_bytes (type));
9937 return temp;
9939 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9940 bitsize_int (offset * BITS_PER_UNIT));
9941 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9942 return expand_expr (exp, target, tmode, modifier);
9944 address_mode = targetm.addr_space.address_mode (as);
9945 base = TREE_OPERAND (exp, 0);
9946 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9948 tree mask = gimple_assign_rhs2 (def_stmt);
9949 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9950 gimple_assign_rhs1 (def_stmt), mask);
9951 TREE_OPERAND (exp, 0) = base;
9953 align = get_object_alignment (exp);
9954 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9955 op0 = memory_address_addr_space (mode, op0, as);
9956 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9958 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9959 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9960 op0 = memory_address_addr_space (mode, op0, as);
9962 temp = gen_rtx_MEM (mode, op0);
9963 set_mem_attributes (temp, exp, 0);
9964 set_mem_addr_space (temp, as);
9965 if (TREE_THIS_VOLATILE (exp))
9966 MEM_VOLATILE_P (temp) = 1;
9967 if (modifier != EXPAND_WRITE
9968 && modifier != EXPAND_MEMORY
9969 && !inner_reference_p
9970 && mode != BLKmode
9971 && align < GET_MODE_ALIGNMENT (mode))
9973 if ((icode = optab_handler (movmisalign_optab, mode))
9974 != CODE_FOR_nothing)
9976 struct expand_operand ops[2];
9978 /* We've already validated the memory, and we're creating a
9979 new pseudo destination. The predicates really can't fail,
9980 nor can the generator. */
9981 create_output_operand (&ops[0], NULL_RTX, mode);
9982 create_fixed_operand (&ops[1], temp);
9983 expand_insn (icode, 2, ops);
9984 temp = ops[0].value;
9986 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9987 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9988 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9989 (modifier == EXPAND_STACK_PARM
9990 ? NULL_RTX : target),
9991 mode, mode, false);
9993 if (reverse
9994 && modifier != EXPAND_MEMORY
9995 && modifier != EXPAND_WRITE)
9996 temp = flip_storage_order (mode, temp);
9997 return temp;
10000 case ARRAY_REF:
10003 tree array = treeop0;
10004 tree index = treeop1;
10005 tree init;
10007 /* Fold an expression like: "foo"[2].
10008 This is not done in fold so it won't happen inside &.
10009 Don't fold if this is for wide characters since it's too
10010 difficult to do correctly and this is a very rare case. */
10012 if (modifier != EXPAND_CONST_ADDRESS
10013 && modifier != EXPAND_INITIALIZER
10014 && modifier != EXPAND_MEMORY)
10016 tree t = fold_read_from_constant_string (exp);
10018 if (t)
10019 return expand_expr (t, target, tmode, modifier);
10022 /* If this is a constant index into a constant array,
10023 just get the value from the array. Handle both the cases when
10024 we have an explicit constructor and when our operand is a variable
10025 that was declared const. */
10027 if (modifier != EXPAND_CONST_ADDRESS
10028 && modifier != EXPAND_INITIALIZER
10029 && modifier != EXPAND_MEMORY
10030 && TREE_CODE (array) == CONSTRUCTOR
10031 && ! TREE_SIDE_EFFECTS (array)
10032 && TREE_CODE (index) == INTEGER_CST)
10034 unsigned HOST_WIDE_INT ix;
10035 tree field, value;
10037 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10038 field, value)
10039 if (tree_int_cst_equal (field, index))
10041 if (!TREE_SIDE_EFFECTS (value))
10042 return expand_expr (fold (value), target, tmode, modifier);
10043 break;
10047 else if (optimize >= 1
10048 && modifier != EXPAND_CONST_ADDRESS
10049 && modifier != EXPAND_INITIALIZER
10050 && modifier != EXPAND_MEMORY
10051 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10052 && TREE_CODE (index) == INTEGER_CST
10053 && (TREE_CODE (array) == VAR_DECL
10054 || TREE_CODE (array) == CONST_DECL)
10055 && (init = ctor_for_folding (array)) != error_mark_node)
10057 if (init == NULL_TREE)
10059 tree value = build_zero_cst (type);
10060 if (TREE_CODE (value) == CONSTRUCTOR)
10062 /* If VALUE is a CONSTRUCTOR, this optimization is only
10063 useful if this doesn't store the CONSTRUCTOR into
10064 memory. If it does, it is more efficient to just
10065 load the data from the array directly. */
10066 rtx ret = expand_constructor (value, target,
10067 modifier, true);
10068 if (ret == NULL_RTX)
10069 value = NULL_TREE;
10072 if (value)
10073 return expand_expr (value, target, tmode, modifier);
10075 else if (TREE_CODE (init) == CONSTRUCTOR)
10077 unsigned HOST_WIDE_INT ix;
10078 tree field, value;
10080 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10081 field, value)
10082 if (tree_int_cst_equal (field, index))
10084 if (TREE_SIDE_EFFECTS (value))
10085 break;
10087 if (TREE_CODE (value) == CONSTRUCTOR)
10089 /* If VALUE is a CONSTRUCTOR, this
10090 optimization is only useful if
10091 this doesn't store the CONSTRUCTOR
10092 into memory. If it does, it is more
10093 efficient to just load the data from
10094 the array directly. */
10095 rtx ret = expand_constructor (value, target,
10096 modifier, true);
10097 if (ret == NULL_RTX)
10098 break;
10101 return
10102 expand_expr (fold (value), target, tmode, modifier);
10105 else if (TREE_CODE (init) == STRING_CST)
10107 tree low_bound = array_ref_low_bound (exp);
10108 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10110 /* Optimize the special case of a zero lower bound.
10112 We convert the lower bound to sizetype to avoid problems
10113 with constant folding. E.g. suppose the lower bound is
10114 1 and its mode is QI. Without the conversion
10115 (ARRAY + (INDEX - (unsigned char)1))
10116 becomes
10117 (ARRAY + (-(unsigned char)1) + INDEX)
10118 which becomes
10119 (ARRAY + 255 + INDEX). Oops! */
10120 if (!integer_zerop (low_bound))
10121 index1 = size_diffop_loc (loc, index1,
10122 fold_convert_loc (loc, sizetype,
10123 low_bound));
10125 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10127 tree type = TREE_TYPE (TREE_TYPE (init));
10128 machine_mode mode = TYPE_MODE (type);
10130 if (GET_MODE_CLASS (mode) == MODE_INT
10131 && GET_MODE_SIZE (mode) == 1)
10132 return gen_int_mode (TREE_STRING_POINTER (init)
10133 [TREE_INT_CST_LOW (index1)],
10134 mode);
10139 goto normal_inner_ref;
10141 case COMPONENT_REF:
10142 /* If the operand is a CONSTRUCTOR, we can just extract the
10143 appropriate field if it is present. */
10144 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10146 unsigned HOST_WIDE_INT idx;
10147 tree field, value;
10149 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10150 idx, field, value)
10151 if (field == treeop1
10152 /* We can normally use the value of the field in the
10153 CONSTRUCTOR. However, if this is a bitfield in
10154 an integral mode that we can fit in a HOST_WIDE_INT,
10155 we must mask only the number of bits in the bitfield,
10156 since this is done implicitly by the constructor. If
10157 the bitfield does not meet either of those conditions,
10158 we can't do this optimization. */
10159 && (! DECL_BIT_FIELD (field)
10160 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10161 && (GET_MODE_PRECISION (DECL_MODE (field))
10162 <= HOST_BITS_PER_WIDE_INT))))
10164 if (DECL_BIT_FIELD (field)
10165 && modifier == EXPAND_STACK_PARM)
10166 target = 0;
10167 op0 = expand_expr (value, target, tmode, modifier);
10168 if (DECL_BIT_FIELD (field))
10170 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10171 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10173 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10175 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10176 imode);
10177 op0 = expand_and (imode, op0, op1, target);
10179 else
10181 int count = GET_MODE_PRECISION (imode) - bitsize;
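/* A worked example (illustrative): extracting a signed 3-bit field
   in SImode gives count == 32 - 3 == 29; shifting left by 29 and
   then arithmetically right by 29 sign-extends the field value. */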
10183 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10184 target, 0);
10185 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10186 target, 0);
10190 return op0;
10193 goto normal_inner_ref;
10195 case BIT_FIELD_REF:
10196 case ARRAY_RANGE_REF:
10197 normal_inner_ref:
10199 machine_mode mode1, mode2;
10200 HOST_WIDE_INT bitsize, bitpos;
10201 tree offset;
10202 int reversep, volatilep = 0, must_force_mem;
10203 tree tem
10204 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10205 &unsignedp, &reversep, &volatilep, true);
10206 rtx orig_op0, memloc;
10207 bool clear_mem_expr = false;
10209 /* If we got back the original object, something is wrong. Perhaps
10210 we are evaluating an expression too early. In any event, don't
10211 infinitely recurse. */
10212 gcc_assert (tem != exp);
10214 /* If TEM's type is a union of variable size, pass TARGET to the inner
10215 computation, since it will need a temporary and TARGET is known
10216 to be adequate for that. This occurs in unchecked conversion in Ada. */
10217 orig_op0 = op0
10218 = expand_expr_real (tem,
10219 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10220 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10221 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10222 != INTEGER_CST)
10223 && modifier != EXPAND_STACK_PARM
10224 ? target : NULL_RTX),
10225 VOIDmode,
10226 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10227 NULL, true);
10229 /* If the field has a mode, we want to access it in the
10230 field's mode, not the computed mode.
10231 If a MEM has VOIDmode (external with incomplete type),
10232 use BLKmode for it instead. */
10233 if (MEM_P (op0))
10235 if (mode1 != VOIDmode)
10236 op0 = adjust_address (op0, mode1, 0);
10237 else if (GET_MODE (op0) == VOIDmode)
10238 op0 = adjust_address (op0, BLKmode, 0);
10241 mode2
10242 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10244 /* If we have either an offset, a BLKmode result, or a reference
10245 outside the underlying object, we must force it to memory.
10246 Such a case can occur in Ada if we have unchecked conversion
10247 of an expression from a scalar type to an aggregate type or
10248 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10249 passed a partially uninitialized object or a view-conversion
10250 to a larger size. */
10251 must_force_mem = (offset
10252 || mode1 == BLKmode
10253 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10255 /* Handle CONCAT first. */
10256 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10258 if (bitpos == 0
10259 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10261 if (reversep)
10262 op0 = flip_storage_order (GET_MODE (op0), op0);
10263 return op0;
10265 if (bitpos == 0
10266 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10267 && bitsize)
10269 op0 = XEXP (op0, 0);
10270 mode2 = GET_MODE (op0);
10272 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10273 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10274 && bitpos
10275 && bitsize)
10277 op0 = XEXP (op0, 1);
10278 bitpos = 0;
10279 mode2 = GET_MODE (op0);
10281 else
10282 /* Otherwise force into memory. */
10283 must_force_mem = 1;
10286 /* If this is a constant, put it in a register if it is a legitimate
10287 constant and we don't need a memory reference. */
10288 if (CONSTANT_P (op0)
10289 && mode2 != BLKmode
10290 && targetm.legitimate_constant_p (mode2, op0)
10291 && !must_force_mem)
10292 op0 = force_reg (mode2, op0);
10294 /* Otherwise, if this is a constant, try to force it to the constant
10295 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10296 is a legitimate constant. */
10297 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10298 op0 = validize_mem (memloc);
10300 /* Otherwise, if this is a constant or the object is not in memory
10301 and need be, put it there. */
10302 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10304 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10305 emit_move_insn (memloc, op0);
10306 op0 = memloc;
10307 clear_mem_expr = true;
10310 if (offset)
10312 machine_mode address_mode;
10313 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10314 EXPAND_SUM);
10316 gcc_assert (MEM_P (op0));
10318 address_mode = get_address_mode (op0);
10319 if (GET_MODE (offset_rtx) != address_mode)
10321 /* We cannot be sure that the RTL in offset_rtx is valid outside
10322 of a memory address context, so force it into a register
10323 before attempting to convert it to the desired mode. */
10324 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10325 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10328 /* See the comment in expand_assignment for the rationale. */
10329 if (mode1 != VOIDmode
10330 && bitpos != 0
10331 && bitsize > 0
10332 && (bitpos % bitsize) == 0
10333 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10334 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10336 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10337 bitpos = 0;
10340 op0 = offset_address (op0, offset_rtx,
10341 highest_pow2_factor (offset));
10344 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10345 record its alignment as BIGGEST_ALIGNMENT. */
10346 if (MEM_P (op0) && bitpos == 0 && offset != 0
10347 && is_aligning_offset (offset, tem))
10348 set_mem_align (op0, BIGGEST_ALIGNMENT);
10350 /* Don't forget about volatility even if this is a bitfield. */
10351 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10353 if (op0 == orig_op0)
10354 op0 = copy_rtx (op0);
10356 MEM_VOLATILE_P (op0) = 1;
10359 /* In cases where an aligned union has an unaligned object
10360 as a field, we might be extracting a BLKmode value from
10361 an integer-mode (e.g., SImode) object. Handle this case
10362 by doing the extract into an object as wide as the field
10363 (which we know to be the width of a basic mode), then
10364 storing into memory, and changing the mode to BLKmode. */
10365 if (mode1 == VOIDmode
10366 || REG_P (op0) || GET_CODE (op0) == SUBREG
10367 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10368 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10369 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10370 && modifier != EXPAND_CONST_ADDRESS
10371 && modifier != EXPAND_INITIALIZER
10372 && modifier != EXPAND_MEMORY)
10373 /* If the bitfield is volatile and the bitsize
10374 is narrower than the access size of the bitfield,
10375 we need to extract bitfields from the access. */
10376 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10377 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10378 && mode1 != BLKmode
10379 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10380 /* If the field isn't aligned enough to fetch as a memref,
10381 fetch it as a bit field. */
10382 || (mode1 != BLKmode
10383 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10384 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10385 || (MEM_P (op0)
10386 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10387 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10388 && modifier != EXPAND_MEMORY
10389 && ((modifier == EXPAND_CONST_ADDRESS
10390 || modifier == EXPAND_INITIALIZER)
10391 ? STRICT_ALIGNMENT
10392 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10393 || (bitpos % BITS_PER_UNIT != 0)))
10394 /* If the type and the field are a constant size and the
10395 size of the type isn't the same size as the bitfield,
10396 we must use bitfield operations. */
10397 || (bitsize >= 0
10398 && TYPE_SIZE (TREE_TYPE (exp))
10399 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10400 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10401 bitsize)))
10403 machine_mode ext_mode = mode;
10405 if (ext_mode == BLKmode
10406 && ! (target != 0 && MEM_P (op0)
10407 && MEM_P (target)
10408 && bitpos % BITS_PER_UNIT == 0))
10409 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10411 if (ext_mode == BLKmode)
10413 if (target == 0)
10414 target = assign_temp (type, 1, 1);
10416 /* ??? Unlike the similar test a few lines below, this one is
10417 very likely obsolete. */
10418 if (bitsize == 0)
10419 return target;
10421 /* In this case, BITPOS must start at a byte boundary and
10422 TARGET, if specified, must be a MEM. */
10423 gcc_assert (MEM_P (op0)
10424 && (!target || MEM_P (target))
10425 && !(bitpos % BITS_PER_UNIT));
10427 emit_block_move (target,
10428 adjust_address (op0, VOIDmode,
10429 bitpos / BITS_PER_UNIT),
10430 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10431 / BITS_PER_UNIT),
10432 (modifier == EXPAND_STACK_PARM
10433 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10435 return target;
10438 /* If we have nothing to extract, the result will be 0 for targets
10439 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10440 return 0 for the sake of consistency, as reading a zero-sized
10441 bitfield is valid in Ada and the value is fully specified. */
10442 if (bitsize == 0)
10443 return const0_rtx;
10445 op0 = validize_mem (op0);
10447 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10448 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10450 /* If the result has a record type and the extraction is done in
10451 an integral mode, then the field may not be aligned on a byte
10452 boundary; in this case, if it has reverse storage order, it
10453 needs to be extracted as a scalar field with reverse storage
10454 order and put back into memory order afterwards. */
10455 if (TREE_CODE (type) == RECORD_TYPE
10456 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10457 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10459 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10460 (modifier == EXPAND_STACK_PARM
10461 ? NULL_RTX : target),
10462 ext_mode, ext_mode, reversep);
10464 /* If the result has a record type and the mode of OP0 is an
10465 integral mode then, if BITSIZE is narrower than this mode
10466 and this is for big-endian data, we must put the field
10467 into the high-order bits. And we must also put it back
10468 into memory order if it has been previously reversed. */
10469 if (TREE_CODE (type) == RECORD_TYPE
10470 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10472 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10474 if (bitsize < size
10475 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10476 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10477 size - bitsize, op0, 1);
10479 if (reversep)
10480 op0 = flip_storage_order (GET_MODE (op0), op0);
10483 /* If the result type is BLKmode, store the data into a temporary
10484 of the appropriate type, but with the mode corresponding to the
10485 mode for the data we have (op0's mode). */
10486 if (mode == BLKmode)
10488 rtx new_rtx
10489 = assign_stack_temp_for_type (ext_mode,
10490 GET_MODE_BITSIZE (ext_mode),
10491 type);
10492 emit_move_insn (new_rtx, op0);
10493 op0 = copy_rtx (new_rtx);
10494 PUT_MODE (op0, BLKmode);
10497 return op0;
10500 /* If the result is BLKmode, use that to access the object
10501 now as well. */
10502 if (mode == BLKmode)
10503 mode1 = BLKmode;
10505 /* Get a reference to just this component. */
10506 if (modifier == EXPAND_CONST_ADDRESS
10507 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10508 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10509 else
10510 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10512 if (op0 == orig_op0)
10513 op0 = copy_rtx (op0);
10515 set_mem_attributes (op0, exp, 0);
10517 if (REG_P (XEXP (op0, 0)))
10518 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10520 /* If op0 is a temporary because the original expression was forced
10521 to memory, clear MEM_EXPR so that the original expression cannot
10522 be marked as addressable through MEM_EXPR of the temporary. */
10523 if (clear_mem_expr)
10524 set_mem_expr (op0, NULL_TREE);
10526 MEM_VOLATILE_P (op0) |= volatilep;
10528 if (reversep
10529 && modifier != EXPAND_MEMORY
10530 && modifier != EXPAND_WRITE)
10531 op0 = flip_storage_order (mode1, op0);
10533 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10534 || modifier == EXPAND_CONST_ADDRESS
10535 || modifier == EXPAND_INITIALIZER)
10536 return op0;
10538 if (target == 0)
10539 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10541 convert_move (target, op0, unsignedp);
10542 return target;
10545 case OBJ_TYPE_REF:
10546 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10548 case CALL_EXPR:
10549 /* All valid uses of __builtin_va_arg_pack () are removed during
10550 inlining. */
10551 if (CALL_EXPR_VA_ARG_PACK (exp))
10552 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10554 tree fndecl = get_callee_fndecl (exp), attr;
10556 if (fndecl
10557 && (attr = lookup_attribute ("error",
10558 DECL_ATTRIBUTES (fndecl))) != NULL)
10559 error ("%Kcall to %qs declared with attribute error: %s",
10560 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10561 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10562 if (fndecl
10563 && (attr = lookup_attribute ("warning",
10564 DECL_ATTRIBUTES (fndecl))) != NULL)
10565 warning_at (tree_nonartificial_location (exp),
10566 0, "%Kcall to %qs declared with attribute warning: %s",
10567 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10568 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10570 /* Check for a built-in function. */
10571 if (fndecl && DECL_BUILT_IN (fndecl))
10573 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10574 if (CALL_WITH_BOUNDS_P (exp))
10575 return expand_builtin_with_bounds (exp, target, subtarget,
10576 tmode, ignore);
10577 else
10578 return expand_builtin (exp, target, subtarget, tmode, ignore);
10581 return expand_call (exp, target, ignore);
10583 case VIEW_CONVERT_EXPR:
10584 op0 = NULL_RTX;
10586 /* If we are converting to BLKmode, try to avoid an intermediate
10587 temporary by fetching an inner memory reference. */
10588 if (mode == BLKmode
10589 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10590 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10591 && handled_component_p (treeop0))
10593 machine_mode mode1;
10594 HOST_WIDE_INT bitsize, bitpos;
10595 tree offset;
10596 int unsignedp, reversep, volatilep = 0;
10597 tree tem
10598 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10599 &unsignedp, &reversep, &volatilep, true);
10600 rtx orig_op0;
10602 /* ??? We should work harder and deal with non-zero offsets. */
10603 if (!offset
10604 && (bitpos % BITS_PER_UNIT) == 0
10605 && !reversep
10606 && bitsize >= 0
10607 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10609 /* See the normal_inner_ref case for the rationale. */
10610 orig_op0
10611 = expand_expr_real (tem,
10612 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10613 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10614 != INTEGER_CST)
10615 && modifier != EXPAND_STACK_PARM
10616 ? target : NULL_RTX),
10617 VOIDmode,
10618 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10619 NULL, true);
10621 if (MEM_P (orig_op0))
10623 op0 = orig_op0;
10625 /* Get a reference to just this component. */
10626 if (modifier == EXPAND_CONST_ADDRESS
10627 || modifier == EXPAND_SUM
10628 || modifier == EXPAND_INITIALIZER)
10629 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10630 else
10631 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10633 if (op0 == orig_op0)
10634 op0 = copy_rtx (op0);
10636 set_mem_attributes (op0, treeop0, 0);
10637 if (REG_P (XEXP (op0, 0)))
10638 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10640 MEM_VOLATILE_P (op0) |= volatilep;
10645 if (!op0)
10646 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10647 NULL, inner_reference_p);
10649 /* If the input and output modes are both the same, we are done. */
10650 if (mode == GET_MODE (op0))
10652 /* If neither mode is BLKmode, and both modes are the same size
10653 then we can use gen_lowpart. */
10654 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10655 && (GET_MODE_PRECISION (mode)
10656 == GET_MODE_PRECISION (GET_MODE (op0)))
10657 && !COMPLEX_MODE_P (GET_MODE (op0)))
10659 if (GET_CODE (op0) == SUBREG)
10660 op0 = force_reg (GET_MODE (op0), op0);
10661 temp = gen_lowpart_common (mode, op0);
10662 if (temp)
10663 op0 = temp;
10664 else
10666 if (!REG_P (op0) && !MEM_P (op0))
10667 op0 = force_reg (GET_MODE (op0), op0);
10668 op0 = gen_lowpart (mode, op0);
10671 /* If both types are integral, convert from one mode to the other. */
10672 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10673 op0 = convert_modes (mode, GET_MODE (op0), op0,
10674 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10675 /* If the output type is a bit-field type, do an extraction. */
10676 else if (reduce_bit_field)
10677 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10678 TYPE_UNSIGNED (type), NULL_RTX,
10679 mode, mode, false);
10680 /* As a last resort, spill op0 to memory, and reload it in a
10681 different mode. */
10682 else if (!MEM_P (op0))
10684 /* If the operand is not a MEM, force it into memory. Since we
10685 are going to be changing the mode of the MEM, don't call
10686 force_const_mem for constants because we don't allow pool
10687 constants to change mode. */
10688 tree inner_type = TREE_TYPE (treeop0);
10690 gcc_assert (!TREE_ADDRESSABLE (exp));
10692 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10693 target
10694 = assign_stack_temp_for_type
10695 (TYPE_MODE (inner_type),
10696 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10698 emit_move_insn (target, op0);
10699 op0 = target;
10702 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10703 output type is such that the operand is known to be aligned, indicate
10704 that it is. Otherwise, we need only be concerned about alignment for
10705 non-BLKmode results. */
10706 if (MEM_P (op0))
10708 enum insn_code icode;
10710 if (TYPE_ALIGN_OK (type))
10712 /* ??? Copying the MEM without substantially changing it might
10713 run afoul of the code handling volatile memory references in
10714 store_expr, which assumes that TARGET is returned unmodified
10715 if it has been used. */
10716 op0 = copy_rtx (op0);
10717 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10719 else if (modifier != EXPAND_WRITE
10720 && modifier != EXPAND_MEMORY
10721 && !inner_reference_p
10722 && mode != BLKmode
10723 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10725 /* If the target does have special handling for unaligned
10726 loads of this mode, then use them. */
10727 if ((icode = optab_handler (movmisalign_optab, mode))
10728 != CODE_FOR_nothing)
10730 rtx reg;
10732 op0 = adjust_address (op0, mode, 0);
10733 /* We've already validated the memory, and we're creating a
10734 new pseudo destination. The predicates really can't
10735 fail. */
10736 reg = gen_reg_rtx (mode);
10738 /* Nor can the insn generator. */
10739 rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10740 emit_insn (insn);
10741 return reg;
10743 else if (STRICT_ALIGNMENT)
10745 tree inner_type = TREE_TYPE (treeop0);
10746 HOST_WIDE_INT temp_size
10747 = MAX (int_size_in_bytes (inner_type),
10748 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10749 rtx new_rtx
10750 = assign_stack_temp_for_type (mode, temp_size, type);
10751 rtx new_with_op0_mode
10752 = adjust_address (new_rtx, GET_MODE (op0), 0);
10754 gcc_assert (!TREE_ADDRESSABLE (exp));
10756 if (GET_MODE (op0) == BLKmode)
10757 emit_block_move (new_with_op0_mode, op0,
10758 GEN_INT (GET_MODE_SIZE (mode)),
10759 (modifier == EXPAND_STACK_PARM
10760 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10761 else
10762 emit_move_insn (new_with_op0_mode, op0);
10764 op0 = new_rtx;
10768 op0 = adjust_address (op0, mode, 0);
10771 return op0;
10773 case MODIFY_EXPR:
10775 tree lhs = treeop0;
10776 tree rhs = treeop1;
10777 gcc_assert (ignore);
10779 /* Check for |= or &= of a bitfield of size 1 into another bitfield
10780 of size 1. In this case (unless we need the result of the
10781 assignment) we can do this more efficiently with a
10782 test followed by an assignment, if necessary.
10784 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10785 things change so we do, this code should be enhanced to
10786 support it. */
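/* Concretely (an illustrative source-level view): for 1-bit fields,
   a.b |= c.d becomes "if (c.d) a.b = 1;" and a.b &= c.d becomes
   "if (!c.d) a.b = 0;", skipping the store when it would be a
   no-op. */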
10787 if (TREE_CODE (lhs) == COMPONENT_REF
10788 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10789 || TREE_CODE (rhs) == BIT_AND_EXPR)
10790 && TREE_OPERAND (rhs, 0) == lhs
10791 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10792 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10793 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10795 rtx_code_label *label = gen_label_rtx ();
10796 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10797 do_jump (TREE_OPERAND (rhs, 1),
10798 value ? label : 0,
10799 value ? 0 : label, -1);
10800 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10801 false);
10802 do_pending_stack_adjust ();
10803 emit_label (label);
10804 return const0_rtx;
10807 expand_assignment (lhs, rhs, false);
10808 return const0_rtx;
10811 case ADDR_EXPR:
10812 return expand_expr_addr_expr (exp, target, tmode, modifier);
10814 case REALPART_EXPR:
10815 op0 = expand_normal (treeop0);
10816 return read_complex_part (op0, false);
10818 case IMAGPART_EXPR:
10819 op0 = expand_normal (treeop0);
10820 return read_complex_part (op0, true);
10822 case RETURN_EXPR:
10823 case LABEL_EXPR:
10824 case GOTO_EXPR:
10825 case SWITCH_EXPR:
10826 case ASM_EXPR:
10827 /* Expanded in cfgexpand.c. */
10828 gcc_unreachable ();
10830 case TRY_CATCH_EXPR:
10831 case CATCH_EXPR:
10832 case EH_FILTER_EXPR:
10833 case TRY_FINALLY_EXPR:
10834 /* Lowered by tree-eh.c. */
10835 gcc_unreachable ();
10837 case WITH_CLEANUP_EXPR:
10838 case CLEANUP_POINT_EXPR:
10839 case TARGET_EXPR:
10840 case CASE_LABEL_EXPR:
10841 case VA_ARG_EXPR:
10842 case BIND_EXPR:
10843 case INIT_EXPR:
10844 case CONJ_EXPR:
10845 case COMPOUND_EXPR:
10846 case PREINCREMENT_EXPR:
10847 case PREDECREMENT_EXPR:
10848 case POSTINCREMENT_EXPR:
10849 case POSTDECREMENT_EXPR:
10850 case LOOP_EXPR:
10851 case EXIT_EXPR:
10852 case COMPOUND_LITERAL_EXPR:
10853 /* Lowered by gimplify.c. */
10854 gcc_unreachable ();
10856 case FDESC_EXPR:
10857 /* Function descriptors are not valid except as
10858 initialization constants, and should not be expanded. */
10859 gcc_unreachable ();
10861 case WITH_SIZE_EXPR:
10862 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10863 have pulled out the size to use in whatever context it needed. */
10864 return expand_expr_real (treeop0, original_target, tmode,
10865 modifier, alt_rtl, inner_reference_p);
10867 default:
10868 return expand_expr_real_2 (&ops, target, tmode, modifier);
10872 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10873 signedness of TYPE), possibly returning the result in TARGET. */
10874 static rtx
10875 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10877 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10878 if (target && GET_MODE (target) != GET_MODE (exp))
10879 target = 0;
10880 /* For constant values, reduce using build_int_cst_type. */
10881 if (CONST_INT_P (exp))
10883 HOST_WIDE_INT value = INTVAL (exp);
10884 tree t = build_int_cst_type (type, value);
10885 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10887 else if (TYPE_UNSIGNED (type))
10889 machine_mode mode = GET_MODE (exp);
10890 rtx mask = immed_wide_int_const
10891 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10892 return expand_and (mode, exp, mask, target);
10894 else
10896 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10897 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10898 exp, count, target, 0);
10899 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10900 exp, count, target, 0);
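/* A worked example (illustrative): reducing a SImode value to a
   signed 6-bit type shifts left and then arithmetically right by
   32 - 6 == 26 bits; for an unsigned 6-bit type the value is simply
   masked with 0x3f instead. */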
10904 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10905 when applied to the address of EXP produces an address known to be
10906 aligned more than BIGGEST_ALIGNMENT. */
10908 static int
10909 is_aligning_offset (const_tree offset, const_tree exp)
10911 /* Strip off any conversions. */
10912 while (CONVERT_EXPR_P (offset))
10913 offset = TREE_OPERAND (offset, 0);
10915 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10916 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10917 if (TREE_CODE (offset) != BIT_AND_EXPR
10918 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10919 || compare_tree_int (TREE_OPERAND (offset, 1),
10920 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10921 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10922 return 0;
10924 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10925 It must be NEGATE_EXPR. Then strip any more conversions. */
10926 offset = TREE_OPERAND (offset, 0);
10927 while (CONVERT_EXPR_P (offset))
10928 offset = TREE_OPERAND (offset, 0);
10930 if (TREE_CODE (offset) != NEGATE_EXPR)
10931 return 0;
10933 offset = TREE_OPERAND (offset, 0);
10934 while (CONVERT_EXPR_P (offset))
10935 offset = TREE_OPERAND (offset, 0);
10937 /* This must now be the address of EXP. */
10938 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
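/* For instance (illustrative, with a BIGGEST_ALIGNMENT of 16 bytes):
   an offset of the form (- (sizetype) &EXP) & 63, used to round the
   address of EXP up to a 64-byte boundary, satisfies the checks
   above. */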
10941 /* Return the tree node if ARG corresponds to a string constant, or zero
10942 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10943 in bytes within the string that ARG is accessing. The type of the
10944 offset will be `sizetype'. */
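/* Examples (illustrative): for ARG == &"hello"[0] this returns the
   STRING_CST with *PTR_OFFSET == 0; for ARG == "hello" + 2 it
   returns the same STRING_CST with *PTR_OFFSET == 2. */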
10946 tree
10947 string_constant (tree arg, tree *ptr_offset)
10949 tree array, offset, lower_bound;
10950 STRIP_NOPS (arg);
10952 if (TREE_CODE (arg) == ADDR_EXPR)
10954 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10956 *ptr_offset = size_zero_node;
10957 return TREE_OPERAND (arg, 0);
10959 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10961 array = TREE_OPERAND (arg, 0);
10962 offset = size_zero_node;
10964 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10966 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10967 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10968 if (TREE_CODE (array) != STRING_CST
10969 && TREE_CODE (array) != VAR_DECL)
10970 return 0;
10972 /* Check if the array has a nonzero lower bound. */
10973 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10974 if (!integer_zerop (lower_bound))
10976 /* If the offset and base aren't both constants, return 0. */
10977 if (TREE_CODE (lower_bound) != INTEGER_CST)
10978 return 0;
10979 if (TREE_CODE (offset) != INTEGER_CST)
10980 return 0;
10981 /* Adjust offset by the lower bound. */
10982 offset = size_diffop (fold_convert (sizetype, offset),
10983 fold_convert (sizetype, lower_bound));
10986 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10988 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10989 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10990 if (TREE_CODE (array) != ADDR_EXPR)
10991 return 0;
10992 array = TREE_OPERAND (array, 0);
10993 if (TREE_CODE (array) != STRING_CST
10994 && TREE_CODE (array) != VAR_DECL)
10995 return 0;
10997 else
10998 return 0;
11000 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11002 tree arg0 = TREE_OPERAND (arg, 0);
11003 tree arg1 = TREE_OPERAND (arg, 1);
11005 STRIP_NOPS (arg0);
11006 STRIP_NOPS (arg1);
11008 if (TREE_CODE (arg0) == ADDR_EXPR
11009 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11010 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11012 array = TREE_OPERAND (arg0, 0);
11013 offset = arg1;
11015 else if (TREE_CODE (arg1) == ADDR_EXPR
11016 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11017 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11019 array = TREE_OPERAND (arg1, 0);
11020 offset = arg0;
11022 else
11023 return 0;
11025 else
11026 return 0;
11028 if (TREE_CODE (array) == STRING_CST)
11030 *ptr_offset = fold_convert (sizetype, offset);
11031 return array;
11033 else if (TREE_CODE (array) == VAR_DECL
11034 || TREE_CODE (array) == CONST_DECL)
11036 int length;
11037 tree init = ctor_for_folding (array);
11039 /* Variables initialized to string literals can be handled too. */
11040 if (init == error_mark_node
11041 || !init
11042 || TREE_CODE (init) != STRING_CST)
11043 return 0;
11045 /* Reject initializers longer than the array, e.g. const char foo[4] = "abcde"; */
11046 if (DECL_SIZE_UNIT (array) == NULL_TREE
11047 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11048 || (length = TREE_STRING_LENGTH (init)) <= 0
11049 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11050 return 0;
11052 /* If the variable is bigger than the string literal, OFFSET must be
11053 constant and within the bounds of the string literal. */
11054 offset = fold_convert (sizetype, offset);
11055 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11056 && (! tree_fits_uhwi_p (offset)
11057 || compare_tree_int (offset, length) >= 0))
11058 return 0;
11060 *ptr_offset = offset;
11061 return init;
11064 return 0;
11067 /* Generate code to calculate OPS, an exploded expression,
11068 using a store-flag instruction, and return an rtx for the result.
11069 OPS reflects a comparison.
11071 If TARGET is nonzero, store the result there if convenient.
11073 Return zero if there is no suitable set-flag instruction
11074 available on this machine.
11076 Once expand_expr has been called on the arguments of the comparison,
11077 we are committed to doing the store flag, since it is not safe to
11078 re-evaluate the expression. We emit the store-flag insn by calling
11079 emit_store_flag, but only expand the arguments if we have a reason
11080 to believe that emit_store_flag will be successful. If we think that
11081 it will, but it isn't, we have to simulate the store-flag with a
11082 set/jump/set sequence. */
11084 static rtx
11085 do_store_flag (sepops ops, rtx target, machine_mode mode)
11087 enum rtx_code code;
11088 tree arg0, arg1, type;
11089 machine_mode operand_mode;
11090 int unsignedp;
11091 rtx op0, op1;
11092 rtx subtarget = target;
11093 location_t loc = ops->location;
11095 arg0 = ops->op0;
11096 arg1 = ops->op1;
11098 /* Don't crash if the comparison was erroneous. */
11099 if (arg0 == error_mark_node || arg1 == error_mark_node)
11100 return const0_rtx;
11102 type = TREE_TYPE (arg0);
11103 operand_mode = TYPE_MODE (type);
11104 unsignedp = TYPE_UNSIGNED (type);
11106 /* We won't bother with BLKmode store-flag operations because it would mean
11107 passing a lot of information to emit_store_flag. */
11108 if (operand_mode == BLKmode)
11109 return 0;
11111 /* We won't bother with store-flag operations involving function pointers
11112 when function pointers must be canonicalized before comparisons. */
11113 if (targetm.have_canonicalize_funcptr_for_compare ()
11114 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11115 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11116 == FUNCTION_TYPE))
11117 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11118 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11119 == FUNCTION_TYPE))))
11120 return 0;
11122 STRIP_NOPS (arg0);
11123 STRIP_NOPS (arg1);
11125 /* For vector typed comparisons emit code to generate the desired
11126 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11127 expander for this. */
11128 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11130 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11131 if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11132 && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
11133 return expand_vec_cmp_expr (ops->type, ifexp, target);
11134 else
11136 tree if_true = constant_boolean_node (true, ops->type);
11137 tree if_false = constant_boolean_node (false, ops->type);
11138 return expand_vec_cond_expr (ops->type, ifexp, if_true,
11139 if_false, target);
11143 /* Get the rtx comparison code to use. We know that OPS is a comparison
11144 operation of some type. Some comparisons against 1 and -1 can be
11145 converted to comparisons with zero. Do so here so that the tests
11146 below will be aware that we have a comparison with zero. These
11147 tests will not catch constants in the first operand, but constants
11148 are rarely passed as the first operand. */
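11149 /* For instance, X < 1 becomes X <= 0, and the signed test X > -1 becomes X >= 0. */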
11150 switch (ops->code)
11152 case EQ_EXPR:
11153 code = EQ;
11154 break;
11155 case NE_EXPR:
11156 code = NE;
11157 break;
11158 case LT_EXPR:
11159 if (integer_onep (arg1))
11160 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11161 else
11162 code = unsignedp ? LTU : LT;
11163 break;
11164 case LE_EXPR:
11165 if (! unsignedp && integer_all_onesp (arg1))
11166 arg1 = integer_zero_node, code = LT;
11167 else
11168 code = unsignedp ? LEU : LE;
11169 break;
11170 case GT_EXPR:
11171 if (! unsignedp && integer_all_onesp (arg1))
11172 arg1 = integer_zero_node, code = GE;
11173 else
11174 code = unsignedp ? GTU : GT;
11175 break;
11176 case GE_EXPR:
11177 if (integer_onep (arg1))
11178 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11179 else
11180 code = unsignedp ? GEU : GE;
11181 break;
11183 case UNORDERED_EXPR:
11184 code = UNORDERED;
11185 break;
11186 case ORDERED_EXPR:
11187 code = ORDERED;
11188 break;
11189 case UNLT_EXPR:
11190 code = UNLT;
11191 break;
11192 case UNLE_EXPR:
11193 code = UNLE;
11194 break;
11195 case UNGT_EXPR:
11196 code = UNGT;
11197 break;
11198 case UNGE_EXPR:
11199 code = UNGE;
11200 break;
11201 case UNEQ_EXPR:
11202 code = UNEQ;
11203 break;
11204 case LTGT_EXPR:
11205 code = LTGT;
11206 break;
11208 default:
11209 gcc_unreachable ();
11212 /* Put a constant second. */
11213 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11214 || TREE_CODE (arg0) == FIXED_CST)
11216 std::swap (arg0, arg1);
11217 code = swap_condition (code);
11220 /* If this is an equality or inequality test of a single bit, we can
11221 do this by shifting the bit being tested to the low-order bit and
11222 masking the result with the constant 1. If the condition was EQ,
11223 we xor it with 1. This does not require an scc insn and is faster
11224 than an scc insn even if we have it.
11226 The code to make this transformation was moved into fold_single_bit_test,
11227 so we just call into the folder and expand its result. */
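11228 /* E.g. (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0 becomes ((X >> 3) & 1) ^ 1. */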
11229 if ((code == NE || code == EQ)
11230 && integer_zerop (arg1)
11231 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11233 gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11234 if (srcstmt
11235 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11237 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11238 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11239 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11240 gimple_assign_rhs1 (srcstmt),
11241 gimple_assign_rhs2 (srcstmt));
11242 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11243 if (temp)
11244 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11248 if (! get_subtarget (target)
11249 || GET_MODE (subtarget) != operand_mode)
11250 subtarget = 0;
11252 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11254 if (target == 0)
11255 target = gen_reg_rtx (mode);
11257 /* Try a cstore if possible. */
11258 return emit_store_flag_force (target, code, op0, op1,
11259 operand_mode, unsignedp,
11260 (TYPE_PRECISION (ops->type) == 1
11261 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
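11262 /* The normalizep value of -1 above makes a true result store as all-ones
11263 when ops->type is a 1-bit signed type; otherwise true stores as 1. */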
11264 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11265 0 otherwise (i.e. if there is no casesi instruction).
11267 DEFAULT_PROBABILITY is the probability of jumping to the default
11268 label. */
11269 int
11270 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11271 rtx table_label, rtx default_label, rtx fallback_label,
11272 int default_probability)
11274 struct expand_operand ops[5];
11275 machine_mode index_mode = SImode;
11276 rtx op1, op2, index;
11278 if (! targetm.have_casesi ())
11279 return 0;
11281 /* Convert the index to SImode. */
11282 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11284 machine_mode omode = TYPE_MODE (index_type);
11285 rtx rangertx = expand_normal (range);
11287 /* We must handle the endpoints in the original mode. */
11288 index_expr = build2 (MINUS_EXPR, index_type,
11289 index_expr, minval);
11290 minval = integer_zero_node;
11291 index = expand_normal (index_expr);
11292 if (default_label)
11293 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11294 omode, 1, default_label,
11295 default_probability);
11296 /* Now we can safely truncate. */
11297 index = convert_to_mode (index_mode, index, 0);
11299 else
11301 if (TYPE_MODE (index_type) != index_mode)
11303 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11304 index_expr = fold_convert (index_type, index_expr);
11307 index = expand_normal (index_expr);
11310 do_pending_stack_adjust ();
11312 op1 = expand_normal (minval);
11313 op2 = expand_normal (range);
11315 create_input_operand (&ops[0], index, index_mode);
11316 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11317 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11318 create_fixed_operand (&ops[3], table_label);
11319 create_fixed_operand (&ops[4], (default_label
11320 ? default_label
11321 : fallback_label));
11322 expand_jump_insn (targetm.code_for_casesi, 5, ops);
11323 return 1;
11326 /* Attempt to generate a tablejump instruction; same concept as try_casesi. */
11327 /* Subroutine of the next function.
11329 INDEX is the value being switched on, with the lowest value
11330 in the table already subtracted.
11331 MODE is its expected mode (needed if INDEX is constant).
11332 RANGE is the highest valid INDEX (one less than the table length).
11333 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11335 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11336 index value is out of range.
11337 DEFAULT_PROBABILITY is the probability of jumping to
11338 the default label. */
11340 static void
11341 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11342 rtx default_label, int default_probability)
11344 rtx temp, vector;
11346 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11347 cfun->cfg->max_jumptable_ents = INTVAL (range);
11349 /* Do an unsigned comparison (in the proper mode) between the index
11350 expression and the value which represents the length of the range.
11351 Since we just finished subtracting the lower bound of the range
11352 from the index expression, this comparison allows us to simultaneously
11353 check that the original index expression value is both greater than
11354 or equal to the minimum value of the range and less than or equal to
11355 the maximum value of the range. */
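11356 /* E.g. for case values 5 ... 10, INDEX arrives here as X - 5 and RANGE as 5. */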
11357 if (default_label)
11358 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11359 default_label, default_probability);
11362 /* If index is in range, it must fit in Pmode.
11363 Convert to Pmode so we can index with it. */
11364 if (mode != Pmode)
11365 index = convert_to_mode (Pmode, index, 1);
11367 /* Don't let a MEM slip through, because then INDEX that comes
11368 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11369 and break_out_memory_refs will go to work on it and mess it up. */
11370 #ifdef PIC_CASE_VECTOR_ADDRESS
11371 if (flag_pic && !REG_P (index))
11372 index = copy_to_mode_reg (Pmode, index);
11373 #endif
11375 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11376 GET_MODE_SIZE, because this indicates how large insns are. The other
11377 uses should all be Pmode, because they are addresses. This code
11378 could fail if addresses and insns are not the same size. */
11379 index = simplify_gen_binary (MULT, Pmode, index,
11380 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11381 Pmode));
11382 index = simplify_gen_binary (PLUS, Pmode, index,
11383 gen_rtx_LABEL_REF (Pmode, table_label));
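11384 /* I.e. the entry address is TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE). */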
11385 #ifdef PIC_CASE_VECTOR_ADDRESS
11386 if (flag_pic)
11387 index = PIC_CASE_VECTOR_ADDRESS (index);
11388 else
11389 #endif
11390 index = memory_address (CASE_VECTOR_MODE, index);
11391 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11392 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11393 convert_move (temp, vector, 0);
11395 emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11397 /* If we are generating PIC code or if the table is PC-relative, the
11398 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11399 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11400 emit_barrier ();
11403 int
11404 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11405 rtx table_label, rtx default_label, int default_probability)
11407 rtx index;
11409 if (! targetm.have_tablejump ())
11410 return 0;
11412 index_expr = fold_build2 (MINUS_EXPR, index_type,
11413 fold_convert (index_type, index_expr),
11414 fold_convert (index_type, minval));
11415 index = expand_normal (index_expr);
11416 do_pending_stack_adjust ();
11418 do_tablejump (index, TYPE_MODE (index_type),
11419 convert_modes (TYPE_MODE (index_type),
11420 TYPE_MODE (TREE_TYPE (range)),
11421 expand_normal (range),
11422 TYPE_UNSIGNED (TREE_TYPE (range))),
11423 table_label, default_label, default_probability);
11424 return 1;
11427 /* Return a CONST_VECTOR rtx representing a vector mask for
11428 a VECTOR_CST of booleans. */
11429 static rtx
11430 const_vector_mask_from_tree (tree exp)
11432 rtvec v;
11433 unsigned i;
11434 int units;
11435 tree elt;
11436 machine_mode inner, mode;
11438 mode = TYPE_MODE (TREE_TYPE (exp));
11439 units = GET_MODE_NUNITS (mode);
11440 inner = GET_MODE_INNER (mode);
11442 v = rtvec_alloc (units);
11444 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11446 elt = VECTOR_CST_ELT (exp, i);
11448 gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11449 if (integer_zerop (elt))
11450 RTVEC_ELT (v, i) = CONST0_RTX (inner);
11451 else if (integer_onep (elt)
11452 || integer_minus_onep (elt))
11453 RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11454 else
11455 gcc_unreachable ();
11458 return gen_rtx_CONST_VECTOR (mode, v);
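11459 /* E.g. the boolean VECTOR_CST { 0, 1, 1, 0 } yields the mask
11460 { 0, -1, -1, 0 } in the corresponding vector mode. */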
11461 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11462 static rtx
11463 const_vector_from_tree (tree exp)
11465 rtvec v;
11466 unsigned i;
11467 int units;
11468 tree elt;
11469 machine_mode inner, mode;
11471 mode = TYPE_MODE (TREE_TYPE (exp));
11473 if (initializer_zerop (exp))
11474 return CONST0_RTX (mode);
11476 if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11477 return const_vector_mask_from_tree (exp);
11479 units = GET_MODE_NUNITS (mode);
11480 inner = GET_MODE_INNER (mode);
11482 v = rtvec_alloc (units);
11484 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11486 elt = VECTOR_CST_ELT (exp, i);
11488 if (TREE_CODE (elt) == REAL_CST)
11489 RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11490 inner);
11491 else if (TREE_CODE (elt) == FIXED_CST)
11492 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11493 inner);
11494 else
11495 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11498 return gen_rtx_CONST_VECTOR (mode, v);
11501 /* Build a decl for a personality function given a language prefix. */
11503 tree
11504 build_personality_function (const char *lang)
11506 const char *unwind_and_version;
11507 tree decl, type;
11508 char *name;
11510 switch (targetm_common.except_unwind_info (&global_options))
11512 case UI_NONE:
11513 return NULL;
11514 case UI_SJLJ:
11515 unwind_and_version = "_sj0";
11516 break;
11517 case UI_DWARF2:
11518 case UI_TARGET:
11519 unwind_and_version = "_v0";
11520 break;
11521 case UI_SEH:
11522 unwind_and_version = "_seh0";
11523 break;
11524 default:
11525 gcc_unreachable ();
11528 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
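11529 /* E.g. LANG "gxx" with DWARF2 unwind info yields "__gxx_personality_v0". */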
11530 type = build_function_type_list (integer_type_node, integer_type_node,
11531 long_long_unsigned_type_node,
11532 ptr_type_node, ptr_type_node, NULL_TREE);
11533 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11534 get_identifier (name), type);
11535 DECL_ARTIFICIAL (decl) = 1;
11536 DECL_EXTERNAL (decl) = 1;
11537 TREE_PUBLIC (decl) = 1;
11539 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11540 are the flags assigned by targetm.encode_section_info. */
11541 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11543 return decl;
11546 /* Extracts the personality function of DECL and returns the corresponding
11547 libfunc. */
11549 rtx
11550 get_personality_function (tree decl)
11552 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11553 enum eh_personality_kind pk;
11555 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11556 if (pk == eh_personality_none)
11557 return NULL;
11559 if (!personality
11560 && pk == eh_personality_any)
11561 personality = lang_hooks.eh_personality ();
11563 if (pk == eh_personality_lang)
11564 gcc_assert (personality != NULL_TREE);
11566 return XEXP (DECL_RTL (personality), 0);
11569 /* Returns a tree for the size of EXP in bytes. */
11571 static tree
11572 tree_expr_size (const_tree exp)
11574 if (DECL_P (exp)
11575 && DECL_SIZE_UNIT (exp) != 0)
11576 return DECL_SIZE_UNIT (exp);
11577 else
11578 return size_in_bytes (TREE_TYPE (exp));
11581 /* Return an rtx for the size in bytes of the value of EXP. */
11583 rtx
11584 expr_size (tree exp)
11586 tree size;
11588 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11589 size = TREE_OPERAND (exp, 1);
11590 else
11592 size = tree_expr_size (exp);
11593 gcc_assert (size);
11594 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11597 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11600 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11601 if the size can vary or is larger than an integer. */
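11602 /* E.g. -1 is returned for a variable-length array whose size is not constant. */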
11603 static HOST_WIDE_INT
11604 int_expr_size (tree exp)
11606 tree size;
11608 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11609 size = TREE_OPERAND (exp, 1);
11610 else
11612 size = tree_expr_size (exp);
11613 gcc_assert (size);
11616 if (size == 0 || !tree_fits_shwi_p (size))
11617 return -1;
11619 return tree_to_shwi (size);
11622 #include "gt-expr.h"