Merge from trunk @222673.
[official-gcc.git] / gcc / expr.c
blob8addb85c5945e961b38191462df08c92112cfe89
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "hashtab.h"
48 #include "statistics.h"
49 #include "real.h"
50 #include "fixed-value.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
58 #include "expr.h"
59 #include "insn-codes.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "recog.h"
63 #include "reload.h"
64 #include "typeclass.h"
65 #include "toplev.h"
66 #include "langhooks.h"
67 #include "intl.h"
68 #include "tm_p.h"
69 #include "tree-iterator.h"
70 #include "predict.h"
71 #include "dominance.h"
72 #include "cfg.h"
73 #include "basic-block.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-ssa.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "tree-ssanames.h"
85 #include "target.h"
86 #include "common/common-target.h"
87 #include "timevar.h"
88 #include "df.h"
89 #include "diagnostic.h"
90 #include "tree-ssa-live.h"
91 #include "tree-outof-ssa.h"
92 #include "target-globals.h"
93 #include "params.h"
94 #include "tree-ssa-address.h"
95 #include "cfgexpand.h"
96 #include "builtins.h"
97 #include "tree-chkp.h"
98 #include "rtl-chkp.h"
99 #include "ccmp.h"
101 #ifndef STACK_PUSH_CODE
102 #ifdef STACK_GROWS_DOWNWARD
103 #define STACK_PUSH_CODE PRE_DEC
104 #else
105 #define STACK_PUSH_CODE PRE_INC
106 #endif
107 #endif
110 /* If this is nonzero, we do not bother generating VOLATILE
111 around volatile memory references, and we are willing to
112 output indirect addresses. If cse is to follow, we reject
113 indirect addresses so a useful potential cse is generated;
114 if it is used only once, instruction combination will produce
115 the same indirect address eventually. */
116 int cse_not_expected;
118 /* This structure is used by move_by_pieces to describe the move to
119 be performed. */
120 struct move_by_pieces_d
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
130 unsigned HOST_WIDE_INT len;
131 HOST_WIDE_INT offset;
132 int reverse;
135 /* This structure is used by store_by_pieces to describe the clear to
136 be performed. */
138 struct store_by_pieces_d
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 unsigned HOST_WIDE_INT len;
145 HOST_WIDE_INT offset;
146 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
147 void *constfundata;
148 int reverse;
151 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
152 struct move_by_pieces_d *);
153 static bool block_move_libcall_safe_for_call_parm (void);
154 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
155 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
156 unsigned HOST_WIDE_INT);
157 static tree emit_block_move_libcall_fn (int);
158 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
159 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
160 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
161 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
162 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
163 struct store_by_pieces_d *);
164 static tree clear_storage_libcall_fn (int);
165 static rtx_insn *compress_float_constant (rtx, rtx);
166 static rtx get_subtarget (rtx);
167 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
168 HOST_WIDE_INT, machine_mode,
169 tree, int, alias_set_type, bool);
170 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
171 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
172 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
173 machine_mode, tree, alias_set_type, bool, bool);
175 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
177 static int is_aligning_offset (const_tree, const_tree);
178 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
179 static rtx do_store_flag (sepops, rtx, machine_mode);
180 #ifdef PUSH_ROUNDING
181 static void emit_single_push_insn (machine_mode, rtx, tree);
182 #endif
183 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
184 static rtx const_vector_from_tree (tree);
185 static tree tree_expr_size (const_tree);
186 static HOST_WIDE_INT int_expr_size (tree);
189 /* This is run to set up which modes can be used
190 directly in memory and to initialize the block move optab. It is run
191 at the beginning of compilation and when the target is reinitialized. */
193 void
194 init_expr_target (void)
196 rtx insn, pat;
197 machine_mode mode;
198 int num_clobbers;
199 rtx mem, mem1;
200 rtx reg;
202 /* Try indexing by frame ptr and try by stack ptr.
203 It is known that on the Convex the stack ptr isn't a valid index.
204 With luck, one or the other is valid on any machine. */
205 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
206 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
208 /* A scratch register we can modify in-place below to avoid
209 useless RTL allocations. */
210 reg = gen_rtx_REG (VOIDmode, -1);
212 insn = rtx_alloc (INSN);
213 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
214 PATTERN (insn) = pat;
216 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
217 mode = (machine_mode) ((int) mode + 1))
219 int regno;
221 direct_load[(int) mode] = direct_store[(int) mode] = 0;
222 PUT_MODE (mem, mode);
223 PUT_MODE (mem1, mode);
224 PUT_MODE (reg, mode);
226 /* See if there is some register that can be used in this mode and
227 directly loaded or stored from memory. */
229 if (mode != VOIDmode && mode != BLKmode)
230 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
231 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
232 regno++)
234 if (! HARD_REGNO_MODE_OK (regno, mode))
235 continue;
237 SET_REGNO (reg, regno);
239 SET_SRC (pat) = mem;
240 SET_DEST (pat) = reg;
241 if (recog (pat, insn, &num_clobbers) >= 0)
242 direct_load[(int) mode] = 1;
244 SET_SRC (pat) = mem1;
245 SET_DEST (pat) = reg;
246 if (recog (pat, insn, &num_clobbers) >= 0)
247 direct_load[(int) mode] = 1;
249 SET_SRC (pat) = reg;
250 SET_DEST (pat) = mem;
251 if (recog (pat, insn, &num_clobbers) >= 0)
252 direct_store[(int) mode] = 1;
254 SET_SRC (pat) = reg;
255 SET_DEST (pat) = mem1;
256 if (recog (pat, insn, &num_clobbers) >= 0)
257 direct_store[(int) mode] = 1;
261 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
263 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
264 mode = GET_MODE_WIDER_MODE (mode))
266 machine_mode srcmode;
267 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
268 srcmode = GET_MODE_WIDER_MODE (srcmode))
270 enum insn_code ic;
272 ic = can_extend_p (mode, srcmode, 0);
273 if (ic == CODE_FOR_nothing)
274 continue;
276 PUT_MODE (mem, srcmode);
278 if (insn_operand_matches (ic, 1, mem))
279 float_extend_from_mem[mode][srcmode] = true;
284 /* This is run at the start of compiling a function. */
286 void
287 init_expr (void)
289 memset (&crtl->expr, 0, sizeof (crtl->expr));
292 /* Copy data from FROM to TO, where the machine modes are not the same.
293 Both modes may be integer, or both may be floating, or both may be
294 fixed-point.
295 UNSIGNEDP should be nonzero if FROM is an unsigned type.
296 This causes zero-extension instead of sign-extension. */
298 void
299 convert_move (rtx to, rtx from, int unsignedp)
301 machine_mode to_mode = GET_MODE (to);
302 machine_mode from_mode = GET_MODE (from);
303 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
304 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
305 enum insn_code code;
306 rtx libcall;
308 /* rtx code for making an equivalent value. */
309 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
310 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
313 gcc_assert (to_real == from_real);
314 gcc_assert (to_mode != BLKmode);
315 gcc_assert (from_mode != BLKmode);
317 /* If the source and destination are already the same, then there's
318 nothing to do. */
319 if (to == from)
320 return;
322 /* If FROM is a SUBREG that indicates that we have already done at least
323 the required extension, strip it. We don't handle such SUBREGs as
324 TO here. */
326 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
327 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
328 >= GET_MODE_PRECISION (to_mode))
329 && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
330 from = gen_lowpart (to_mode, from), from_mode = to_mode;
332 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
334 if (to_mode == from_mode
335 || (from_mode == VOIDmode && CONSTANT_P (from)))
337 emit_move_insn (to, from);
338 return;
341 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
343 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
345 if (VECTOR_MODE_P (to_mode))
346 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
347 else
348 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
350 emit_move_insn (to, from);
351 return;
354 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
356 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
357 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
358 return;
361 if (to_real)
363 rtx value;
364 rtx_insn *insns;
365 convert_optab tab;
367 gcc_assert ((GET_MODE_PRECISION (from_mode)
368 != GET_MODE_PRECISION (to_mode))
369 || (DECIMAL_FLOAT_MODE_P (from_mode)
370 != DECIMAL_FLOAT_MODE_P (to_mode)));
372 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
373 /* Conversion between decimal float and binary float, same size. */
374 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
375 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
376 tab = sext_optab;
377 else
378 tab = trunc_optab;
380 /* Try converting directly if the insn is supported. */
382 code = convert_optab_handler (tab, to_mode, from_mode);
383 if (code != CODE_FOR_nothing)
385 emit_unop_insn (code, to, from,
386 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
387 return;
390 /* Otherwise use a libcall. */
391 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
393 /* Is this conversion implemented yet? */
394 gcc_assert (libcall);
396 start_sequence ();
397 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
398 1, from, from_mode);
399 insns = get_insns ();
400 end_sequence ();
401 emit_libcall_block (insns, to, value,
402 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
403 from)
404 : gen_rtx_FLOAT_EXTEND (to_mode, from));
405 return;
408 /* Handle pointer conversion. */ /* SPEE 900220. */
409 /* If the target has a converter from FROM_MODE to TO_MODE, use it. */
411 convert_optab ctab;
413 if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
414 ctab = trunc_optab;
415 else if (unsignedp)
416 ctab = zext_optab;
417 else
418 ctab = sext_optab;
420 if (convert_optab_handler (ctab, to_mode, from_mode)
421 != CODE_FOR_nothing)
423 emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
424 to, from, UNKNOWN);
425 return;
429 /* Targets are expected to provide conversion insns between PxImode and
430 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
431 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
433 machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
436 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
437 != CODE_FOR_nothing);
439 if (full_mode != from_mode)
440 from = convert_to_mode (full_mode, from, unsignedp);
441 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
442 to, from, UNKNOWN);
443 return;
445 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
447 rtx new_from;
448 machine_mode full_mode
449 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
450 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
451 enum insn_code icode;
453 icode = convert_optab_handler (ctab, full_mode, from_mode);
454 gcc_assert (icode != CODE_FOR_nothing);
456 if (to_mode == full_mode)
458 emit_unop_insn (icode, to, from, UNKNOWN);
459 return;
462 new_from = gen_reg_rtx (full_mode);
463 emit_unop_insn (icode, new_from, from, UNKNOWN);
465 /* else proceed to integer conversions below. */
466 from_mode = full_mode;
467 from = new_from;
470 /* Make sure both are fixed-point modes or both are not. */
471 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
472 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
473 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
475 /* If we widen from_mode to to_mode and they are in the same class,
476 we won't saturate the result.
477 Otherwise, always saturate the result to play safe. */
478 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
479 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
480 expand_fixed_convert (to, from, 0, 0);
481 else
482 expand_fixed_convert (to, from, 0, 1);
483 return;
486 /* Now both modes are integers. */
488 /* Handle expanding beyond a word. */
489 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
490 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
492 rtx_insn *insns;
493 rtx lowpart;
494 rtx fill_value;
495 rtx lowfrom;
496 int i;
497 machine_mode lowpart_mode;
498 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
500 /* Try converting directly if the insn is supported. */
501 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
502 != CODE_FOR_nothing)
504 /* If FROM is a SUBREG, put it into a register. Do this
505 so that we always generate the same set of insns for
506 better cse'ing; if an intermediate assignment occurred,
507 we won't be doing the operation directly on the SUBREG. */
508 if (optimize > 0 && GET_CODE (from) == SUBREG)
509 from = force_reg (from_mode, from);
510 emit_unop_insn (code, to, from, equiv_code);
511 return;
513 /* Next, try converting via full word. */
514 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
515 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
516 != CODE_FOR_nothing))
518 rtx word_to = gen_reg_rtx (word_mode);
519 if (REG_P (to))
521 if (reg_overlap_mentioned_p (to, from))
522 from = force_reg (from_mode, from);
523 emit_clobber (to);
525 convert_move (word_to, from, unsignedp);
526 emit_unop_insn (code, to, word_to, equiv_code);
527 return;
530 /* No special multiword conversion insn; do it by hand. */
531 start_sequence ();
533 /* Since we will turn this into a no conflict block, we must ensure the
534 the source does not overlap the target so force it into an isolated
535 register when maybe so. Likewise for any MEM input, since the
536 conversion sequence might require several references to it and we
537 must ensure we're getting the same value every time. */
539 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
540 from = force_reg (from_mode, from);
542 /* Get a copy of FROM widened to a word, if necessary. */
543 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
544 lowpart_mode = word_mode;
545 else
546 lowpart_mode = from_mode;
548 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
550 lowpart = gen_lowpart (lowpart_mode, to);
551 emit_move_insn (lowpart, lowfrom);
553 /* Compute the value to put in each remaining word. */
554 if (unsignedp)
555 fill_value = const0_rtx;
556 else
557 fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
558 LT, lowfrom, const0_rtx,
559 lowpart_mode, 0, -1);
561 /* Fill the remaining words. */
562 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
564 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
565 rtx subword = operand_subword (to, index, 1, to_mode);
567 gcc_assert (subword);
569 if (fill_value != subword)
570 emit_move_insn (subword, fill_value);
573 insns = get_insns ();
574 end_sequence ();
576 emit_insn (insns);
577 return;
580 /* Truncating multi-word to a word or less. */
581 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
582 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
584 if (!((MEM_P (from)
585 && ! MEM_VOLATILE_P (from)
586 && direct_load[(int) to_mode]
587 && ! mode_dependent_address_p (XEXP (from, 0),
588 MEM_ADDR_SPACE (from)))
589 || REG_P (from)
590 || GET_CODE (from) == SUBREG))
591 from = force_reg (from_mode, from);
592 convert_move (to, gen_lowpart (word_mode, from), 0);
593 return;
596 /* Now follow all the conversions between integers
597 no more than a word long. */
599 /* For truncation, usually we can just refer to FROM in a narrower mode. */
600 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
601 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
603 if (!((MEM_P (from)
604 && ! MEM_VOLATILE_P (from)
605 && direct_load[(int) to_mode]
606 && ! mode_dependent_address_p (XEXP (from, 0),
607 MEM_ADDR_SPACE (from)))
608 || REG_P (from)
609 || GET_CODE (from) == SUBREG))
610 from = force_reg (from_mode, from);
611 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
612 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
613 from = copy_to_reg (from);
614 emit_move_insn (to, gen_lowpart (to_mode, from));
615 return;
618 /* Handle extension. */
619 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
621 /* Convert directly if that works. */
622 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
623 != CODE_FOR_nothing)
625 emit_unop_insn (code, to, from, equiv_code);
626 return;
628 else
630 machine_mode intermediate;
631 rtx tmp;
632 int shift_amount;
634 /* Search for a mode to convert via. */
635 for (intermediate = from_mode; intermediate != VOIDmode;
636 intermediate = GET_MODE_WIDER_MODE (intermediate))
637 if (((can_extend_p (to_mode, intermediate, unsignedp)
638 != CODE_FOR_nothing)
639 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
640 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
641 && (can_extend_p (intermediate, from_mode, unsignedp)
642 != CODE_FOR_nothing))
644 convert_move (to, convert_to_mode (intermediate, from,
645 unsignedp), unsignedp);
646 return;
649 /* No suitable intermediate mode.
650 Generate what we need with shifts. */
651 shift_amount = (GET_MODE_PRECISION (to_mode)
652 - GET_MODE_PRECISION (from_mode));
653 from = gen_lowpart (to_mode, force_reg (from_mode, from));
654 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
655 to, unsignedp);
656 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
657 to, unsignedp);
658 if (tmp != to)
659 emit_move_insn (to, tmp);
660 return;
664 /* Support special truncate insns for certain modes. */
665 if (convert_optab_handler (trunc_optab, to_mode,
666 from_mode) != CODE_FOR_nothing)
668 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
669 to, from, UNKNOWN);
670 return;
673 /* Handle truncation of volatile memrefs, and so on;
674 the things that couldn't be truncated directly,
675 and for which there was no special instruction.
677 ??? Code above formerly short-circuited this, for most integer
678 mode pairs, with a force_reg in from_mode followed by a recursive
679 call to this routine. Appears always to have been wrong. */
680 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
682 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
683 emit_move_insn (to, temp);
684 return;
687 /* Mode combination is not recognized. */
688 gcc_unreachable ();
691 /* Return an rtx for a value that would result
692 from converting X to mode MODE.
693 Both X and MODE may be floating, or both integer.
694 UNSIGNEDP is nonzero if X is an unsigned value.
695 This can be done by referring to a part of X in place
696 or by copying to a new temporary with conversion. */
699 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
701 return convert_modes (mode, VOIDmode, x, unsignedp);
704 /* Return an rtx for a value that would result
705 from converting X from mode OLDMODE to mode MODE.
706 Both modes may be floating, or both integer.
707 UNSIGNEDP is nonzero if X is an unsigned value.
709 This can be done by referring to a part of X in place
710 or by copying to a new temporary with conversion.
712 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
715 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
717 rtx temp;
719 /* If FROM is a SUBREG that indicates that we have already done at least
720 the required extension, strip it. */
722 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
723 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
724 && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
725 x = gen_lowpart (mode, SUBREG_REG (x));
727 if (GET_MODE (x) != VOIDmode)
728 oldmode = GET_MODE (x);
730 if (mode == oldmode)
731 return x;
733 if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
735 /* If the caller did not tell us the old mode, then there is not
736 much to do with respect to canonicalization. We have to
737 assume that all the bits are significant. */
738 if (GET_MODE_CLASS (oldmode) != MODE_INT)
739 oldmode = MAX_MODE_INT;
740 wide_int w = wide_int::from (std::make_pair (x, oldmode),
741 GET_MODE_PRECISION (mode),
742 unsignedp ? UNSIGNED : SIGNED);
743 return immed_wide_int_const (w, mode);
746 /* We can do this with a gen_lowpart if both desired and current modes
747 are integer, and this is either a constant integer, a register, or a
748 non-volatile MEM. */
749 if (GET_MODE_CLASS (mode) == MODE_INT
750 && GET_MODE_CLASS (oldmode) == MODE_INT
751 && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
752 && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
753 || (REG_P (x)
754 && (!HARD_REGISTER_P (x)
755 || HARD_REGNO_MODE_OK (REGNO (x), mode))
756 && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
758 return gen_lowpart (mode, x);
760 /* Converting from integer constant into mode is always equivalent to an
761 subreg operation. */
762 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
764 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
765 return simplify_gen_subreg (mode, x, oldmode, 0);
768 temp = gen_reg_rtx (mode);
769 convert_move (temp, x, unsignedp);
770 return temp;
773 /* Return the largest alignment we can use for doing a move (or store)
774 of MAX_PIECES. ALIGN is the largest alignment we could use. */
776 static unsigned int
777 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
779 machine_mode tmode;
781 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
782 if (align >= GET_MODE_ALIGNMENT (tmode))
783 align = GET_MODE_ALIGNMENT (tmode);
784 else
786 machine_mode tmode, xmode;
788 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
789 tmode != VOIDmode;
790 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
791 if (GET_MODE_SIZE (tmode) > max_pieces
792 || SLOW_UNALIGNED_ACCESS (tmode, align))
793 break;
795 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
798 return align;
801 /* Return the widest integer mode no wider than SIZE. If no such mode
802 can be found, return VOIDmode. */
804 static machine_mode
805 widest_int_mode_for_size (unsigned int size)
807 machine_mode tmode, mode = VOIDmode;
809 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
810 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
811 if (GET_MODE_SIZE (tmode) < size)
812 mode = tmode;
814 return mode;
817 /* Determine whether the LEN bytes can be moved by using several move
818 instructions. Return nonzero if a call to move_by_pieces should
819 succeed. */
822 can_move_by_pieces (unsigned HOST_WIDE_INT len,
823 unsigned int align)
825 return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
826 optimize_insn_for_speed_p ());
829 /* Generate several move instructions to copy LEN bytes from block FROM to
830 block TO. (These are MEM rtx's with BLKmode).
832 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
833 used to push FROM to the stack.
835 ALIGN is maximum stack alignment we can assume.
837 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
838 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
839 stpcpy. */
842 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
843 unsigned int align, int endp)
845 struct move_by_pieces_d data;
846 machine_mode to_addr_mode;
847 machine_mode from_addr_mode = get_address_mode (from);
848 rtx to_addr, from_addr = XEXP (from, 0);
849 unsigned int max_size = MOVE_MAX_PIECES + 1;
850 enum insn_code icode;
852 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
854 data.offset = 0;
855 data.from_addr = from_addr;
856 if (to)
858 to_addr_mode = get_address_mode (to);
859 to_addr = XEXP (to, 0);
860 data.to = to;
861 data.autinc_to
862 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
863 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
864 data.reverse
865 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
867 else
869 to_addr_mode = VOIDmode;
870 to_addr = NULL_RTX;
871 data.to = NULL_RTX;
872 data.autinc_to = 1;
873 #ifdef STACK_GROWS_DOWNWARD
874 data.reverse = 1;
875 #else
876 data.reverse = 0;
877 #endif
879 data.to_addr = to_addr;
880 data.from = from;
881 data.autinc_from
882 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
883 || GET_CODE (from_addr) == POST_INC
884 || GET_CODE (from_addr) == POST_DEC);
886 data.explicit_inc_from = 0;
887 data.explicit_inc_to = 0;
888 if (data.reverse) data.offset = len;
889 data.len = len;
891 /* If copying requires more than two move insns,
892 copy addresses to registers (to make displacements shorter)
893 and use post-increment if available. */
894 if (!(data.autinc_from && data.autinc_to)
895 && move_by_pieces_ninsns (len, align, max_size) > 2)
897 /* Find the mode of the largest move...
898 MODE might not be used depending on the definitions of the
899 USE_* macros below. */
900 machine_mode mode ATTRIBUTE_UNUSED
901 = widest_int_mode_for_size (max_size);
903 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
905 data.from_addr = copy_to_mode_reg (from_addr_mode,
906 plus_constant (from_addr_mode,
907 from_addr, len));
908 data.autinc_from = 1;
909 data.explicit_inc_from = -1;
911 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
913 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
914 data.autinc_from = 1;
915 data.explicit_inc_from = 1;
917 if (!data.autinc_from && CONSTANT_P (from_addr))
918 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
919 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
921 data.to_addr = copy_to_mode_reg (to_addr_mode,
922 plus_constant (to_addr_mode,
923 to_addr, len));
924 data.autinc_to = 1;
925 data.explicit_inc_to = -1;
927 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
929 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
930 data.autinc_to = 1;
931 data.explicit_inc_to = 1;
933 if (!data.autinc_to && CONSTANT_P (to_addr))
934 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
937 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
942 while (max_size > 1 && data.len > 0)
944 machine_mode mode = widest_int_mode_for_size (max_size);
946 if (mode == VOIDmode)
947 break;
949 icode = optab_handler (mov_optab, mode);
950 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
951 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
953 max_size = GET_MODE_SIZE (mode);
956 /* The code above should have handled everything. */
957 gcc_assert (!data.len);
959 if (endp)
961 rtx to1;
963 gcc_assert (!data.reverse);
964 if (data.autinc_to)
966 if (endp == 2)
968 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
969 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
970 else
971 data.to_addr = copy_to_mode_reg (to_addr_mode,
972 plus_constant (to_addr_mode,
973 data.to_addr,
974 -1));
976 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
977 data.offset);
979 else
981 if (endp == 2)
982 --data.offset;
983 to1 = adjust_address (data.to, QImode, data.offset);
985 return to1;
987 else
988 return data.to;
991 /* Return number of insns required to move L bytes by pieces.
992 ALIGN (in bits) is maximum alignment we can assume. */
994 unsigned HOST_WIDE_INT
995 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
996 unsigned int max_size)
998 unsigned HOST_WIDE_INT n_insns = 0;
1000 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1002 while (max_size > 1 && l > 0)
1004 machine_mode mode;
1005 enum insn_code icode;
1007 mode = widest_int_mode_for_size (max_size);
1009 if (mode == VOIDmode)
1010 break;
1012 icode = optab_handler (mov_optab, mode);
1013 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1014 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1016 max_size = GET_MODE_SIZE (mode);
1019 gcc_assert (!l);
1020 return n_insns;
1023 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1024 with move instructions for mode MODE. GENFUN is the gen_... function
1025 to make a move insn for that mode. DATA has all the other info. */
1027 static void
1028 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1029 struct move_by_pieces_d *data)
1031 unsigned int size = GET_MODE_SIZE (mode);
1032 rtx to1 = NULL_RTX, from1;
1034 while (data->len >= size)
1036 if (data->reverse)
1037 data->offset -= size;
1039 if (data->to)
1041 if (data->autinc_to)
1042 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1043 data->offset);
1044 else
1045 to1 = adjust_address (data->to, mode, data->offset);
1048 if (data->autinc_from)
1049 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1050 data->offset);
1051 else
1052 from1 = adjust_address (data->from, mode, data->offset);
1054 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1055 emit_insn (gen_add2_insn (data->to_addr,
1056 gen_int_mode (-(HOST_WIDE_INT) size,
1057 GET_MODE (data->to_addr))));
1058 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1059 emit_insn (gen_add2_insn (data->from_addr,
1060 gen_int_mode (-(HOST_WIDE_INT) size,
1061 GET_MODE (data->from_addr))));
1063 if (data->to)
1064 emit_insn ((*genfun) (to1, from1));
1065 else
1067 #ifdef PUSH_ROUNDING
1068 emit_single_push_insn (mode, from1, NULL);
1069 #else
1070 gcc_unreachable ();
1071 #endif
1074 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 gen_int_mode (size,
1077 GET_MODE (data->to_addr))));
1078 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1079 emit_insn (gen_add2_insn (data->from_addr,
1080 gen_int_mode (size,
1081 GET_MODE (data->from_addr))));
1083 if (! data->reverse)
1084 data->offset += size;
1086 data->len -= size;
1090 /* Emit code to move a block Y to a block X. This may be done with
1091 string-move instructions, with multiple scalar move instructions,
1092 or with a library call.
1094 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1095 SIZE is an rtx that says how long they are.
1096 ALIGN is the maximum alignment we can assume they have.
1097 METHOD describes what kind of copy this is, and what mechanisms may be used.
1098 MIN_SIZE is the minimal size of block to move
1099 MAX_SIZE is the maximal size of block to move, if it can not be represented
1100 in unsigned HOST_WIDE_INT, than it is mask of all ones.
1102 Return the address of the new block, if memcpy is called and returns it,
1103 0 otherwise. */
1106 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1107 unsigned int expected_align, HOST_WIDE_INT expected_size,
1108 unsigned HOST_WIDE_INT min_size,
1109 unsigned HOST_WIDE_INT max_size,
1110 unsigned HOST_WIDE_INT probable_max_size)
1112 bool may_use_call;
1113 rtx retval = 0;
1114 unsigned int align;
1116 gcc_assert (size);
1117 if (CONST_INT_P (size)
1118 && INTVAL (size) == 0)
1119 return 0;
1121 switch (method)
1123 case BLOCK_OP_NORMAL:
1124 case BLOCK_OP_TAILCALL:
1125 may_use_call = true;
1126 break;
1128 case BLOCK_OP_CALL_PARM:
1129 may_use_call = block_move_libcall_safe_for_call_parm ();
1131 /* Make inhibit_defer_pop nonzero around the library call
1132 to force it to pop the arguments right away. */
1133 NO_DEFER_POP;
1134 break;
1136 case BLOCK_OP_NO_LIBCALL:
1137 may_use_call = false;
1138 break;
1140 default:
1141 gcc_unreachable ();
1144 gcc_assert (MEM_P (x) && MEM_P (y));
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1146 gcc_assert (align >= BITS_PER_UNIT);
1148 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1149 block copy is more efficient for other large modes, e.g. DCmode. */
1150 x = adjust_address (x, BLKmode, 0);
1151 y = adjust_address (y, BLKmode, 0);
1153 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1154 can be incorrect is coming from __builtin_memcpy. */
1155 if (CONST_INT_P (size))
1157 x = shallow_copy_rtx (x);
1158 y = shallow_copy_rtx (y);
1159 set_mem_size (x, INTVAL (size));
1160 set_mem_size (y, INTVAL (size));
1163 if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1164 move_by_pieces (x, y, INTVAL (size), align, 0);
1165 else if (emit_block_move_via_movmem (x, y, size, align,
1166 expected_align, expected_size,
1167 min_size, max_size, probable_max_size))
1169 else if (may_use_call
1170 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1171 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1173 /* Since x and y are passed to a libcall, mark the corresponding
1174 tree EXPR as addressable. */
1175 tree y_expr = MEM_EXPR (y);
1176 tree x_expr = MEM_EXPR (x);
1177 if (y_expr)
1178 mark_addressable (y_expr);
1179 if (x_expr)
1180 mark_addressable (x_expr);
1181 retval = emit_block_move_via_libcall (x, y, size,
1182 method == BLOCK_OP_TAILCALL);
1185 else
1186 emit_block_move_via_loop (x, y, size, align);
1188 if (method == BLOCK_OP_CALL_PARM)
1189 OK_DEFER_POP;
1191 return retval;
1195 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1197 unsigned HOST_WIDE_INT max, min = 0;
1198 if (GET_CODE (size) == CONST_INT)
1199 min = max = UINTVAL (size);
1200 else
1201 max = GET_MODE_MASK (GET_MODE (size));
1202 return emit_block_move_hints (x, y, size, method, 0, -1,
1203 min, max, max);
1206 /* A subroutine of emit_block_move. Returns true if calling the
1207 block move libcall will not clobber any parameters which may have
1208 already been placed on the stack. */
1210 static bool
1211 block_move_libcall_safe_for_call_parm (void)
1213 #if defined (REG_PARM_STACK_SPACE)
1214 tree fn;
1215 #endif
1217 /* If arguments are pushed on the stack, then they're safe. */
1218 if (PUSH_ARGS)
1219 return true;
1221 /* If registers go on the stack anyway, any argument is sure to clobber
1222 an outgoing argument. */
1223 #if defined (REG_PARM_STACK_SPACE)
1224 fn = emit_block_move_libcall_fn (false);
1225 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1226 depend on its argument. */
1227 (void) fn;
1228 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1229 && REG_PARM_STACK_SPACE (fn) != 0)
1230 return false;
1231 #endif
1233 /* If any argument goes in memory, then it might clobber an outgoing
1234 argument. */
1236 CUMULATIVE_ARGS args_so_far_v;
1237 cumulative_args_t args_so_far;
1238 tree fn, arg;
1240 fn = emit_block_move_libcall_fn (false);
1241 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1242 args_so_far = pack_cumulative_args (&args_so_far_v);
1244 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1245 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1247 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1248 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1249 NULL_TREE, true);
1250 if (!tmp || !REG_P (tmp))
1251 return false;
1252 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1253 return false;
1254 targetm.calls.function_arg_advance (args_so_far, mode,
1255 NULL_TREE, true);
1258 return true;
1261 /* A subroutine of emit_block_move. Expand a movmem pattern;
1262 return true if successful. */
1264 static bool
1265 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1266 unsigned int expected_align, HOST_WIDE_INT expected_size,
1267 unsigned HOST_WIDE_INT min_size,
1268 unsigned HOST_WIDE_INT max_size,
1269 unsigned HOST_WIDE_INT probable_max_size)
1271 int save_volatile_ok = volatile_ok;
1272 machine_mode mode;
1274 if (expected_align < align)
1275 expected_align = align;
1276 if (expected_size != -1)
1278 if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1279 expected_size = probable_max_size;
1280 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1281 expected_size = min_size;
1284 /* Since this is a move insn, we don't care about volatility. */
1285 volatile_ok = 1;
1287 /* Try the most limited insn first, because there's no point
1288 including more than one in the machine description unless
1289 the more limited one has some advantage. */
1291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1292 mode = GET_MODE_WIDER_MODE (mode))
1294 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1296 if (code != CODE_FOR_nothing
1297 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1298 here because if SIZE is less than the mode mask, as it is
1299 returned by the macro, it will definitely be less than the
1300 actual mode mask. Since SIZE is within the Pmode address
1301 space, we limit MODE to Pmode. */
1302 && ((CONST_INT_P (size)
1303 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1304 <= (GET_MODE_MASK (mode) >> 1)))
1305 || max_size <= (GET_MODE_MASK (mode) >> 1)
1306 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1308 struct expand_operand ops[9];
1309 unsigned int nops;
1311 /* ??? When called via emit_block_move_for_call, it'd be
1312 nice if there were some way to inform the backend, so
1313 that it doesn't fail the expansion because it thinks
1314 emitting the libcall would be more efficient. */
1315 nops = insn_data[(int) code].n_generator_args;
1316 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1318 create_fixed_operand (&ops[0], x);
1319 create_fixed_operand (&ops[1], y);
1320 /* The check above guarantees that this size conversion is valid. */
1321 create_convert_operand_to (&ops[2], size, mode, true);
1322 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1323 if (nops >= 6)
1325 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1326 create_integer_operand (&ops[5], expected_size);
1328 if (nops >= 8)
1330 create_integer_operand (&ops[6], min_size);
1331 /* If we can not represent the maximal size,
1332 make parameter NULL. */
1333 if ((HOST_WIDE_INT) max_size != -1)
1334 create_integer_operand (&ops[7], max_size);
1335 else
1336 create_fixed_operand (&ops[7], NULL);
1338 if (nops == 9)
1340 /* If we can not represent the maximal size,
1341 make parameter NULL. */
1342 if ((HOST_WIDE_INT) probable_max_size != -1)
1343 create_integer_operand (&ops[8], probable_max_size);
1344 else
1345 create_fixed_operand (&ops[8], NULL);
1347 if (maybe_expand_insn (code, nops, ops))
1349 volatile_ok = save_volatile_ok;
1350 return true;
1355 volatile_ok = save_volatile_ok;
1356 return false;
1359 /* A subroutine of emit_block_move. Expand a call to memcpy.
1360 Return the return value from memcpy, 0 otherwise. */
1363 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1365 rtx dst_addr, src_addr;
1366 tree call_expr, fn, src_tree, dst_tree, size_tree;
1367 machine_mode size_mode;
1368 rtx retval;
1370 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1371 pseudos. We can then place those new pseudos into a VAR_DECL and
1372 use them later. */
1374 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1375 src_addr = copy_addr_to_reg (XEXP (src, 0));
1377 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1378 src_addr = convert_memory_address (ptr_mode, src_addr);
1380 dst_tree = make_tree (ptr_type_node, dst_addr);
1381 src_tree = make_tree (ptr_type_node, src_addr);
1383 size_mode = TYPE_MODE (sizetype);
1385 size = convert_to_mode (size_mode, size, 1);
1386 size = copy_to_mode_reg (size_mode, size);
1388 /* It is incorrect to use the libcall calling conventions to call
1389 memcpy in this context. This could be a user call to memcpy and
1390 the user may wish to examine the return value from memcpy. For
1391 targets where libcalls and normal calls have different conventions
1392 for returning pointers, we could end up generating incorrect code. */
1394 size_tree = make_tree (sizetype, size);
1396 fn = emit_block_move_libcall_fn (true);
1397 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1398 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1400 retval = expand_normal (call_expr);
1402 return retval;
1405 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1406 for the function we use for block copies. */
1408 static GTY(()) tree block_move_fn;
1410 void
1411 init_block_move_fn (const char *asmspec)
1413 if (!block_move_fn)
1415 tree args, fn, attrs, attr_args;
1417 fn = get_identifier ("memcpy");
1418 args = build_function_type_list (ptr_type_node, ptr_type_node,
1419 const_ptr_type_node, sizetype,
1420 NULL_TREE);
1422 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1423 DECL_EXTERNAL (fn) = 1;
1424 TREE_PUBLIC (fn) = 1;
1425 DECL_ARTIFICIAL (fn) = 1;
1426 TREE_NOTHROW (fn) = 1;
1427 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1428 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1430 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1431 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1433 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1435 block_move_fn = fn;
1438 if (asmspec)
1439 set_user_assembler_name (block_move_fn, asmspec);
1442 static tree
1443 emit_block_move_libcall_fn (int for_call)
1445 static bool emitted_extern;
1447 if (!block_move_fn)
1448 init_block_move_fn (NULL);
1450 if (for_call && !emitted_extern)
1452 emitted_extern = true;
1453 make_decl_rtl (block_move_fn);
1456 return block_move_fn;
1459 /* A subroutine of emit_block_move. Copy the data via an explicit
1460 loop. This is used only when libcalls are forbidden. */
1461 /* ??? It'd be nice to copy in hunks larger than QImode. */
1463 static void
1464 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1465 unsigned int align ATTRIBUTE_UNUSED)
1467 rtx_code_label *cmp_label, *top_label;
1468 rtx iter, x_addr, y_addr, tmp;
1469 machine_mode x_addr_mode = get_address_mode (x);
1470 machine_mode y_addr_mode = get_address_mode (y);
1471 machine_mode iter_mode;
1473 iter_mode = GET_MODE (size);
1474 if (iter_mode == VOIDmode)
1475 iter_mode = word_mode;
1477 top_label = gen_label_rtx ();
1478 cmp_label = gen_label_rtx ();
1479 iter = gen_reg_rtx (iter_mode);
1481 emit_move_insn (iter, const0_rtx);
1483 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1484 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1485 do_pending_stack_adjust ();
1487 emit_jump (cmp_label);
1488 emit_label (top_label);
1490 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1491 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1493 if (x_addr_mode != y_addr_mode)
1494 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1495 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1497 x = change_address (x, QImode, x_addr);
1498 y = change_address (y, QImode, y_addr);
1500 emit_move_insn (x, y);
1502 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1503 true, OPTAB_LIB_WIDEN);
1504 if (tmp != iter)
1505 emit_move_insn (iter, tmp);
1507 emit_label (cmp_label);
1509 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1510 true, top_label, REG_BR_PROB_BASE * 90 / 100);
1513 /* Copy all or part of a value X into registers starting at REGNO.
1514 The number of registers to be filled is NREGS. */
1516 void
1517 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1519 int i;
1520 #ifdef HAVE_load_multiple
1521 rtx pat;
1522 rtx_insn *last;
1523 #endif
1525 if (nregs == 0)
1526 return;
1528 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1529 x = validize_mem (force_const_mem (mode, x));
1531 /* See if the machine can do this with a load multiple insn. */
1532 #ifdef HAVE_load_multiple
1533 if (HAVE_load_multiple)
1535 last = get_last_insn ();
1536 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1537 GEN_INT (nregs));
1538 if (pat)
1540 emit_insn (pat);
1541 return;
1543 else
1544 delete_insns_since (last);
1546 #endif
1548 for (i = 0; i < nregs; i++)
1549 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1550 operand_subword_force (x, i, mode));
1553 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1554 The number of registers to be filled is NREGS. */
1556 void
1557 move_block_from_reg (int regno, rtx x, int nregs)
1559 int i;
1561 if (nregs == 0)
1562 return;
1564 /* See if the machine can do this with a store multiple insn. */
1565 #ifdef HAVE_store_multiple
1566 if (HAVE_store_multiple)
1568 rtx_insn *last = get_last_insn ();
1569 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1570 GEN_INT (nregs));
1571 if (pat)
1573 emit_insn (pat);
1574 return;
1576 else
1577 delete_insns_since (last);
1579 #endif
1581 for (i = 0; i < nregs; i++)
1583 rtx tem = operand_subword (x, i, 1, BLKmode);
1585 gcc_assert (tem);
1587 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1591 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1592 ORIG, where ORIG is a non-consecutive group of registers represented by
1593 a PARALLEL. The clone is identical to the original except in that the
1594 original set of registers is replaced by a new set of pseudo registers.
1595 The new set has the same modes as the original set. */
1598 gen_group_rtx (rtx orig)
1600 int i, length;
1601 rtx *tmps;
1603 gcc_assert (GET_CODE (orig) == PARALLEL);
1605 length = XVECLEN (orig, 0);
1606 tmps = XALLOCAVEC (rtx, length);
1608 /* Skip a NULL entry in first slot. */
1609 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1611 if (i)
1612 tmps[0] = 0;
1614 for (; i < length; i++)
1616 machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1617 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1619 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1622 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1625 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1626 except that values are placed in TMPS[i], and must later be moved
1627 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1629 static void
1630 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1632 rtx src;
1633 int start, i;
1634 machine_mode m = GET_MODE (orig_src);
1636 gcc_assert (GET_CODE (dst) == PARALLEL);
1638 if (m != VOIDmode
1639 && !SCALAR_INT_MODE_P (m)
1640 && !MEM_P (orig_src)
1641 && GET_CODE (orig_src) != CONCAT)
1643 machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1644 if (imode == BLKmode)
1645 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1646 else
1647 src = gen_reg_rtx (imode);
1648 if (imode != BLKmode)
1649 src = gen_lowpart (GET_MODE (orig_src), src);
1650 emit_move_insn (src, orig_src);
1651 /* ...and back again. */
1652 if (imode != BLKmode)
1653 src = gen_lowpart (imode, src);
1654 emit_group_load_1 (tmps, dst, src, type, ssize);
1655 return;
1658 /* Check for a NULL entry, used to indicate that the parameter goes
1659 both on the stack and in registers. */
1660 if (XEXP (XVECEXP (dst, 0, 0), 0))
1661 start = 0;
1662 else
1663 start = 1;
1665 /* Process the pieces. */
1666 for (i = start; i < XVECLEN (dst, 0); i++)
1668 machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1669 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1670 unsigned int bytelen = GET_MODE_SIZE (mode);
1671 int shift = 0;
1673 /* Handle trailing fragments that run over the size of the struct. */
1674 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1676 /* Arrange to shift the fragment to where it belongs.
1677 extract_bit_field loads to the lsb of the reg. */
1678 if (
1679 #ifdef BLOCK_REG_PADDING
1680 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1681 == (BYTES_BIG_ENDIAN ? upward : downward)
1682 #else
1683 BYTES_BIG_ENDIAN
1684 #endif
1686 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1687 bytelen = ssize - bytepos;
1688 gcc_assert (bytelen > 0);
1691 /* If we won't be loading directly from memory, protect the real source
1692 from strange tricks we might play; but make sure that the source can
1693 be loaded directly into the destination. */
1694 src = orig_src;
1695 if (!MEM_P (orig_src)
1696 && (!CONSTANT_P (orig_src)
1697 || (GET_MODE (orig_src) != mode
1698 && GET_MODE (orig_src) != VOIDmode)))
1700 if (GET_MODE (orig_src) == VOIDmode)
1701 src = gen_reg_rtx (mode);
1702 else
1703 src = gen_reg_rtx (GET_MODE (orig_src));
1705 emit_move_insn (src, orig_src);
1708 /* Optimize the access just a bit. */
1709 if (MEM_P (src)
1710 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1711 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1712 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1713 && bytelen == GET_MODE_SIZE (mode))
1715 tmps[i] = gen_reg_rtx (mode);
1716 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1718 else if (COMPLEX_MODE_P (mode)
1719 && GET_MODE (src) == mode
1720 && bytelen == GET_MODE_SIZE (mode))
1721 /* Let emit_move_complex do the bulk of the work. */
1722 tmps[i] = src;
1723 else if (GET_CODE (src) == CONCAT)
1725 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1726 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1728 if ((bytepos == 0 && bytelen == slen0)
1729 || (bytepos != 0 && bytepos + bytelen <= slen))
1731 /* The following assumes that the concatenated objects all
1732 have the same size. In this case, a simple calculation
1733 can be used to determine the object and the bit field
1734 to be extracted. */
1735 tmps[i] = XEXP (src, bytepos / slen0);
1736 if (! CONSTANT_P (tmps[i])
1737 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1738 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1739 (bytepos % slen0) * BITS_PER_UNIT,
1740 1, NULL_RTX, mode, mode, false);
1742 else
1744 rtx mem;
1746 gcc_assert (!bytepos);
1747 mem = assign_stack_temp (GET_MODE (src), slen);
1748 emit_move_insn (mem, src);
1749 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1750 0, 1, NULL_RTX, mode, mode, false);
1753 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1754 SIMD register, which is currently broken. While we get GCC
1755 to emit proper RTL for these cases, let's dump to memory. */
1756 else if (VECTOR_MODE_P (GET_MODE (dst))
1757 && REG_P (src))
1759 int slen = GET_MODE_SIZE (GET_MODE (src));
1760 rtx mem;
1762 mem = assign_stack_temp (GET_MODE (src), slen);
1763 emit_move_insn (mem, src);
1764 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1766 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1767 && XVECLEN (dst, 0) > 1)
1768 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1769 else if (CONSTANT_P (src))
1771 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1773 if (len == ssize)
1774 tmps[i] = src;
1775 else
1777 rtx first, second;
1779 /* TODO: const_wide_int can have sizes other than this... */
1780 gcc_assert (2 * len == ssize);
1781 split_double (src, &first, &second);
1782 if (i)
1783 tmps[i] = second;
1784 else
1785 tmps[i] = first;
1788 else if (REG_P (src) && GET_MODE (src) == mode)
1789 tmps[i] = src;
1790 else
1791 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1792 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1793 mode, mode, false);
1795 if (shift)
1796 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1797 shift, tmps[i], 0);
1801 /* Emit code to move a block SRC of type TYPE to a block DST,
1802 where DST is non-consecutive registers represented by a PARALLEL.
1803 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1804 if not known. */
1806 void
1807 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1809 rtx *tmps;
1810 int i;
1812 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1813 emit_group_load_1 (tmps, dst, src, type, ssize);
1815 /* Copy the extracted pieces into the proper (probable) hard regs. */
1816 for (i = 0; i < XVECLEN (dst, 0); i++)
1818 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1819 if (d == NULL)
1820 continue;
1821 emit_move_insn (d, tmps[i]);
1825 /* Similar, but load SRC into new pseudos in a format that looks like
1826 PARALLEL. This can later be fed to emit_group_move to get things
1827 in the right place. */
1830 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1832 rtvec vec;
1833 int i;
1835 vec = rtvec_alloc (XVECLEN (parallel, 0));
1836 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1838 /* Convert the vector to look just like the original PARALLEL, except
1839 with the computed values. */
1840 for (i = 0; i < XVECLEN (parallel, 0); i++)
1842 rtx e = XVECEXP (parallel, 0, i);
1843 rtx d = XEXP (e, 0);
1845 if (d)
1847 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1848 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1850 RTVEC_ELT (vec, i) = e;
1853 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1856 /* Emit code to move a block SRC to block DST, where SRC and DST are
1857 non-consecutive groups of registers, each represented by a PARALLEL. */
1859 void
1860 emit_group_move (rtx dst, rtx src)
1862 int i;
1864 gcc_assert (GET_CODE (src) == PARALLEL
1865 && GET_CODE (dst) == PARALLEL
1866 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1868 /* Skip first entry if NULL. */
1869 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1870 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1871 XEXP (XVECEXP (src, 0, i), 0));
1874 /* Move a group of registers represented by a PARALLEL into pseudos. */
1877 emit_group_move_into_temps (rtx src)
1879 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1880 int i;
1882 for (i = 0; i < XVECLEN (src, 0); i++)
1884 rtx e = XVECEXP (src, 0, i);
1885 rtx d = XEXP (e, 0);
1887 if (d)
1888 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1889 RTVEC_ELT (vec, i) = e;
1892 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1895 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1896 where SRC is non-consecutive registers represented by a PARALLEL.
1897 SSIZE represents the total size of block ORIG_DST, or -1 if not
1898 known. */
1900 void
1901 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1903 rtx *tmps, dst;
1904 int start, finish, i;
1905 machine_mode m = GET_MODE (orig_dst);
1907 gcc_assert (GET_CODE (src) == PARALLEL);
1909 if (!SCALAR_INT_MODE_P (m)
1910 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1912 machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1913 if (imode == BLKmode)
1914 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1915 else
1916 dst = gen_reg_rtx (imode);
1917 emit_group_store (dst, src, type, ssize);
1918 if (imode != BLKmode)
1919 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1920 emit_move_insn (orig_dst, dst);
1921 return;
1924 /* Check for a NULL entry, used to indicate that the parameter goes
1925 both on the stack and in registers. */
1926 if (XEXP (XVECEXP (src, 0, 0), 0))
1927 start = 0;
1928 else
1929 start = 1;
1930 finish = XVECLEN (src, 0);
1932 tmps = XALLOCAVEC (rtx, finish);
1934 /* Copy the (probable) hard regs into pseudos. */
1935 for (i = start; i < finish; i++)
1937 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1938 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1940 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1941 emit_move_insn (tmps[i], reg);
1943 else
1944 tmps[i] = reg;
1947 /* If we won't be storing directly into memory, protect the real destination
1948 from strange tricks we might play. */
1949 dst = orig_dst;
1950 if (GET_CODE (dst) == PARALLEL)
1952 rtx temp;
1954 /* We can get a PARALLEL dst if there is a conditional expression in
1955 a return statement. In that case, the dst and src are the same,
1956 so no action is necessary. */
1957 if (rtx_equal_p (dst, src))
1958 return;
1960 /* It is unclear if we can ever reach here, but we may as well handle
1961 it. Allocate a temporary, and split this into a store/load to/from
1962 the temporary. */
1963 temp = assign_stack_temp (GET_MODE (dst), ssize);
1964 emit_group_store (temp, src, type, ssize);
1965 emit_group_load (dst, temp, type, ssize);
1966 return;
1968 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1970 machine_mode outer = GET_MODE (dst);
1971 machine_mode inner;
1972 HOST_WIDE_INT bytepos;
1973 bool done = false;
1974 rtx temp;
1976 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1977 dst = gen_reg_rtx (outer);
1979 /* Make life a bit easier for combine. */
1980 /* If the first element of the vector is the low part
1981 of the destination mode, use a paradoxical subreg to
1982 initialize the destination. */
1983 if (start < finish)
1985 inner = GET_MODE (tmps[start]);
1986 bytepos = subreg_lowpart_offset (inner, outer);
1987 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1989 temp = simplify_gen_subreg (outer, tmps[start],
1990 inner, 0);
1991 if (temp)
1993 emit_move_insn (dst, temp);
1994 done = true;
1995 start++;
2000 /* If the first element wasn't the low part, try the last. */
2001 if (!done
2002 && start < finish - 1)
2004 inner = GET_MODE (tmps[finish - 1]);
2005 bytepos = subreg_lowpart_offset (inner, outer);
2006 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2008 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2009 inner, 0);
2010 if (temp)
2012 emit_move_insn (dst, temp);
2013 done = true;
2014 finish--;
2019 /* Otherwise, simply initialize the result to zero. */
2020 if (!done)
2021 emit_move_insn (dst, CONST0_RTX (outer));
2024 /* Process the pieces. */
2025 for (i = start; i < finish; i++)
2027 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2028 machine_mode mode = GET_MODE (tmps[i]);
2029 unsigned int bytelen = GET_MODE_SIZE (mode);
2030 unsigned int adj_bytelen;
2031 rtx dest = dst;
2033 /* Handle trailing fragments that run over the size of the struct. */
2034 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2035 adj_bytelen = ssize - bytepos;
2036 else
2037 adj_bytelen = bytelen;
2039 if (GET_CODE (dst) == CONCAT)
2041 if (bytepos + adj_bytelen
2042 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2043 dest = XEXP (dst, 0);
2044 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2046 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2047 dest = XEXP (dst, 1);
2049 else
2051 machine_mode dest_mode = GET_MODE (dest);
2052 machine_mode tmp_mode = GET_MODE (tmps[i]);
2054 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2056 if (GET_MODE_ALIGNMENT (dest_mode)
2057 >= GET_MODE_ALIGNMENT (tmp_mode))
2059 dest = assign_stack_temp (dest_mode,
2060 GET_MODE_SIZE (dest_mode));
2061 emit_move_insn (adjust_address (dest,
2062 tmp_mode,
2063 bytepos),
2064 tmps[i]);
2065 dst = dest;
2067 else
2069 dest = assign_stack_temp (tmp_mode,
2070 GET_MODE_SIZE (tmp_mode));
2071 emit_move_insn (dest, tmps[i]);
2072 dst = adjust_address (dest, dest_mode, bytepos);
2074 break;
2078 /* Handle trailing fragments that run over the size of the struct. */
2079 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2081 /* store_bit_field always takes its value from the lsb.
2082 Move the fragment to the lsb if it's not already there. */
2083 if (
2084 #ifdef BLOCK_REG_PADDING
2085 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2086 == (BYTES_BIG_ENDIAN ? upward : downward)
2087 #else
2088 BYTES_BIG_ENDIAN
2089 #endif
2092 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2093 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2094 shift, tmps[i], 0);
2097 /* Make sure not to write past the end of the struct. */
2098 store_bit_field (dest,
2099 adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2100 bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2101 VOIDmode, tmps[i], false);
2104 /* Optimize the access just a bit. */
2105 else if (MEM_P (dest)
2106 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2112 else
2113 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2114 0, 0, mode, tmps[i], false);
2117 /* Copy from the pseudo into the (probable) hard reg. */
2118 if (orig_dst != dst)
2119 emit_move_insn (orig_dst, dst);
2122 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2123 of the value stored in X. */
2126 maybe_emit_group_store (rtx x, tree type)
2128 machine_mode mode = TYPE_MODE (type);
2129 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2130 if (GET_CODE (x) == PARALLEL)
2132 rtx result = gen_reg_rtx (mode);
2133 emit_group_store (result, x, type, int_size_in_bytes (type));
2134 return result;
2136 return x;
2139 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2141 This is used on targets that return BLKmode values in registers. */
2143 void
2144 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2146 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2147 rtx src = NULL, dst = NULL;
2148 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2149 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2150 machine_mode mode = GET_MODE (srcreg);
2151 machine_mode tmode = GET_MODE (target);
2152 machine_mode copy_mode;
2154 /* BLKmode registers created in the back-end shouldn't have survived. */
2155 gcc_assert (mode != BLKmode);
2157 /* If the structure doesn't take up a whole number of words, see whether
2158 SRCREG is padded on the left or on the right. If it's on the left,
2159 set PADDING_CORRECTION to the number of bits to skip.
2161 In most ABIs, the structure will be returned at the least end of
2162 the register, which translates to right padding on little-endian
2163 targets and left padding on big-endian targets. The opposite
2164 holds if the structure is returned at the most significant
2165 end of the register. */
2166 if (bytes % UNITS_PER_WORD != 0
2167 && (targetm.calls.return_in_msb (type)
2168 ? !BYTES_BIG_ENDIAN
2169 : BYTES_BIG_ENDIAN))
2170 padding_correction
2171 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2173 /* We can use a single move if we have an exact mode for the size. */
2174 else if (MEM_P (target)
2175 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2176 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2177 && bytes == GET_MODE_SIZE (mode))
2179 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2180 return;
2183 /* And if we additionally have the same mode for a register. */
2184 else if (REG_P (target)
2185 && GET_MODE (target) == mode
2186 && bytes == GET_MODE_SIZE (mode))
2188 emit_move_insn (target, srcreg);
2189 return;
2192 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2193 into a new pseudo which is a full word. */
2194 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2196 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2197 mode = word_mode;
2200 /* Copy the structure BITSIZE bits at a time. If the target lives in
2201 memory, take care of not reading/writing past its end by selecting
2202 a copy mode suited to BITSIZE. This should always be possible given
2203 how it is computed.
2205 If the target lives in register, make sure not to select a copy mode
2206 larger than the mode of the register.
2208 We could probably emit more efficient code for machines which do not use
2209 strict alignment, but it doesn't seem worth the effort at the current
2210 time. */
2212 copy_mode = word_mode;
2213 if (MEM_P (target))
2215 machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2216 if (mem_mode != BLKmode)
2217 copy_mode = mem_mode;
2219 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2220 copy_mode = tmode;
2222 for (bitpos = 0, xbitpos = padding_correction;
2223 bitpos < bytes * BITS_PER_UNIT;
2224 bitpos += bitsize, xbitpos += bitsize)
2226 /* We need a new source operand each time xbitpos is on a
2227 word boundary and when xbitpos == padding_correction
2228 (the first time through). */
2229 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2230 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2232 /* We need a new destination operand each time bitpos is on
2233 a word boundary. */
2234 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2235 dst = target;
2236 else if (bitpos % BITS_PER_WORD == 0)
2237 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2239 /* Use xbitpos for the source extraction (right justified) and
2240 bitpos for the destination store (left justified). */
2241 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2242 extract_bit_field (src, bitsize,
2243 xbitpos % BITS_PER_WORD, 1,
2244 NULL_RTX, copy_mode, copy_mode,
2245 false),
2246 false);
2250 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2251 register if it contains any data, otherwise return null.
2253 This is used on targets that return BLKmode values in registers. */
2256 copy_blkmode_to_reg (machine_mode mode, tree src)
2258 int i, n_regs;
2259 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2260 unsigned int bitsize;
2261 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2262 machine_mode dst_mode;
2264 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2266 x = expand_normal (src);
2268 bytes = int_size_in_bytes (TREE_TYPE (src));
2269 if (bytes == 0)
2270 return NULL_RTX;
2272 /* If the structure doesn't take up a whole number of words, see
2273 whether the register value should be padded on the left or on
2274 the right. Set PADDING_CORRECTION to the number of padding
2275 bits needed on the left side.
2277 In most ABIs, the structure will be returned at the least end of
2278 the register, which translates to right padding on little-endian
2279 targets and left padding on big-endian targets. The opposite
2280 holds if the structure is returned at the most significant
2281 end of the register. */
2282 if (bytes % UNITS_PER_WORD != 0
2283 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2284 ? !BYTES_BIG_ENDIAN
2285 : BYTES_BIG_ENDIAN))
2286 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2287 * BITS_PER_UNIT));
2289 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2290 dst_words = XALLOCAVEC (rtx, n_regs);
2291 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2293 /* Copy the structure BITSIZE bits at a time. */
2294 for (bitpos = 0, xbitpos = padding_correction;
2295 bitpos < bytes * BITS_PER_UNIT;
2296 bitpos += bitsize, xbitpos += bitsize)
2298 /* We need a new destination pseudo each time xbitpos is
2299 on a word boundary and when xbitpos == padding_correction
2300 (the first time through). */
2301 if (xbitpos % BITS_PER_WORD == 0
2302 || xbitpos == padding_correction)
2304 /* Generate an appropriate register. */
2305 dst_word = gen_reg_rtx (word_mode);
2306 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2308 /* Clear the destination before we move anything into it. */
2309 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2312 /* We need a new source operand each time bitpos is on a word
2313 boundary. */
2314 if (bitpos % BITS_PER_WORD == 0)
2315 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2317 /* Use bitpos for the source extraction (left justified) and
2318 xbitpos for the destination store (right justified). */
2319 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2320 0, 0, word_mode,
2321 extract_bit_field (src_word, bitsize,
2322 bitpos % BITS_PER_WORD, 1,
2323 NULL_RTX, word_mode, word_mode,
2324 false),
2325 false);
2328 if (mode == BLKmode)
2330 /* Find the smallest integer mode large enough to hold the
2331 entire structure. */
2332 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2333 mode != VOIDmode;
2334 mode = GET_MODE_WIDER_MODE (mode))
2335 /* Have we found a large enough mode? */
2336 if (GET_MODE_SIZE (mode) >= bytes)
2337 break;
2339 /* A suitable mode should have been found. */
2340 gcc_assert (mode != VOIDmode);
2343 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2344 dst_mode = word_mode;
2345 else
2346 dst_mode = mode;
2347 dst = gen_reg_rtx (dst_mode);
2349 for (i = 0; i < n_regs; i++)
2350 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2352 if (mode != dst_mode)
2353 dst = gen_lowpart (mode, dst);
2355 return dst;
2358 /* Add a USE expression for REG to the (possibly empty) list pointed
2359 to by CALL_FUSAGE. REG must denote a hard register. */
2361 void
2362 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2364 gcc_assert (REG_P (reg));
2366 if (!HARD_REGISTER_P (reg))
2367 return;
2369 *call_fusage
2370 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2373 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2374 to by CALL_FUSAGE. REG must denote a hard register. */
2376 void
2377 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2379 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2381 *call_fusage
2382 = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2385 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2386 starting at REGNO. All of these registers must be hard registers. */
2388 void
2389 use_regs (rtx *call_fusage, int regno, int nregs)
2391 int i;
2393 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2395 for (i = 0; i < nregs; i++)
2396 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2399 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2400 PARALLEL REGS. This is for calls that pass values in multiple
2401 non-contiguous locations. The Irix 6 ABI has examples of this. */
2403 void
2404 use_group_regs (rtx *call_fusage, rtx regs)
2406 int i;
2408 for (i = 0; i < XVECLEN (regs, 0); i++)
2410 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2412 /* A NULL entry means the parameter goes both on the stack and in
2413 registers. This can also be a MEM for targets that pass values
2414 partially on the stack and partially in registers. */
2415 if (reg != 0 && REG_P (reg))
2416 use_reg (call_fusage, reg);
2420 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2421 assigment and the code of the expresion on the RHS is CODE. Return
2422 NULL otherwise. */
2424 static gimple
2425 get_def_for_expr (tree name, enum tree_code code)
2427 gimple def_stmt;
2429 if (TREE_CODE (name) != SSA_NAME)
2430 return NULL;
2432 def_stmt = get_gimple_for_ssa_name (name);
2433 if (!def_stmt
2434 || gimple_assign_rhs_code (def_stmt) != code)
2435 return NULL;
2437 return def_stmt;
2440 #ifdef HAVE_conditional_move
2441 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2442 assigment and the class of the expresion on the RHS is CLASS. Return
2443 NULL otherwise. */
2445 static gimple
2446 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2448 gimple def_stmt;
2450 if (TREE_CODE (name) != SSA_NAME)
2451 return NULL;
2453 def_stmt = get_gimple_for_ssa_name (name);
2454 if (!def_stmt
2455 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2456 return NULL;
2458 return def_stmt;
2460 #endif
2463 /* Determine whether the LEN bytes generated by CONSTFUN can be
2464 stored to memory using several move instructions. CONSTFUNDATA is
2465 a pointer which will be passed as argument in every CONSTFUN call.
2466 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2467 a memset operation and false if it's a copy of a constant string.
2468 Return nonzero if a call to store_by_pieces should succeed. */
2471 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2472 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2473 void *constfundata, unsigned int align, bool memsetp)
2475 unsigned HOST_WIDE_INT l;
2476 unsigned int max_size;
2477 HOST_WIDE_INT offset = 0;
2478 machine_mode mode;
2479 enum insn_code icode;
2480 int reverse;
2481 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2482 rtx cst ATTRIBUTE_UNUSED;
2484 if (len == 0)
2485 return 1;
2487 if (!targetm.use_by_pieces_infrastructure_p (len, align,
2488 memsetp
2489 ? SET_BY_PIECES
2490 : STORE_BY_PIECES,
2491 optimize_insn_for_speed_p ()))
2492 return 0;
2494 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2496 /* We would first store what we can in the largest integer mode, then go to
2497 successively smaller modes. */
2499 for (reverse = 0;
2500 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2501 reverse++)
2503 l = len;
2504 max_size = STORE_MAX_PIECES + 1;
2505 while (max_size > 1 && l > 0)
2507 mode = widest_int_mode_for_size (max_size);
2509 if (mode == VOIDmode)
2510 break;
2512 icode = optab_handler (mov_optab, mode);
2513 if (icode != CODE_FOR_nothing
2514 && align >= GET_MODE_ALIGNMENT (mode))
2516 unsigned int size = GET_MODE_SIZE (mode);
2518 while (l >= size)
2520 if (reverse)
2521 offset -= size;
2523 cst = (*constfun) (constfundata, offset, mode);
2524 if (!targetm.legitimate_constant_p (mode, cst))
2525 return 0;
2527 if (!reverse)
2528 offset += size;
2530 l -= size;
2534 max_size = GET_MODE_SIZE (mode);
2537 /* The code above should have handled everything. */
2538 gcc_assert (!l);
2541 return 1;
2544 /* Generate several move instructions to store LEN bytes generated by
2545 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2546 pointer which will be passed as argument in every CONSTFUN call.
2547 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2548 a memset operation and false if it's a copy of a constant string.
2549 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2550 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2551 stpcpy. */
2554 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2555 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2556 void *constfundata, unsigned int align, bool memsetp, int endp)
2558 machine_mode to_addr_mode = get_address_mode (to);
2559 struct store_by_pieces_d data;
2561 if (len == 0)
2563 gcc_assert (endp != 2);
2564 return to;
2567 gcc_assert (targetm.use_by_pieces_infrastructure_p
2568 (len, align,
2569 memsetp
2570 ? SET_BY_PIECES
2571 : STORE_BY_PIECES,
2572 optimize_insn_for_speed_p ()));
2574 data.constfun = constfun;
2575 data.constfundata = constfundata;
2576 data.len = len;
2577 data.to = to;
2578 store_by_pieces_1 (&data, align);
2579 if (endp)
2581 rtx to1;
2583 gcc_assert (!data.reverse);
2584 if (data.autinc_to)
2586 if (endp == 2)
2588 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2589 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2590 else
2591 data.to_addr = copy_to_mode_reg (to_addr_mode,
2592 plus_constant (to_addr_mode,
2593 data.to_addr,
2594 -1));
2596 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2597 data.offset);
2599 else
2601 if (endp == 2)
2602 --data.offset;
2603 to1 = adjust_address (data.to, QImode, data.offset);
2605 return to1;
2607 else
2608 return data.to;
2611 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2612 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2614 static void
2615 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2617 struct store_by_pieces_d data;
2619 if (len == 0)
2620 return;
2622 data.constfun = clear_by_pieces_1;
2623 data.constfundata = NULL;
2624 data.len = len;
2625 data.to = to;
2626 store_by_pieces_1 (&data, align);
2629 /* Callback routine for clear_by_pieces.
2630 Return const0_rtx unconditionally. */
2632 static rtx
2633 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2634 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2635 machine_mode mode ATTRIBUTE_UNUSED)
2637 return const0_rtx;
2640 /* Subroutine of clear_by_pieces and store_by_pieces.
2641 Generate several move instructions to store LEN bytes of block TO. (A MEM
2642 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2644 static void
2645 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2646 unsigned int align ATTRIBUTE_UNUSED)
2648 machine_mode to_addr_mode = get_address_mode (data->to);
2649 rtx to_addr = XEXP (data->to, 0);
2650 unsigned int max_size = STORE_MAX_PIECES + 1;
2651 enum insn_code icode;
2653 data->offset = 0;
2654 data->to_addr = to_addr;
2655 data->autinc_to
2656 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2657 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2659 data->explicit_inc_to = 0;
2660 data->reverse
2661 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2662 if (data->reverse)
2663 data->offset = data->len;
2665 /* If storing requires more than two move insns,
2666 copy addresses to registers (to make displacements shorter)
2667 and use post-increment if available. */
2668 if (!data->autinc_to
2669 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2671 /* Determine the main mode we'll be using.
2672 MODE might not be used depending on the definitions of the
2673 USE_* macros below. */
2674 machine_mode mode ATTRIBUTE_UNUSED
2675 = widest_int_mode_for_size (max_size);
2677 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2679 data->to_addr = copy_to_mode_reg (to_addr_mode,
2680 plus_constant (to_addr_mode,
2681 to_addr,
2682 data->len));
2683 data->autinc_to = 1;
2684 data->explicit_inc_to = -1;
2687 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2688 && ! data->autinc_to)
2690 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2691 data->autinc_to = 1;
2692 data->explicit_inc_to = 1;
2695 if ( !data->autinc_to && CONSTANT_P (to_addr))
2696 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2699 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2701 /* First store what we can in the largest integer mode, then go to
2702 successively smaller modes. */
2704 while (max_size > 1 && data->len > 0)
2706 machine_mode mode = widest_int_mode_for_size (max_size);
2708 if (mode == VOIDmode)
2709 break;
2711 icode = optab_handler (mov_optab, mode);
2712 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2713 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2715 max_size = GET_MODE_SIZE (mode);
2718 /* The code above should have handled everything. */
2719 gcc_assert (!data->len);
2722 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2723 with move instructions for mode MODE. GENFUN is the gen_... function
2724 to make a move insn for that mode. DATA has all the other info. */
2726 static void
2727 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2728 struct store_by_pieces_d *data)
2730 unsigned int size = GET_MODE_SIZE (mode);
2731 rtx to1, cst;
2733 while (data->len >= size)
2735 if (data->reverse)
2736 data->offset -= size;
2738 if (data->autinc_to)
2739 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2740 data->offset);
2741 else
2742 to1 = adjust_address (data->to, mode, data->offset);
2744 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2745 emit_insn (gen_add2_insn (data->to_addr,
2746 gen_int_mode (-(HOST_WIDE_INT) size,
2747 GET_MODE (data->to_addr))));
2749 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2750 emit_insn ((*genfun) (to1, cst));
2752 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2753 emit_insn (gen_add2_insn (data->to_addr,
2754 gen_int_mode (size,
2755 GET_MODE (data->to_addr))));
2757 if (! data->reverse)
2758 data->offset += size;
2760 data->len -= size;
2764 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2765 its length in bytes. */
2768 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2769 unsigned int expected_align, HOST_WIDE_INT expected_size,
2770 unsigned HOST_WIDE_INT min_size,
2771 unsigned HOST_WIDE_INT max_size,
2772 unsigned HOST_WIDE_INT probable_max_size)
2774 machine_mode mode = GET_MODE (object);
2775 unsigned int align;
2777 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2779 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2780 just move a zero. Otherwise, do this a piece at a time. */
2781 if (mode != BLKmode
2782 && CONST_INT_P (size)
2783 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2785 rtx zero = CONST0_RTX (mode);
2786 if (zero != NULL)
2788 emit_move_insn (object, zero);
2789 return NULL;
2792 if (COMPLEX_MODE_P (mode))
2794 zero = CONST0_RTX (GET_MODE_INNER (mode));
2795 if (zero != NULL)
2797 write_complex_part (object, zero, 0);
2798 write_complex_part (object, zero, 1);
2799 return NULL;
2804 if (size == const0_rtx)
2805 return NULL;
2807 align = MEM_ALIGN (object);
2809 if (CONST_INT_P (size)
2810 && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2811 CLEAR_BY_PIECES,
2812 optimize_insn_for_speed_p ()))
2813 clear_by_pieces (object, INTVAL (size), align);
2814 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2815 expected_align, expected_size,
2816 min_size, max_size, probable_max_size))
2818 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2819 return set_storage_via_libcall (object, size, const0_rtx,
2820 method == BLOCK_OP_TAILCALL);
2821 else
2822 gcc_unreachable ();
2824 return NULL;
2828 clear_storage (rtx object, rtx size, enum block_op_methods method)
2830 unsigned HOST_WIDE_INT max, min = 0;
2831 if (GET_CODE (size) == CONST_INT)
2832 min = max = UINTVAL (size);
2833 else
2834 max = GET_MODE_MASK (GET_MODE (size));
2835 return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2839 /* A subroutine of clear_storage. Expand a call to memset.
2840 Return the return value of memset, 0 otherwise. */
2843 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2845 tree call_expr, fn, object_tree, size_tree, val_tree;
2846 machine_mode size_mode;
2847 rtx retval;
2849 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2850 place those into new pseudos into a VAR_DECL and use them later. */
2852 object = copy_addr_to_reg (XEXP (object, 0));
2854 size_mode = TYPE_MODE (sizetype);
2855 size = convert_to_mode (size_mode, size, 1);
2856 size = copy_to_mode_reg (size_mode, size);
2858 /* It is incorrect to use the libcall calling conventions to call
2859 memset in this context. This could be a user call to memset and
2860 the user may wish to examine the return value from memset. For
2861 targets where libcalls and normal calls have different conventions
2862 for returning pointers, we could end up generating incorrect code. */
2864 object_tree = make_tree (ptr_type_node, object);
2865 if (!CONST_INT_P (val))
2866 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2867 size_tree = make_tree (sizetype, size);
2868 val_tree = make_tree (integer_type_node, val);
2870 fn = clear_storage_libcall_fn (true);
2871 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2872 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2874 retval = expand_normal (call_expr);
2876 return retval;
2879 /* A subroutine of set_storage_via_libcall. Create the tree node
2880 for the function we use for block clears. */
2882 tree block_clear_fn;
2884 void
2885 init_block_clear_fn (const char *asmspec)
2887 if (!block_clear_fn)
2889 tree fn, args;
2891 fn = get_identifier ("memset");
2892 args = build_function_type_list (ptr_type_node, ptr_type_node,
2893 integer_type_node, sizetype,
2894 NULL_TREE);
2896 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2897 DECL_EXTERNAL (fn) = 1;
2898 TREE_PUBLIC (fn) = 1;
2899 DECL_ARTIFICIAL (fn) = 1;
2900 TREE_NOTHROW (fn) = 1;
2901 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2902 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2904 block_clear_fn = fn;
2907 if (asmspec)
2908 set_user_assembler_name (block_clear_fn, asmspec);
2911 static tree
2912 clear_storage_libcall_fn (int for_call)
2914 static bool emitted_extern;
2916 if (!block_clear_fn)
2917 init_block_clear_fn (NULL);
2919 if (for_call && !emitted_extern)
2921 emitted_extern = true;
2922 make_decl_rtl (block_clear_fn);
2925 return block_clear_fn;
2928 /* Expand a setmem pattern; return true if successful. */
2930 bool
2931 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2932 unsigned int expected_align, HOST_WIDE_INT expected_size,
2933 unsigned HOST_WIDE_INT min_size,
2934 unsigned HOST_WIDE_INT max_size,
2935 unsigned HOST_WIDE_INT probable_max_size)
2937 /* Try the most limited insn first, because there's no point
2938 including more than one in the machine description unless
2939 the more limited one has some advantage. */
2941 machine_mode mode;
2943 if (expected_align < align)
2944 expected_align = align;
2945 if (expected_size != -1)
2947 if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2948 expected_size = max_size;
2949 if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2950 expected_size = min_size;
2953 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2954 mode = GET_MODE_WIDER_MODE (mode))
2956 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2958 if (code != CODE_FOR_nothing
2959 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2960 here because if SIZE is less than the mode mask, as it is
2961 returned by the macro, it will definitely be less than the
2962 actual mode mask. Since SIZE is within the Pmode address
2963 space, we limit MODE to Pmode. */
2964 && ((CONST_INT_P (size)
2965 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2966 <= (GET_MODE_MASK (mode) >> 1)))
2967 || max_size <= (GET_MODE_MASK (mode) >> 1)
2968 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2970 struct expand_operand ops[9];
2971 unsigned int nops;
2973 nops = insn_data[(int) code].n_generator_args;
2974 gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2976 create_fixed_operand (&ops[0], object);
2977 /* The check above guarantees that this size conversion is valid. */
2978 create_convert_operand_to (&ops[1], size, mode, true);
2979 create_convert_operand_from (&ops[2], val, byte_mode, true);
2980 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2981 if (nops >= 6)
2983 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2984 create_integer_operand (&ops[5], expected_size);
2986 if (nops >= 8)
2988 create_integer_operand (&ops[6], min_size);
2989 /* If we can not represent the maximal size,
2990 make parameter NULL. */
2991 if ((HOST_WIDE_INT) max_size != -1)
2992 create_integer_operand (&ops[7], max_size);
2993 else
2994 create_fixed_operand (&ops[7], NULL);
2996 if (nops == 9)
2998 /* If we can not represent the maximal size,
2999 make parameter NULL. */
3000 if ((HOST_WIDE_INT) probable_max_size != -1)
3001 create_integer_operand (&ops[8], probable_max_size);
3002 else
3003 create_fixed_operand (&ops[8], NULL);
3005 if (maybe_expand_insn (code, nops, ops))
3006 return true;
3010 return false;
3014 /* Write to one of the components of the complex value CPLX. Write VAL to
3015 the real part if IMAG_P is false, and the imaginary part if its true. */
3017 void
3018 write_complex_part (rtx cplx, rtx val, bool imag_p)
3020 machine_mode cmode;
3021 machine_mode imode;
3022 unsigned ibitsize;
3024 if (GET_CODE (cplx) == CONCAT)
3026 emit_move_insn (XEXP (cplx, imag_p), val);
3027 return;
3030 cmode = GET_MODE (cplx);
3031 imode = GET_MODE_INNER (cmode);
3032 ibitsize = GET_MODE_BITSIZE (imode);
3034 /* For MEMs simplify_gen_subreg may generate an invalid new address
3035 because, e.g., the original address is considered mode-dependent
3036 by the target, which restricts simplify_subreg from invoking
3037 adjust_address_nv. Instead of preparing fallback support for an
3038 invalid address, we call adjust_address_nv directly. */
3039 if (MEM_P (cplx))
3041 emit_move_insn (adjust_address_nv (cplx, imode,
3042 imag_p ? GET_MODE_SIZE (imode) : 0),
3043 val);
3044 return;
3047 /* If the sub-object is at least word sized, then we know that subregging
3048 will work. This special case is important, since store_bit_field
3049 wants to operate on integer modes, and there's rarely an OImode to
3050 correspond to TCmode. */
3051 if (ibitsize >= BITS_PER_WORD
3052 /* For hard regs we have exact predicates. Assume we can split
3053 the original object if it spans an even number of hard regs.
3054 This special case is important for SCmode on 64-bit platforms
3055 where the natural size of floating-point regs is 32-bit. */
3056 || (REG_P (cplx)
3057 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3058 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3060 rtx part = simplify_gen_subreg (imode, cplx, cmode,
3061 imag_p ? GET_MODE_SIZE (imode) : 0);
3062 if (part)
3064 emit_move_insn (part, val);
3065 return;
3067 else
3068 /* simplify_gen_subreg may fail for sub-word MEMs. */
3069 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3072 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3073 false);
3076 /* Extract one of the components of the complex value CPLX. Extract the
3077 real part if IMAG_P is false, and the imaginary part if it's true. */
3079 static rtx
3080 read_complex_part (rtx cplx, bool imag_p)
3082 machine_mode cmode, imode;
3083 unsigned ibitsize;
3085 if (GET_CODE (cplx) == CONCAT)
3086 return XEXP (cplx, imag_p);
3088 cmode = GET_MODE (cplx);
3089 imode = GET_MODE_INNER (cmode);
3090 ibitsize = GET_MODE_BITSIZE (imode);
3092 /* Special case reads from complex constants that got spilled to memory. */
3093 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3095 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3096 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3098 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3099 if (CONSTANT_CLASS_P (part))
3100 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3104 /* For MEMs simplify_gen_subreg may generate an invalid new address
3105 because, e.g., the original address is considered mode-dependent
3106 by the target, which restricts simplify_subreg from invoking
3107 adjust_address_nv. Instead of preparing fallback support for an
3108 invalid address, we call adjust_address_nv directly. */
3109 if (MEM_P (cplx))
3110 return adjust_address_nv (cplx, imode,
3111 imag_p ? GET_MODE_SIZE (imode) : 0);
3113 /* If the sub-object is at least word sized, then we know that subregging
3114 will work. This special case is important, since extract_bit_field
3115 wants to operate on integer modes, and there's rarely an OImode to
3116 correspond to TCmode. */
3117 if (ibitsize >= BITS_PER_WORD
3118 /* For hard regs we have exact predicates. Assume we can split
3119 the original object if it spans an even number of hard regs.
3120 This special case is important for SCmode on 64-bit platforms
3121 where the natural size of floating-point regs is 32-bit. */
3122 || (REG_P (cplx)
3123 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3124 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3126 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3127 imag_p ? GET_MODE_SIZE (imode) : 0);
3128 if (ret)
3129 return ret;
3130 else
3131 /* simplify_gen_subreg may fail for sub-word MEMs. */
3132 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3135 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3136 true, NULL_RTX, imode, imode, false);
3139 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3140 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3141 represented in NEW_MODE. If FORCE is true, this will never happen, as
3142 we'll force-create a SUBREG if needed. */
3144 static rtx
3145 emit_move_change_mode (machine_mode new_mode,
3146 machine_mode old_mode, rtx x, bool force)
3148 rtx ret;
3150 if (push_operand (x, GET_MODE (x)))
3152 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3153 MEM_COPY_ATTRIBUTES (ret, x);
3155 else if (MEM_P (x))
3157 /* We don't have to worry about changing the address since the
3158 size in bytes is supposed to be the same. */
3159 if (reload_in_progress)
3161 /* Copy the MEM to change the mode and move any
3162 substitutions from the old MEM to the new one. */
3163 ret = adjust_address_nv (x, new_mode, 0);
3164 copy_replacements (x, ret);
3166 else
3167 ret = adjust_address (x, new_mode, 0);
3169 else
3171 /* Note that we do want simplify_subreg's behavior of validating
3172 that the new mode is ok for a hard register. If we were to use
3173 simplify_gen_subreg, we would create the subreg, but would
3174 probably run into the target not being able to implement it. */
3175 /* Except, of course, when FORCE is true, when this is exactly what
3176 we want. Which is needed for CCmodes on some targets. */
3177 if (force)
3178 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3179 else
3180 ret = simplify_subreg (new_mode, x, old_mode, 0);
3183 return ret;
3186 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3187 an integer mode of the same size as MODE. Returns the instruction
3188 emitted, or NULL if such a move could not be generated. */
3190 static rtx_insn *
3191 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3193 machine_mode imode;
3194 enum insn_code code;
3196 /* There must exist a mode of the exact size we require. */
3197 imode = int_mode_for_mode (mode);
3198 if (imode == BLKmode)
3199 return NULL;
3201 /* The target must support moves in this mode. */
3202 code = optab_handler (mov_optab, imode);
3203 if (code == CODE_FOR_nothing)
3204 return NULL;
3206 x = emit_move_change_mode (imode, mode, x, force);
3207 if (x == NULL_RTX)
3208 return NULL;
3209 y = emit_move_change_mode (imode, mode, y, force);
3210 if (y == NULL_RTX)
3211 return NULL;
3212 return emit_insn (GEN_FCN (code) (x, y));
3215 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3216 Return an equivalent MEM that does not use an auto-increment. */
3219 emit_move_resolve_push (machine_mode mode, rtx x)
3221 enum rtx_code code = GET_CODE (XEXP (x, 0));
3222 HOST_WIDE_INT adjust;
3223 rtx temp;
3225 adjust = GET_MODE_SIZE (mode);
3226 #ifdef PUSH_ROUNDING
3227 adjust = PUSH_ROUNDING (adjust);
3228 #endif
3229 if (code == PRE_DEC || code == POST_DEC)
3230 adjust = -adjust;
3231 else if (code == PRE_MODIFY || code == POST_MODIFY)
3233 rtx expr = XEXP (XEXP (x, 0), 1);
3234 HOST_WIDE_INT val;
3236 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3237 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3238 val = INTVAL (XEXP (expr, 1));
3239 if (GET_CODE (expr) == MINUS)
3240 val = -val;
3241 gcc_assert (adjust == val || adjust == -val);
3242 adjust = val;
3245 /* Do not use anti_adjust_stack, since we don't want to update
3246 stack_pointer_delta. */
3247 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3248 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3249 0, OPTAB_LIB_WIDEN);
3250 if (temp != stack_pointer_rtx)
3251 emit_move_insn (stack_pointer_rtx, temp);
3253 switch (code)
3255 case PRE_INC:
3256 case PRE_DEC:
3257 case PRE_MODIFY:
3258 temp = stack_pointer_rtx;
3259 break;
3260 case POST_INC:
3261 case POST_DEC:
3262 case POST_MODIFY:
3263 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3264 break;
3265 default:
3266 gcc_unreachable ();
3269 return replace_equiv_address (x, temp);
3272 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3273 X is known to satisfy push_operand, and MODE is known to be complex.
3274 Returns the last instruction emitted. */
3276 rtx_insn *
3277 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3279 machine_mode submode = GET_MODE_INNER (mode);
3280 bool imag_first;
3282 #ifdef PUSH_ROUNDING
3283 unsigned int submodesize = GET_MODE_SIZE (submode);
3285 /* In case we output to the stack, but the size is smaller than the
3286 machine can push exactly, we need to use move instructions. */
3287 if (PUSH_ROUNDING (submodesize) != submodesize)
3289 x = emit_move_resolve_push (mode, x);
3290 return emit_move_insn (x, y);
3292 #endif
3294 /* Note that the real part always precedes the imag part in memory
3295 regardless of machine's endianness. */
3296 switch (GET_CODE (XEXP (x, 0)))
3298 case PRE_DEC:
3299 case POST_DEC:
3300 imag_first = true;
3301 break;
3302 case PRE_INC:
3303 case POST_INC:
3304 imag_first = false;
3305 break;
3306 default:
3307 gcc_unreachable ();
3310 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3311 read_complex_part (y, imag_first));
3312 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3313 read_complex_part (y, !imag_first));
3316 /* A subroutine of emit_move_complex. Perform the move from Y to X
3317 via two moves of the parts. Returns the last instruction emitted. */
3319 rtx_insn *
3320 emit_move_complex_parts (rtx x, rtx y)
3322 /* Show the output dies here. This is necessary for SUBREGs
3323 of pseudos since we cannot track their lifetimes correctly;
3324 hard regs shouldn't appear here except as return values. */
3325 if (!reload_completed && !reload_in_progress
3326 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3327 emit_clobber (x);
3329 write_complex_part (x, read_complex_part (y, false), false);
3330 write_complex_part (x, read_complex_part (y, true), true);
3332 return get_last_insn ();
3335 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3336 MODE is known to be complex. Returns the last instruction emitted. */
3338 static rtx_insn *
3339 emit_move_complex (machine_mode mode, rtx x, rtx y)
3341 bool try_int;
3343 /* Need to take special care for pushes, to maintain proper ordering
3344 of the data, and possibly extra padding. */
3345 if (push_operand (x, mode))
3346 return emit_move_complex_push (mode, x, y);
3348 /* See if we can coerce the target into moving both values at once, except
3349 for floating point where we favor moving as parts if this is easy. */
3350 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3351 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3352 && !(REG_P (x)
3353 && HARD_REGISTER_P (x)
3354 && hard_regno_nregs[REGNO (x)][mode] == 1)
3355 && !(REG_P (y)
3356 && HARD_REGISTER_P (y)
3357 && hard_regno_nregs[REGNO (y)][mode] == 1))
3358 try_int = false;
3359 /* Not possible if the values are inherently not adjacent. */
3360 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3361 try_int = false;
3362 /* Is possible if both are registers (or subregs of registers). */
3363 else if (register_operand (x, mode) && register_operand (y, mode))
3364 try_int = true;
3365 /* If one of the operands is a memory, and alignment constraints
3366 are friendly enough, we may be able to do combined memory operations.
3367 We do not attempt this if Y is a constant because that combination is
3368 usually better with the by-parts thing below. */
3369 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3370 && (!STRICT_ALIGNMENT
3371 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3372 try_int = true;
3373 else
3374 try_int = false;
3376 if (try_int)
3378 rtx_insn *ret;
3380 /* For memory to memory moves, optimal behavior can be had with the
3381 existing block move logic. */
3382 if (MEM_P (x) && MEM_P (y))
3384 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3385 BLOCK_OP_NO_LIBCALL);
3386 return get_last_insn ();
3389 ret = emit_move_via_integer (mode, x, y, true);
3390 if (ret)
3391 return ret;
3394 return emit_move_complex_parts (x, y);
3397 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3398 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3400 static rtx_insn *
3401 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3403 rtx_insn *ret;
3405 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3406 if (mode != CCmode)
3408 enum insn_code code = optab_handler (mov_optab, CCmode);
3409 if (code != CODE_FOR_nothing)
3411 x = emit_move_change_mode (CCmode, mode, x, true);
3412 y = emit_move_change_mode (CCmode, mode, y, true);
3413 return emit_insn (GEN_FCN (code) (x, y));
3417 /* Otherwise, find the MODE_INT mode of the same width. */
3418 ret = emit_move_via_integer (mode, x, y, false);
3419 gcc_assert (ret != NULL);
3420 return ret;
3423 /* Return true if word I of OP lies entirely in the
3424 undefined bits of a paradoxical subreg. */
3426 static bool
3427 undefined_operand_subword_p (const_rtx op, int i)
3429 machine_mode innermode, innermostmode;
3430 int offset;
3431 if (GET_CODE (op) != SUBREG)
3432 return false;
3433 innermode = GET_MODE (op);
3434 innermostmode = GET_MODE (SUBREG_REG (op));
3435 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3436 /* The SUBREG_BYTE represents offset, as if the value were stored in
3437 memory, except for a paradoxical subreg where we define
3438 SUBREG_BYTE to be 0; undo this exception as in
3439 simplify_subreg. */
3440 if (SUBREG_BYTE (op) == 0
3441 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3443 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3444 if (WORDS_BIG_ENDIAN)
3445 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3446 if (BYTES_BIG_ENDIAN)
3447 offset += difference % UNITS_PER_WORD;
3449 if (offset >= GET_MODE_SIZE (innermostmode)
3450 || offset <= -GET_MODE_SIZE (word_mode))
3451 return true;
3452 return false;
3455 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3456 MODE is any multi-word or full-word mode that lacks a move_insn
3457 pattern. Note that you will get better code if you define such
3458 patterns, even if they must turn into multiple assembler instructions. */
3460 static rtx_insn *
3461 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3463 rtx_insn *last_insn = 0;
3464 rtx_insn *seq;
3465 rtx inner;
3466 bool need_clobber;
3467 int i;
3469 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3471 /* If X is a push on the stack, do the push now and replace
3472 X with a reference to the stack pointer. */
3473 if (push_operand (x, mode))
3474 x = emit_move_resolve_push (mode, x);
3476 /* If we are in reload, see if either operand is a MEM whose address
3477 is scheduled for replacement. */
3478 if (reload_in_progress && MEM_P (x)
3479 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3480 x = replace_equiv_address_nv (x, inner);
3481 if (reload_in_progress && MEM_P (y)
3482 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3483 y = replace_equiv_address_nv (y, inner);
3485 start_sequence ();
3487 need_clobber = false;
3488 for (i = 0;
3489 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3490 i++)
3492 rtx xpart = operand_subword (x, i, 1, mode);
3493 rtx ypart;
3495 /* Do not generate code for a move if it would come entirely
3496 from the undefined bits of a paradoxical subreg. */
3497 if (undefined_operand_subword_p (y, i))
3498 continue;
3500 ypart = operand_subword (y, i, 1, mode);
3502 /* If we can't get a part of Y, put Y into memory if it is a
3503 constant. Otherwise, force it into a register. Then we must
3504 be able to get a part of Y. */
3505 if (ypart == 0 && CONSTANT_P (y))
3507 y = use_anchored_address (force_const_mem (mode, y));
3508 ypart = operand_subword (y, i, 1, mode);
3510 else if (ypart == 0)
3511 ypart = operand_subword_force (y, i, mode);
3513 gcc_assert (xpart && ypart);
3515 need_clobber |= (GET_CODE (xpart) == SUBREG);
3517 last_insn = emit_move_insn (xpart, ypart);
3520 seq = get_insns ();
3521 end_sequence ();
3523 /* Show the output dies here. This is necessary for SUBREGs
3524 of pseudos since we cannot track their lifetimes correctly;
3525 hard regs shouldn't appear here except as return values.
3526 We never want to emit such a clobber after reload. */
3527 if (x != y
3528 && ! (reload_in_progress || reload_completed)
3529 && need_clobber != 0)
3530 emit_clobber (x);
3532 emit_insn (seq);
3534 return last_insn;
3537 /* Low level part of emit_move_insn.
3538 Called just like emit_move_insn, but assumes X and Y
3539 are basically valid. */
3541 rtx_insn *
3542 emit_move_insn_1 (rtx x, rtx y)
3544 machine_mode mode = GET_MODE (x);
3545 enum insn_code code;
3547 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3549 code = optab_handler (mov_optab, mode);
3550 if (code != CODE_FOR_nothing)
3551 return emit_insn (GEN_FCN (code) (x, y));
3553 /* Expand complex moves by moving real part and imag part. */
3554 if (COMPLEX_MODE_P (mode))
3555 return emit_move_complex (mode, x, y);
3557 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3558 || ALL_FIXED_POINT_MODE_P (mode))
3560 rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3562 /* If we can't find an integer mode, use multi words. */
3563 if (result)
3564 return result;
3565 else
3566 return emit_move_multi_word (mode, x, y);
3569 if (GET_MODE_CLASS (mode) == MODE_CC)
3570 return emit_move_ccmode (mode, x, y);
3572 /* Try using a move pattern for the corresponding integer mode. This is
3573 only safe when simplify_subreg can convert MODE constants into integer
3574 constants. At present, it can only do this reliably if the value
3575 fits within a HOST_WIDE_INT. */
3576 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3578 rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3580 if (ret)
3582 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3583 return ret;
3587 return emit_move_multi_word (mode, x, y);
3590 /* Generate code to copy Y into X.
3591 Both Y and X must have the same mode, except that
3592 Y can be a constant with VOIDmode.
3593 This mode cannot be BLKmode; use emit_block_move for that.
3595 Return the last instruction emitted. */
3597 rtx_insn *
3598 emit_move_insn (rtx x, rtx y)
3600 machine_mode mode = GET_MODE (x);
3601 rtx y_cst = NULL_RTX;
3602 rtx_insn *last_insn;
3603 rtx set;
3605 gcc_assert (mode != BLKmode
3606 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3608 if (CONSTANT_P (y))
3610 if (optimize
3611 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3612 && (last_insn = compress_float_constant (x, y)))
3613 return last_insn;
3615 y_cst = y;
3617 if (!targetm.legitimate_constant_p (mode, y))
3619 y = force_const_mem (mode, y);
3621 /* If the target's cannot_force_const_mem prevented the spill,
3622 assume that the target's move expanders will also take care
3623 of the non-legitimate constant. */
3624 if (!y)
3625 y = y_cst;
3626 else
3627 y = use_anchored_address (y);
3631 /* If X or Y are memory references, verify that their addresses are valid
3632 for the machine. */
3633 if (MEM_P (x)
3634 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3635 MEM_ADDR_SPACE (x))
3636 && ! push_operand (x, GET_MODE (x))))
3637 x = validize_mem (x);
3639 if (MEM_P (y)
3640 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3641 MEM_ADDR_SPACE (y)))
3642 y = validize_mem (y);
3644 gcc_assert (mode != BLKmode);
3646 last_insn = emit_move_insn_1 (x, y);
3648 if (y_cst && REG_P (x)
3649 && (set = single_set (last_insn)) != NULL_RTX
3650 && SET_DEST (set) == x
3651 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3652 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3654 return last_insn;
3657 /* Generate the body of an instruction to copy Y into X.
3658 It may be a list of insns, if one insn isn't enough. */
3661 gen_move_insn (rtx x, rtx y)
3663 rtx_insn *seq;
3665 start_sequence ();
3666 emit_move_insn_1 (x, y);
3667 seq = get_insns ();
3668 end_sequence ();
3669 return seq;
3672 /* If Y is representable exactly in a narrower mode, and the target can
3673 perform the extension directly from constant or memory, then emit the
3674 move as an extension. */
3676 static rtx_insn *
3677 compress_float_constant (rtx x, rtx y)
3679 machine_mode dstmode = GET_MODE (x);
3680 machine_mode orig_srcmode = GET_MODE (y);
3681 machine_mode srcmode;
3682 REAL_VALUE_TYPE r;
3683 int oldcost, newcost;
3684 bool speed = optimize_insn_for_speed_p ();
3686 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3688 if (targetm.legitimate_constant_p (dstmode, y))
3689 oldcost = set_src_cost (y, speed);
3690 else
3691 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3693 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3694 srcmode != orig_srcmode;
3695 srcmode = GET_MODE_WIDER_MODE (srcmode))
3697 enum insn_code ic;
3698 rtx trunc_y;
3699 rtx_insn *last_insn;
3701 /* Skip if the target can't extend this way. */
3702 ic = can_extend_p (dstmode, srcmode, 0);
3703 if (ic == CODE_FOR_nothing)
3704 continue;
3706 /* Skip if the narrowed value isn't exact. */
3707 if (! exact_real_truncate (srcmode, &r))
3708 continue;
3710 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3712 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3714 /* Skip if the target needs extra instructions to perform
3715 the extension. */
3716 if (!insn_operand_matches (ic, 1, trunc_y))
3717 continue;
3718 /* This is valid, but may not be cheaper than the original. */
3719 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3720 speed);
3721 if (oldcost < newcost)
3722 continue;
3724 else if (float_extend_from_mem[dstmode][srcmode])
3726 trunc_y = force_const_mem (srcmode, trunc_y);
3727 /* This is valid, but may not be cheaper than the original. */
3728 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3729 speed);
3730 if (oldcost < newcost)
3731 continue;
3732 trunc_y = validize_mem (trunc_y);
3734 else
3735 continue;
3737 /* For CSE's benefit, force the compressed constant pool entry
3738 into a new pseudo. This constant may be used in different modes,
3739 and if not, combine will put things back together for us. */
3740 trunc_y = force_reg (srcmode, trunc_y);
3742 /* If x is a hard register, perform the extension into a pseudo,
3743 so that e.g. stack realignment code is aware of it. */
3744 rtx target = x;
3745 if (REG_P (x) && HARD_REGISTER_P (x))
3746 target = gen_reg_rtx (dstmode);
3748 emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3749 last_insn = get_last_insn ();
3751 if (REG_P (target))
3752 set_unique_reg_note (last_insn, REG_EQUAL, y);
3754 if (target != x)
3755 return emit_move_insn (x, target);
3756 return last_insn;
3759 return NULL;
3762 /* Pushing data onto the stack. */
3764 /* Push a block of length SIZE (perhaps variable)
3765 and return an rtx to address the beginning of the block.
3766 The value may be virtual_outgoing_args_rtx.
3768 EXTRA is the number of bytes of padding to push in addition to SIZE.
3769 BELOW nonzero means this padding comes at low addresses;
3770 otherwise, the padding comes at high addresses. */
3773 push_block (rtx size, int extra, int below)
3775 rtx temp;
3777 size = convert_modes (Pmode, ptr_mode, size, 1);
3778 if (CONSTANT_P (size))
3779 anti_adjust_stack (plus_constant (Pmode, size, extra));
3780 else if (REG_P (size) && extra == 0)
3781 anti_adjust_stack (size);
3782 else
3784 temp = copy_to_mode_reg (Pmode, size);
3785 if (extra != 0)
3786 temp = expand_binop (Pmode, add_optab, temp,
3787 gen_int_mode (extra, Pmode),
3788 temp, 0, OPTAB_LIB_WIDEN);
3789 anti_adjust_stack (temp);
3792 #ifndef STACK_GROWS_DOWNWARD
3793 if (0)
3794 #else
3795 if (1)
3796 #endif
3798 temp = virtual_outgoing_args_rtx;
3799 if (extra != 0 && below)
3800 temp = plus_constant (Pmode, temp, extra);
3802 else
3804 if (CONST_INT_P (size))
3805 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3806 -INTVAL (size) - (below ? 0 : extra));
3807 else if (extra != 0 && !below)
3808 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3809 negate_rtx (Pmode, plus_constant (Pmode, size,
3810 extra)));
3811 else
3812 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3813 negate_rtx (Pmode, size));
3816 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3819 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3821 static rtx
3822 mem_autoinc_base (rtx mem)
3824 if (MEM_P (mem))
3826 rtx addr = XEXP (mem, 0);
3827 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3828 return XEXP (addr, 0);
3830 return NULL;
3833 /* A utility routine used here, in reload, and in try_split. The insns
3834 after PREV up to and including LAST are known to adjust the stack,
3835 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3836 placing notes as appropriate. PREV may be NULL, indicating the
3837 entire insn sequence prior to LAST should be scanned.
3839 The set of allowed stack pointer modifications is small:
3840 (1) One or more auto-inc style memory references (aka pushes),
3841 (2) One or more addition/subtraction with the SP as destination,
3842 (3) A single move insn with the SP as destination,
3843 (4) A call_pop insn,
3844 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3846 Insns in the sequence that do not modify the SP are ignored,
3847 except for noreturn calls.
3849 The return value is the amount of adjustment that can be trivially
3850 verified, via immediate operand or auto-inc. If the adjustment
3851 cannot be trivially extracted, the return value is INT_MIN. */
3853 HOST_WIDE_INT
3854 find_args_size_adjust (rtx_insn *insn)
3856 rtx dest, set, pat;
3857 int i;
3859 pat = PATTERN (insn);
3860 set = NULL;
3862 /* Look for a call_pop pattern. */
3863 if (CALL_P (insn))
3865 /* We have to allow non-call_pop patterns for the case
3866 of emit_single_push_insn of a TLS address. */
3867 if (GET_CODE (pat) != PARALLEL)
3868 return 0;
3870 /* All call_pop have a stack pointer adjust in the parallel.
3871 The call itself is always first, and the stack adjust is
3872 usually last, so search from the end. */
3873 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3875 set = XVECEXP (pat, 0, i);
3876 if (GET_CODE (set) != SET)
3877 continue;
3878 dest = SET_DEST (set);
3879 if (dest == stack_pointer_rtx)
3880 break;
3882 /* We'd better have found the stack pointer adjust. */
3883 if (i == 0)
3884 return 0;
3885 /* Fall through to process the extracted SET and DEST
3886 as if it was a standalone insn. */
3888 else if (GET_CODE (pat) == SET)
3889 set = pat;
3890 else if ((set = single_set (insn)) != NULL)
3892 else if (GET_CODE (pat) == PARALLEL)
3894 /* ??? Some older ports use a parallel with a stack adjust
3895 and a store for a PUSH_ROUNDING pattern, rather than a
3896 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3897 /* ??? See h8300 and m68k, pushqi1. */
3898 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3900 set = XVECEXP (pat, 0, i);
3901 if (GET_CODE (set) != SET)
3902 continue;
3903 dest = SET_DEST (set);
3904 if (dest == stack_pointer_rtx)
3905 break;
3907 /* We do not expect an auto-inc of the sp in the parallel. */
3908 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3909 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3910 != stack_pointer_rtx);
3912 if (i < 0)
3913 return 0;
3915 else
3916 return 0;
3918 dest = SET_DEST (set);
3920 /* Look for direct modifications of the stack pointer. */
3921 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3923 /* Look for a trivial adjustment, otherwise assume nothing. */
3924 /* Note that the SPU restore_stack_block pattern refers to
3925 the stack pointer in V4SImode. Consider that non-trivial. */
3926 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3927 && GET_CODE (SET_SRC (set)) == PLUS
3928 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3929 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3930 return INTVAL (XEXP (SET_SRC (set), 1));
3931 /* ??? Reload can generate no-op moves, which will be cleaned
3932 up later. Recognize it and continue searching. */
3933 else if (rtx_equal_p (dest, SET_SRC (set)))
3934 return 0;
3935 else
3936 return HOST_WIDE_INT_MIN;
3938 else
3940 rtx mem, addr;
3942 /* Otherwise only think about autoinc patterns. */
3943 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3945 mem = dest;
3946 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3947 != stack_pointer_rtx);
3949 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3950 mem = SET_SRC (set);
3951 else
3952 return 0;
3954 addr = XEXP (mem, 0);
3955 switch (GET_CODE (addr))
3957 case PRE_INC:
3958 case POST_INC:
3959 return GET_MODE_SIZE (GET_MODE (mem));
3960 case PRE_DEC:
3961 case POST_DEC:
3962 return -GET_MODE_SIZE (GET_MODE (mem));
3963 case PRE_MODIFY:
3964 case POST_MODIFY:
3965 addr = XEXP (addr, 1);
3966 gcc_assert (GET_CODE (addr) == PLUS);
3967 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3968 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3969 return INTVAL (XEXP (addr, 1));
3970 default:
3971 gcc_unreachable ();
3977 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3979 int args_size = end_args_size;
3980 bool saw_unknown = false;
3981 rtx_insn *insn;
3983 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3985 HOST_WIDE_INT this_delta;
3987 if (!NONDEBUG_INSN_P (insn))
3988 continue;
3990 this_delta = find_args_size_adjust (insn);
3991 if (this_delta == 0)
3993 if (!CALL_P (insn)
3994 || ACCUMULATE_OUTGOING_ARGS
3995 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3996 continue;
3999 gcc_assert (!saw_unknown);
4000 if (this_delta == HOST_WIDE_INT_MIN)
4001 saw_unknown = true;
4003 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4004 #ifdef STACK_GROWS_DOWNWARD
4005 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4006 #endif
4007 args_size -= this_delta;
4010 return saw_unknown ? INT_MIN : args_size;
4013 #ifdef PUSH_ROUNDING
4014 /* Emit single push insn. */
4016 static void
4017 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4019 rtx dest_addr;
4020 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4021 rtx dest;
4022 enum insn_code icode;
4024 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4025 /* If there is push pattern, use it. Otherwise try old way of throwing
4026 MEM representing push operation to move expander. */
4027 icode = optab_handler (push_optab, mode);
4028 if (icode != CODE_FOR_nothing)
4030 struct expand_operand ops[1];
4032 create_input_operand (&ops[0], x, mode);
4033 if (maybe_expand_insn (icode, 1, ops))
4034 return;
4036 if (GET_MODE_SIZE (mode) == rounded_size)
4037 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4038 /* If we are to pad downward, adjust the stack pointer first and
4039 then store X into the stack location using an offset. This is
4040 because emit_move_insn does not know how to pad; it does not have
4041 access to type. */
4042 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4044 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4045 HOST_WIDE_INT offset;
4047 emit_move_insn (stack_pointer_rtx,
4048 expand_binop (Pmode,
4049 #ifdef STACK_GROWS_DOWNWARD
4050 sub_optab,
4051 #else
4052 add_optab,
4053 #endif
4054 stack_pointer_rtx,
4055 gen_int_mode (rounded_size, Pmode),
4056 NULL_RTX, 0, OPTAB_LIB_WIDEN));
4058 offset = (HOST_WIDE_INT) padding_size;
4059 #ifdef STACK_GROWS_DOWNWARD
4060 if (STACK_PUSH_CODE == POST_DEC)
4061 /* We have already decremented the stack pointer, so get the
4062 previous value. */
4063 offset += (HOST_WIDE_INT) rounded_size;
4064 #else
4065 if (STACK_PUSH_CODE == POST_INC)
4066 /* We have already incremented the stack pointer, so get the
4067 previous value. */
4068 offset -= (HOST_WIDE_INT) rounded_size;
4069 #endif
4070 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4071 gen_int_mode (offset, Pmode));
4073 else
4075 #ifdef STACK_GROWS_DOWNWARD
4076 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
4077 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4078 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4079 Pmode));
4080 #else
4081 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
4082 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4083 gen_int_mode (rounded_size, Pmode));
4084 #endif
4085 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4088 dest = gen_rtx_MEM (mode, dest_addr);
4090 if (type != 0)
4092 set_mem_attributes (dest, type, 1);
4094 if (cfun->tail_call_marked)
4095 /* Function incoming arguments may overlap with sibling call
4096 outgoing arguments and we cannot allow reordering of reads
4097 from function arguments with stores to outgoing arguments
4098 of sibling calls. */
4099 set_mem_alias_set (dest, 0);
4101 emit_move_insn (dest, x);
4104 /* Emit and annotate a single push insn. */
4106 static void
4107 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4109 int delta, old_delta = stack_pointer_delta;
4110 rtx_insn *prev = get_last_insn ();
4111 rtx_insn *last;
4113 emit_single_push_insn_1 (mode, x, type);
4115 last = get_last_insn ();
4117 /* Notice the common case where we emitted exactly one insn. */
4118 if (PREV_INSN (last) == prev)
4120 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4121 return;
4124 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4125 gcc_assert (delta == INT_MIN || delta == old_delta);
4127 #endif
4129 /* Generate code to push X onto the stack, assuming it has mode MODE and
4130 type TYPE.
4131 MODE is redundant except when X is a CONST_INT (since they don't
4132 carry mode info).
4133 SIZE is an rtx for the size of data to be copied (in bytes),
4134 needed only if X is BLKmode.
4136 ALIGN (in bits) is maximum alignment we can assume.
4138 If PARTIAL and REG are both nonzero, then copy that many of the first
4139 bytes of X into registers starting with REG, and push the rest of X.
4140 The amount of space pushed is decreased by PARTIAL bytes.
4141 REG must be a hard register in this case.
4142 If REG is zero but PARTIAL is not, take any all others actions for an
4143 argument partially in registers, but do not actually load any
4144 registers.
4146 EXTRA is the amount in bytes of extra space to leave next to this arg.
4147 This is ignored if an argument block has already been allocated.
4149 On a machine that lacks real push insns, ARGS_ADDR is the address of
4150 the bottom of the argument block for this call. We use indexing off there
4151 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
4152 argument block has not been preallocated.
4154 ARGS_SO_FAR is the size of args previously pushed for this call.
4156 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4157 for arguments passed in registers. If nonzero, it will be the number
4158 of bytes required. */
4160 void
4161 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4162 unsigned int align, int partial, rtx reg, int extra,
4163 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4164 rtx alignment_pad)
4166 rtx xinner;
4167 enum direction stack_direction
4168 #ifdef STACK_GROWS_DOWNWARD
4169 = downward;
4170 #else
4171 = upward;
4172 #endif
4174 /* Decide where to pad the argument: `downward' for below,
4175 `upward' for above, or `none' for don't pad it.
4176 Default is below for small data on big-endian machines; else above. */
4177 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4179 /* Invert direction if stack is post-decrement.
4180 FIXME: why? */
4181 if (STACK_PUSH_CODE == POST_DEC)
4182 if (where_pad != none)
4183 where_pad = (where_pad == downward ? upward : downward);
4185 xinner = x;
4187 if (mode == BLKmode
4188 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4190 /* Copy a block into the stack, entirely or partially. */
4192 rtx temp;
4193 int used;
4194 int offset;
4195 int skip;
4197 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4198 used = partial - offset;
4200 if (mode != BLKmode)
4202 /* A value is to be stored in an insufficiently aligned
4203 stack slot; copy via a suitably aligned slot if
4204 necessary. */
4205 size = GEN_INT (GET_MODE_SIZE (mode));
4206 if (!MEM_P (xinner))
4208 temp = assign_temp (type, 1, 1);
4209 emit_move_insn (temp, xinner);
4210 xinner = temp;
4214 gcc_assert (size);
4216 /* USED is now the # of bytes we need not copy to the stack
4217 because registers will take care of them. */
4219 if (partial != 0)
4220 xinner = adjust_address (xinner, BLKmode, used);
4222 /* If the partial register-part of the arg counts in its stack size,
4223 skip the part of stack space corresponding to the registers.
4224 Otherwise, start copying to the beginning of the stack space,
4225 by setting SKIP to 0. */
4226 skip = (reg_parm_stack_space == 0) ? 0 : used;
4228 #ifdef PUSH_ROUNDING
4229 /* Do it with several push insns if that doesn't take lots of insns
4230 and if there is no difficulty with push insns that skip bytes
4231 on the stack for alignment purposes. */
4232 if (args_addr == 0
4233 && PUSH_ARGS
4234 && CONST_INT_P (size)
4235 && skip == 0
4236 && MEM_ALIGN (xinner) >= align
4237 && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4238 /* Here we avoid the case of a structure whose weak alignment
4239 forces many pushes of a small amount of data,
4240 and such small pushes do rounding that causes trouble. */
4241 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4242 || align >= BIGGEST_ALIGNMENT
4243 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4244 == (align / BITS_PER_UNIT)))
4245 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4247 /* Push padding now if padding above and stack grows down,
4248 or if padding below and stack grows up.
4249 But if space already allocated, this has already been done. */
4250 if (extra && args_addr == 0
4251 && where_pad != none && where_pad != stack_direction)
4252 anti_adjust_stack (GEN_INT (extra));
4254 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4256 else
4257 #endif /* PUSH_ROUNDING */
4259 rtx target;
4261 /* Otherwise make space on the stack and copy the data
4262 to the address of that space. */
4264 /* Deduct words put into registers from the size we must copy. */
4265 if (partial != 0)
4267 if (CONST_INT_P (size))
4268 size = GEN_INT (INTVAL (size) - used);
4269 else
4270 size = expand_binop (GET_MODE (size), sub_optab, size,
4271 gen_int_mode (used, GET_MODE (size)),
4272 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4275 /* Get the address of the stack space.
4276 In this case, we do not deal with EXTRA separately.
4277 A single stack adjust will do. */
4278 if (! args_addr)
4280 temp = push_block (size, extra, where_pad == downward);
4281 extra = 0;
4283 else if (CONST_INT_P (args_so_far))
4284 temp = memory_address (BLKmode,
4285 plus_constant (Pmode, args_addr,
4286 skip + INTVAL (args_so_far)));
4287 else
4288 temp = memory_address (BLKmode,
4289 plus_constant (Pmode,
4290 gen_rtx_PLUS (Pmode,
4291 args_addr,
4292 args_so_far),
4293 skip));
4295 if (!ACCUMULATE_OUTGOING_ARGS)
4297 /* If the source is referenced relative to the stack pointer,
4298 copy it to another register to stabilize it. We do not need
4299 to do this if we know that we won't be changing sp. */
4301 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4302 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4303 temp = copy_to_reg (temp);
4306 target = gen_rtx_MEM (BLKmode, temp);
4308 /* We do *not* set_mem_attributes here, because incoming arguments
4309 may overlap with sibling call outgoing arguments and we cannot
4310 allow reordering of reads from function arguments with stores
4311 to outgoing arguments of sibling calls. We do, however, want
4312 to record the alignment of the stack slot. */
4313 /* ALIGN may well be better aligned than TYPE, e.g. due to
4314 PARM_BOUNDARY. Assume the caller isn't lying. */
4315 set_mem_align (target, align);
4317 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4320 else if (partial > 0)
4322 /* Scalar partly in registers. */
4324 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4325 int i;
4326 int not_stack;
4327 /* # bytes of start of argument
4328 that we must make space for but need not store. */
4329 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4330 int args_offset = INTVAL (args_so_far);
4331 int skip;
4333 /* Push padding now if padding above and stack grows down,
4334 or if padding below and stack grows up.
4335 But if space already allocated, this has already been done. */
4336 if (extra && args_addr == 0
4337 && where_pad != none && where_pad != stack_direction)
4338 anti_adjust_stack (GEN_INT (extra));
4340 /* If we make space by pushing it, we might as well push
4341 the real data. Otherwise, we can leave OFFSET nonzero
4342 and leave the space uninitialized. */
4343 if (args_addr == 0)
4344 offset = 0;
4346 /* Now NOT_STACK gets the number of words that we don't need to
4347 allocate on the stack. Convert OFFSET to words too. */
4348 not_stack = (partial - offset) / UNITS_PER_WORD;
4349 offset /= UNITS_PER_WORD;
4351 /* If the partial register-part of the arg counts in its stack size,
4352 skip the part of stack space corresponding to the registers.
4353 Otherwise, start copying to the beginning of the stack space,
4354 by setting SKIP to 0. */
4355 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4357 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4358 x = validize_mem (force_const_mem (mode, x));
4360 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4361 SUBREGs of such registers are not allowed. */
4362 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4363 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4364 x = copy_to_reg (x);
4366 /* Loop over all the words allocated on the stack for this arg. */
4367 /* We can do it by words, because any scalar bigger than a word
4368 has a size a multiple of a word. */
4369 for (i = size - 1; i >= not_stack; i--)
4370 if (i >= not_stack + offset)
4371 emit_push_insn (operand_subword_force (x, i, mode),
4372 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4373 0, args_addr,
4374 GEN_INT (args_offset + ((i - not_stack + skip)
4375 * UNITS_PER_WORD)),
4376 reg_parm_stack_space, alignment_pad);
4378 else
4380 rtx addr;
4381 rtx dest;
4383 /* Push padding now if padding above and stack grows down,
4384 or if padding below and stack grows up.
4385 But if space already allocated, this has already been done. */
4386 if (extra && args_addr == 0
4387 && where_pad != none && where_pad != stack_direction)
4388 anti_adjust_stack (GEN_INT (extra));
4390 #ifdef PUSH_ROUNDING
4391 if (args_addr == 0 && PUSH_ARGS)
4392 emit_single_push_insn (mode, x, type);
4393 else
4394 #endif
4396 if (CONST_INT_P (args_so_far))
4397 addr
4398 = memory_address (mode,
4399 plus_constant (Pmode, args_addr,
4400 INTVAL (args_so_far)));
4401 else
4402 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4403 args_so_far));
4404 dest = gen_rtx_MEM (mode, addr);
4406 /* We do *not* set_mem_attributes here, because incoming arguments
4407 may overlap with sibling call outgoing arguments and we cannot
4408 allow reordering of reads from function arguments with stores
4409 to outgoing arguments of sibling calls. We do, however, want
4410 to record the alignment of the stack slot. */
4411 /* ALIGN may well be better aligned than TYPE, e.g. due to
4412 PARM_BOUNDARY. Assume the caller isn't lying. */
4413 set_mem_align (dest, align);
4415 emit_move_insn (dest, x);
4419 /* If part should go in registers, copy that part
4420 into the appropriate registers. Do this now, at the end,
4421 since mem-to-mem copies above may do function calls. */
4422 if (partial > 0 && reg != 0)
4424 /* Handle calls that pass values in multiple non-contiguous locations.
4425 The Irix 6 ABI has examples of this. */
4426 if (GET_CODE (reg) == PARALLEL)
4427 emit_group_load (reg, x, type, -1);
4428 else
4430 gcc_assert (partial % UNITS_PER_WORD == 0);
4431 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4435 if (extra && args_addr == 0 && where_pad == stack_direction)
4436 anti_adjust_stack (GEN_INT (extra));
4438 if (alignment_pad && args_addr == 0)
4439 anti_adjust_stack (alignment_pad);
4442 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4443 operations. */
4445 static rtx
4446 get_subtarget (rtx x)
4448 return (optimize
4449 || x == 0
4450 /* Only registers can be subtargets. */
4451 || !REG_P (x)
4452 /* Don't use hard regs to avoid extending their life. */
4453 || REGNO (x) < FIRST_PSEUDO_REGISTER
4454 ? 0 : x);
4457 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4458 FIELD is a bitfield. Returns true if the optimization was successful,
4459 and there's nothing else to do. */
4461 static bool
4462 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4463 unsigned HOST_WIDE_INT bitpos,
4464 unsigned HOST_WIDE_INT bitregion_start,
4465 unsigned HOST_WIDE_INT bitregion_end,
4466 machine_mode mode1, rtx str_rtx,
4467 tree to, tree src, bool reverse)
4469 machine_mode str_mode = GET_MODE (str_rtx);
4470 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4471 tree op0, op1;
4472 rtx value, result;
4473 optab binop;
4474 gimple srcstmt;
4475 enum tree_code code;
4477 if (mode1 != VOIDmode
4478 || bitsize >= BITS_PER_WORD
4479 || str_bitsize > BITS_PER_WORD
4480 || TREE_SIDE_EFFECTS (to)
4481 || TREE_THIS_VOLATILE (to))
4482 return false;
4484 STRIP_NOPS (src);
4485 if (TREE_CODE (src) != SSA_NAME)
4486 return false;
4487 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4488 return false;
4490 srcstmt = get_gimple_for_ssa_name (src);
4491 if (!srcstmt
4492 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4493 return false;
4495 code = gimple_assign_rhs_code (srcstmt);
4497 op0 = gimple_assign_rhs1 (srcstmt);
4499 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4500 to find its initialization. Hopefully the initialization will
4501 be from a bitfield load. */
4502 if (TREE_CODE (op0) == SSA_NAME)
4504 gimple op0stmt = get_gimple_for_ssa_name (op0);
4506 /* We want to eventually have OP0 be the same as TO, which
4507 should be a bitfield. */
4508 if (!op0stmt
4509 || !is_gimple_assign (op0stmt)
4510 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4511 return false;
4512 op0 = gimple_assign_rhs1 (op0stmt);
4515 op1 = gimple_assign_rhs2 (srcstmt);
4517 if (!operand_equal_p (to, op0, 0))
4518 return false;
4520 if (MEM_P (str_rtx))
4522 unsigned HOST_WIDE_INT offset1;
4524 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4525 str_mode = word_mode;
4526 str_mode = get_best_mode (bitsize, bitpos,
4527 bitregion_start, bitregion_end,
4528 MEM_ALIGN (str_rtx), str_mode, 0);
4529 if (str_mode == VOIDmode)
4530 return false;
4531 str_bitsize = GET_MODE_BITSIZE (str_mode);
4533 offset1 = bitpos;
4534 bitpos %= str_bitsize;
4535 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4536 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4538 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4539 return false;
4540 else
4541 gcc_assert (!reverse);
4543 /* If the bit field covers the whole REG/MEM, store_field
4544 will likely generate better code. */
4545 if (bitsize >= str_bitsize)
4546 return false;
4548 /* We can't handle fields split across multiple entities. */
4549 if (bitpos + bitsize > str_bitsize)
4550 return false;
4552 if (BYTES_BIG_ENDIAN)
4553 bitpos = str_bitsize - bitpos - bitsize;
4555 switch (code)
4557 case PLUS_EXPR:
4558 case MINUS_EXPR:
4559 /* For now, just optimize the case of the topmost bitfield
4560 where we don't need to do any masking and also
4561 1 bit bitfields where xor can be used.
4562 We might win by one instruction for the other bitfields
4563 too if insv/extv instructions aren't used, so that
4564 can be added later. */
4565 if ((reverse || bitpos + bitsize != str_bitsize)
4566 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4567 break;
4569 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4570 value = convert_modes (str_mode,
4571 TYPE_MODE (TREE_TYPE (op1)), value,
4572 TYPE_UNSIGNED (TREE_TYPE (op1)));
4574 /* We may be accessing data outside the field, which means
4575 we can alias adjacent data. */
4576 if (MEM_P (str_rtx))
4578 str_rtx = shallow_copy_rtx (str_rtx);
4579 set_mem_alias_set (str_rtx, 0);
4580 set_mem_expr (str_rtx, 0);
4583 if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4585 value = expand_and (str_mode, value, const1_rtx, NULL);
4586 binop = xor_optab;
4588 else
4589 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4591 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4592 if (reverse)
4593 value = flip_storage_order (str_mode, value);
4594 result = expand_binop (str_mode, binop, str_rtx,
4595 value, str_rtx, 1, OPTAB_WIDEN);
4596 if (result != str_rtx)
4597 emit_move_insn (str_rtx, result);
4598 return true;
4600 case BIT_IOR_EXPR:
4601 case BIT_XOR_EXPR:
4602 if (TREE_CODE (op1) != INTEGER_CST)
4603 break;
4604 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4605 value = convert_modes (str_mode,
4606 TYPE_MODE (TREE_TYPE (op1)), value,
4607 TYPE_UNSIGNED (TREE_TYPE (op1)));
4609 /* We may be accessing data outside the field, which means
4610 we can alias adjacent data. */
4611 if (MEM_P (str_rtx))
4613 str_rtx = shallow_copy_rtx (str_rtx);
4614 set_mem_alias_set (str_rtx, 0);
4615 set_mem_expr (str_rtx, 0);
4618 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4619 if (bitpos + bitsize != str_bitsize)
4621 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4622 str_mode);
4623 value = expand_and (str_mode, value, mask, NULL_RTX);
4625 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4626 if (reverse)
4627 value = flip_storage_order (str_mode, value);
4628 result = expand_binop (str_mode, binop, str_rtx,
4629 value, str_rtx, 1, OPTAB_WIDEN);
4630 if (result != str_rtx)
4631 emit_move_insn (str_rtx, result);
4632 return true;
4634 default:
4635 break;
4638 return false;
4641 /* In the C++ memory model, consecutive bit fields in a structure are
4642 considered one memory location.
4644 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4645 returns the bit range of consecutive bits in which this COMPONENT_REF
4646 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4647 and *OFFSET may be adjusted in the process.
4649 If the access does not need to be restricted, 0 is returned in both
4650 *BITSTART and *BITEND. */
4652 static void
4653 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4654 unsigned HOST_WIDE_INT *bitend,
4655 tree exp,
4656 HOST_WIDE_INT *bitpos,
4657 tree *offset)
4659 HOST_WIDE_INT bitoffset;
4660 tree field, repr;
4662 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4664 field = TREE_OPERAND (exp, 1);
4665 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4666 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4667 need to limit the range we can access. */
4668 if (!repr)
4670 *bitstart = *bitend = 0;
4671 return;
4674 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4675 part of a larger bit field, then the representative does not serve any
4676 useful purpose. This can occur in Ada. */
4677 if (handled_component_p (TREE_OPERAND (exp, 0)))
4679 machine_mode rmode;
4680 HOST_WIDE_INT rbitsize, rbitpos;
4681 tree roffset;
4682 int unsignedp, reversep, volatilep = 0;
4683 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4684 &roffset, &rmode, &unsignedp, &reversep,
4685 &volatilep, false);
4686 if ((rbitpos % BITS_PER_UNIT) != 0)
4688 *bitstart = *bitend = 0;
4689 return;
4693 /* Compute the adjustment to bitpos from the offset of the field
4694 relative to the representative. DECL_FIELD_OFFSET of field and
4695 repr are the same by construction if they are not constants,
4696 see finish_bitfield_layout. */
4697 if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4698 && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4699 bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4700 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4701 else
4702 bitoffset = 0;
4703 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4704 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4706 /* If the adjustment is larger than bitpos, we would have a negative bit
4707 position for the lower bound and this may wreak havoc later. Adjust
4708 offset and bitpos to make the lower bound non-negative in that case. */
4709 if (bitoffset > *bitpos)
4711 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4712 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4714 *bitpos += adjust;
4715 if (*offset == NULL_TREE)
4716 *offset = size_int (-adjust / BITS_PER_UNIT);
4717 else
4718 *offset
4719 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4720 *bitstart = 0;
4722 else
4723 *bitstart = *bitpos - bitoffset;
4725 *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4728 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4729 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4730 DECL_RTL was not set yet, return NORTL. */
4732 static inline bool
4733 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4735 if (TREE_CODE (addr) != ADDR_EXPR)
4736 return false;
4738 tree base = TREE_OPERAND (addr, 0);
4740 if (!DECL_P (base)
4741 || TREE_ADDRESSABLE (base)
4742 || DECL_MODE (base) == BLKmode)
4743 return false;
4745 if (!DECL_RTL_SET_P (base))
4746 return nortl;
4748 return (!MEM_P (DECL_RTL (base)));
4751 /* Returns true if the MEM_REF REF refers to an object that does not
4752 reside in memory and has non-BLKmode. */
4754 static inline bool
4755 mem_ref_refers_to_non_mem_p (tree ref)
4757 tree base = TREE_OPERAND (ref, 0);
4758 return addr_expr_of_non_mem_decl_p_1 (base, false);
4761 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4762 is true, try generating a nontemporal store. */
4764 void
4765 expand_assignment (tree to, tree from, bool nontemporal)
4767 rtx to_rtx = 0;
4768 rtx result;
4769 machine_mode mode;
4770 unsigned int align;
4771 enum insn_code icode;
4773 /* Don't crash if the lhs of the assignment was erroneous. */
4774 if (TREE_CODE (to) == ERROR_MARK)
4776 expand_normal (from);
4777 return;
4780 /* Optimize away no-op moves without side-effects. */
4781 if (operand_equal_p (to, from, 0))
4782 return;
4784 /* Handle misaligned stores. */
4785 mode = TYPE_MODE (TREE_TYPE (to));
4786 if ((TREE_CODE (to) == MEM_REF
4787 || TREE_CODE (to) == TARGET_MEM_REF)
4788 && mode != BLKmode
4789 && !mem_ref_refers_to_non_mem_p (to)
4790 && ((align = get_object_alignment (to))
4791 < GET_MODE_ALIGNMENT (mode))
4792 && (((icode = optab_handler (movmisalign_optab, mode))
4793 != CODE_FOR_nothing)
4794 || SLOW_UNALIGNED_ACCESS (mode, align)))
4796 rtx reg, mem;
4798 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4799 reg = force_not_mem (reg);
4800 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4801 if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4802 reg = flip_storage_order (mode, reg);
4804 if (icode != CODE_FOR_nothing)
4806 struct expand_operand ops[2];
4808 create_fixed_operand (&ops[0], mem);
4809 create_input_operand (&ops[1], reg, mode);
4810 /* The movmisalign<mode> pattern cannot fail, else the assignment
4811 would silently be omitted. */
4812 expand_insn (icode, 2, ops);
4814 else
4815 store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4816 false);
4817 return;
4820 /* Assignment of a structure component needs special treatment
4821 if the structure component's rtx is not simply a MEM.
4822 Assignment of an array element at a constant index, and assignment of
4823 an array element in an unaligned packed structure field, has the same
4824 problem. Same for (partially) storing into a non-memory object. */
4825 if (handled_component_p (to)
4826 || (TREE_CODE (to) == MEM_REF
4827 && (REF_REVERSE_STORAGE_ORDER (to)
4828 || mem_ref_refers_to_non_mem_p (to)))
4829 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4831 machine_mode mode1;
4832 HOST_WIDE_INT bitsize, bitpos;
4833 unsigned HOST_WIDE_INT bitregion_start = 0;
4834 unsigned HOST_WIDE_INT bitregion_end = 0;
4835 tree offset;
4836 int unsignedp, reversep, volatilep = 0;
4837 tree tem;
4839 push_temp_slots ();
4840 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4841 &unsignedp, &reversep, &volatilep, true);
4843 /* Make sure bitpos is not negative, it can wreak havoc later. */
4844 if (bitpos < 0)
4846 gcc_assert (offset == NULL_TREE);
4847 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4848 ? 3 : exact_log2 (BITS_PER_UNIT)));
4849 bitpos &= BITS_PER_UNIT - 1;
4852 if (TREE_CODE (to) == COMPONENT_REF
4853 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4854 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4855 /* The C++ memory model naturally applies to byte-aligned fields.
4856 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4857 BITSIZE are not byte-aligned, there is no need to limit the range
4858 we can access. This can occur with packed structures in Ada. */
4859 else if (bitsize > 0
4860 && bitsize % BITS_PER_UNIT == 0
4861 && bitpos % BITS_PER_UNIT == 0)
4863 bitregion_start = bitpos;
4864 bitregion_end = bitpos + bitsize - 1;
4867 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4869 /* If the field has a mode, we want to access it in the
4870 field's mode, not the computed mode.
4871 If a MEM has VOIDmode (external with incomplete type),
4872 use BLKmode for it instead. */
4873 if (MEM_P (to_rtx))
4875 if (mode1 != VOIDmode)
4876 to_rtx = adjust_address (to_rtx, mode1, 0);
4877 else if (GET_MODE (to_rtx) == VOIDmode)
4878 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4881 if (offset != 0)
4883 machine_mode address_mode;
4884 rtx offset_rtx;
4886 if (!MEM_P (to_rtx))
4888 /* We can get constant negative offsets into arrays with broken
4889 user code. Translate this to a trap instead of ICEing. */
4890 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4891 expand_builtin_trap ();
4892 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4895 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4896 address_mode = get_address_mode (to_rtx);
4897 if (GET_MODE (offset_rtx) != address_mode)
4899 /* We cannot be sure that the RTL in offset_rtx is valid outside
4900 of a memory address context, so force it into a register
4901 before attempting to convert it to the desired mode. */
4902 offset_rtx = force_operand (offset_rtx, NULL_RTX);
4903 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4906 /* If we have an expression in OFFSET_RTX and a non-zero
4907 byte offset in BITPOS, adding the byte offset before the
4908 OFFSET_RTX results in better intermediate code, which makes
4909 later rtl optimization passes perform better.
4911 We prefer intermediate code like this:
4913 r124:DI=r123:DI+0x18
4914 [r124:DI]=r121:DI
4916 ... instead of ...
4918 r124:DI=r123:DI+0x10
4919 [r124:DI+0x8]=r121:DI
4921 This is only done for aligned data values, as these can
4922 be expected to result in single move instructions. */
4923 if (mode1 != VOIDmode
4924 && bitpos != 0
4925 && bitsize > 0
4926 && (bitpos % bitsize) == 0
4927 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4928 && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4930 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4931 bitregion_start = 0;
4932 if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4933 bitregion_end -= bitpos;
4934 bitpos = 0;
4937 to_rtx = offset_address (to_rtx, offset_rtx,
4938 highest_pow2_factor_for_target (to,
4939 offset));
4942 /* No action is needed if the target is not a memory and the field
4943 lies completely outside that target. This can occur if the source
4944 code contains an out-of-bounds access to a small array. */
4945 if (!MEM_P (to_rtx)
4946 && GET_MODE (to_rtx) != BLKmode
4947 && (unsigned HOST_WIDE_INT) bitpos
4948 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4950 expand_normal (from);
4951 result = NULL;
4953 /* Handle expand_expr of a complex value returning a CONCAT. */
4954 else if (GET_CODE (to_rtx) == CONCAT)
4956 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4957 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4958 && bitpos == 0
4959 && bitsize == mode_bitsize)
4960 result = store_expr (from, to_rtx, false, nontemporal, reversep);
4961 else if (bitsize == mode_bitsize / 2
4962 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4963 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4964 nontemporal, reversep);
4965 else if (bitpos + bitsize <= mode_bitsize / 2)
4966 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4967 bitregion_start, bitregion_end,
4968 mode1, from, get_alias_set (to),
4969 nontemporal, reversep);
4970 else if (bitpos >= mode_bitsize / 2)
4971 result = store_field (XEXP (to_rtx, 1), bitsize,
4972 bitpos - mode_bitsize / 2,
4973 bitregion_start, bitregion_end,
4974 mode1, from, get_alias_set (to),
4975 nontemporal, reversep);
4976 else if (bitpos == 0 && bitsize == mode_bitsize)
4978 rtx from_rtx;
4979 result = expand_normal (from);
4980 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4981 TYPE_MODE (TREE_TYPE (from)), 0);
4982 emit_move_insn (XEXP (to_rtx, 0),
4983 read_complex_part (from_rtx, false));
4984 emit_move_insn (XEXP (to_rtx, 1),
4985 read_complex_part (from_rtx, true));
4987 else
4989 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4990 GET_MODE_SIZE (GET_MODE (to_rtx)));
4991 write_complex_part (temp, XEXP (to_rtx, 0), false);
4992 write_complex_part (temp, XEXP (to_rtx, 1), true);
4993 result = store_field (temp, bitsize, bitpos,
4994 bitregion_start, bitregion_end,
4995 mode1, from, get_alias_set (to),
4996 nontemporal, reversep);
4997 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4998 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5001 else
5003 if (MEM_P (to_rtx))
5005 /* If the field is at offset zero, we could have been given the
5006 DECL_RTX of the parent struct. Don't munge it. */
5007 to_rtx = shallow_copy_rtx (to_rtx);
5008 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5009 if (volatilep)
5010 MEM_VOLATILE_P (to_rtx) = 1;
5013 if (optimize_bitfield_assignment_op (bitsize, bitpos,
5014 bitregion_start, bitregion_end,
5015 mode1, to_rtx, to, from,
5016 reversep))
5017 result = NULL;
5018 else
5019 result = store_field (to_rtx, bitsize, bitpos,
5020 bitregion_start, bitregion_end,
5021 mode1, from, get_alias_set (to),
5022 nontemporal, reversep);
5025 if (result)
5026 preserve_temp_slots (result);
5027 pop_temp_slots ();
5028 return;
5031 /* If the rhs is a function call and its value is not an aggregate,
5032 call the function before we start to compute the lhs.
5033 This is needed for correct code for cases such as
5034 val = setjmp (buf) on machines where reference to val
5035 requires loading up part of an address in a separate insn.
5037 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5038 since it might be a promoted variable where the zero- or sign- extension
5039 needs to be done. Handling this in the normal way is safe because no
5040 computation is done before the call. The same is true for SSA names. */
5041 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5042 && COMPLETE_TYPE_P (TREE_TYPE (from))
5043 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5044 && ! (((TREE_CODE (to) == VAR_DECL
5045 || TREE_CODE (to) == PARM_DECL
5046 || TREE_CODE (to) == RESULT_DECL)
5047 && REG_P (DECL_RTL (to)))
5048 || TREE_CODE (to) == SSA_NAME))
5050 rtx value;
5051 rtx bounds;
5053 push_temp_slots ();
5054 value = expand_normal (from);
5056 /* Split value and bounds to store them separately. */
5057 chkp_split_slot (value, &value, &bounds);
5059 if (to_rtx == 0)
5060 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5062 /* Handle calls that return values in multiple non-contiguous locations.
5063 The Irix 6 ABI has examples of this. */
5064 if (GET_CODE (to_rtx) == PARALLEL)
5066 if (GET_CODE (value) == PARALLEL)
5067 emit_group_move (to_rtx, value);
5068 else
5069 emit_group_load (to_rtx, value, TREE_TYPE (from),
5070 int_size_in_bytes (TREE_TYPE (from)));
5072 else if (GET_CODE (value) == PARALLEL)
5073 emit_group_store (to_rtx, value, TREE_TYPE (from),
5074 int_size_in_bytes (TREE_TYPE (from)));
5075 else if (GET_MODE (to_rtx) == BLKmode)
5077 /* Handle calls that return BLKmode values in registers. */
5078 if (REG_P (value))
5079 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5080 else
5081 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5083 else
5085 if (POINTER_TYPE_P (TREE_TYPE (to)))
5086 value = convert_memory_address_addr_space
5087 (GET_MODE (to_rtx), value,
5088 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5090 emit_move_insn (to_rtx, value);
5093 /* Store bounds if required. */
5094 if (bounds
5095 && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5097 gcc_assert (MEM_P (to_rtx));
5098 chkp_emit_bounds_store (bounds, value, to_rtx);
5101 preserve_temp_slots (to_rtx);
5102 pop_temp_slots ();
5103 return;
5106 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
5107 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5109 /* Don't move directly into a return register. */
5110 if (TREE_CODE (to) == RESULT_DECL
5111 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5113 rtx temp;
5115 push_temp_slots ();
5117 /* If the source is itself a return value, it still is in a pseudo at
5118 this point so we can move it back to the return register directly. */
5119 if (REG_P (to_rtx)
5120 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5121 && TREE_CODE (from) != CALL_EXPR)
5122 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5123 else
5124 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5126 /* Handle calls that return values in multiple non-contiguous locations.
5127 The Irix 6 ABI has examples of this. */
5128 if (GET_CODE (to_rtx) == PARALLEL)
5130 if (GET_CODE (temp) == PARALLEL)
5131 emit_group_move (to_rtx, temp);
5132 else
5133 emit_group_load (to_rtx, temp, TREE_TYPE (from),
5134 int_size_in_bytes (TREE_TYPE (from)));
5136 else if (temp)
5137 emit_move_insn (to_rtx, temp);
5139 preserve_temp_slots (to_rtx);
5140 pop_temp_slots ();
5141 return;
5144 /* In case we are returning the contents of an object which overlaps
5145 the place the value is being stored, use a safe function when copying
5146 a value through a pointer into a structure value return block. */
5147 if (TREE_CODE (to) == RESULT_DECL
5148 && TREE_CODE (from) == INDIRECT_REF
5149 && ADDR_SPACE_GENERIC_P
5150 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5151 && refs_may_alias_p (to, from)
5152 && cfun->returns_struct
5153 && !cfun->returns_pcc_struct)
5155 rtx from_rtx, size;
5157 push_temp_slots ();
5158 size = expr_size (from);
5159 from_rtx = expand_normal (from);
5161 emit_library_call (memmove_libfunc, LCT_NORMAL,
5162 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5163 XEXP (from_rtx, 0), Pmode,
5164 convert_to_mode (TYPE_MODE (sizetype),
5165 size, TYPE_UNSIGNED (sizetype)),
5166 TYPE_MODE (sizetype));
5168 preserve_temp_slots (to_rtx);
5169 pop_temp_slots ();
5170 return;
5173 /* Compute FROM and store the value in the rtx we got. */
5175 push_temp_slots ();
5176 result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5177 preserve_temp_slots (result);
5178 pop_temp_slots ();
5179 return;
5182 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5183 succeeded, false otherwise. */
5185 bool
5186 emit_storent_insn (rtx to, rtx from)
5188 struct expand_operand ops[2];
5189 machine_mode mode = GET_MODE (to);
5190 enum insn_code code = optab_handler (storent_optab, mode);
5192 if (code == CODE_FOR_nothing)
5193 return false;
5195 create_fixed_operand (&ops[0], to);
5196 create_input_operand (&ops[1], from, mode);
5197 return maybe_expand_insn (code, 2, ops);
5200 /* Generate code for computing expression EXP,
5201 and storing the value into TARGET.
5203 If the mode is BLKmode then we may return TARGET itself.
5204 It turns out that in BLKmode it doesn't cause a problem.
5205 because C has no operators that could combine two different
5206 assignments into the same BLKmode object with different values
5207 with no sequence point. Will other languages need this to
5208 be more thorough?
5210 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5211 stack, and block moves may need to be treated specially.
5213 If NONTEMPORAL is true, try using a nontemporal store instruction.
5215 If REVERSE is true, the store is to be done in reverse order.
5217 If BTARGET is not NULL then computed bounds of EXP are
5218 associated with BTARGET. */
5221 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5222 bool nontemporal, bool reverse, tree btarget)
5224 rtx temp;
5225 rtx alt_rtl = NULL_RTX;
5226 location_t loc = curr_insn_location ();
5228 if (VOID_TYPE_P (TREE_TYPE (exp)))
5230 /* C++ can generate ?: expressions with a throw expression in one
5231 branch and an rvalue in the other. Here, we resolve attempts to
5232 store the throw expression's nonexistent result. */
5233 gcc_assert (!call_param_p);
5234 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5235 return NULL_RTX;
5237 if (TREE_CODE (exp) == COMPOUND_EXPR)
5239 /* Perform first part of compound expression, then assign from second
5240 part. */
5241 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5242 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5243 return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5244 call_param_p, nontemporal, reverse,
5245 btarget);
5247 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5249 /* For conditional expression, get safe form of the target. Then
5250 test the condition, doing the appropriate assignment on either
5251 side. This avoids the creation of unnecessary temporaries.
5252 For non-BLKmode, it is more efficient not to do this. */
5254 rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5256 do_pending_stack_adjust ();
5257 NO_DEFER_POP;
5258 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5259 store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5260 nontemporal, reverse, btarget);
5261 emit_jump_insn (gen_jump (lab2));
5262 emit_barrier ();
5263 emit_label (lab1);
5264 store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5265 nontemporal, reverse, btarget);
5266 emit_label (lab2);
5267 OK_DEFER_POP;
5269 return NULL_RTX;
5271 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5272 /* If this is a scalar in a register that is stored in a wider mode
5273 than the declared mode, compute the result into its declared mode
5274 and then convert to the wider mode. Our value is the computed
5275 expression. */
5277 rtx inner_target = 0;
5279 /* We can do the conversion inside EXP, which will often result
5280 in some optimizations. Do the conversion in two steps: first
5281 change the signedness, if needed, then the extend. But don't
5282 do this if the type of EXP is a subtype of something else
5283 since then the conversion might involve more than just
5284 converting modes. */
5285 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5286 && TREE_TYPE (TREE_TYPE (exp)) == 0
5287 && GET_MODE_PRECISION (GET_MODE (target))
5288 == TYPE_PRECISION (TREE_TYPE (exp)))
5290 if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5291 TYPE_UNSIGNED (TREE_TYPE (exp))))
5293 /* Some types, e.g. Fortran's logical*4, won't have a signed
5294 version, so use the mode instead. */
5295 tree ntype
5296 = (signed_or_unsigned_type_for
5297 (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5298 if (ntype == NULL)
5299 ntype = lang_hooks.types.type_for_mode
5300 (TYPE_MODE (TREE_TYPE (exp)),
5301 SUBREG_PROMOTED_SIGN (target));
5303 exp = fold_convert_loc (loc, ntype, exp);
5306 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5307 (GET_MODE (SUBREG_REG (target)),
5308 SUBREG_PROMOTED_SIGN (target)),
5309 exp);
5311 inner_target = SUBREG_REG (target);
5314 temp = expand_expr (exp, inner_target, VOIDmode,
5315 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5317 /* Handle bounds returned by call. */
5318 if (TREE_CODE (exp) == CALL_EXPR)
5320 rtx bounds;
5321 chkp_split_slot (temp, &temp, &bounds);
5322 if (bounds && btarget)
5324 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5325 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5326 chkp_set_rtl_bounds (btarget, tmp);
5330 /* If TEMP is a VOIDmode constant, use convert_modes to make
5331 sure that we properly convert it. */
5332 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5334 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5335 temp, SUBREG_PROMOTED_SIGN (target));
5336 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5337 GET_MODE (target), temp,
5338 SUBREG_PROMOTED_SIGN (target));
5341 convert_move (SUBREG_REG (target), temp,
5342 SUBREG_PROMOTED_SIGN (target));
5344 return NULL_RTX;
5346 else if ((TREE_CODE (exp) == STRING_CST
5347 || (TREE_CODE (exp) == MEM_REF
5348 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5349 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5350 == STRING_CST
5351 && integer_zerop (TREE_OPERAND (exp, 1))))
5352 && !nontemporal && !call_param_p
5353 && MEM_P (target))
5355 /* Optimize initialization of an array with a STRING_CST. */
5356 HOST_WIDE_INT exp_len, str_copy_len;
5357 rtx dest_mem;
5358 tree str = TREE_CODE (exp) == STRING_CST
5359 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5361 exp_len = int_expr_size (exp);
5362 if (exp_len <= 0)
5363 goto normal_expr;
5365 if (TREE_STRING_LENGTH (str) <= 0)
5366 goto normal_expr;
5368 str_copy_len = strlen (TREE_STRING_POINTER (str));
5369 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5370 goto normal_expr;
5372 str_copy_len = TREE_STRING_LENGTH (str);
5373 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5374 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5376 str_copy_len += STORE_MAX_PIECES - 1;
5377 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5379 str_copy_len = MIN (str_copy_len, exp_len);
5380 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5381 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5382 MEM_ALIGN (target), false))
5383 goto normal_expr;
5385 dest_mem = target;
5387 dest_mem = store_by_pieces (dest_mem,
5388 str_copy_len, builtin_strncpy_read_str,
5389 CONST_CAST (char *,
5390 TREE_STRING_POINTER (str)),
5391 MEM_ALIGN (target), false,
5392 exp_len > str_copy_len ? 1 : 0);
5393 if (exp_len > str_copy_len)
5394 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5395 GEN_INT (exp_len - str_copy_len),
5396 BLOCK_OP_NORMAL);
5397 return NULL_RTX;
5399 else
5401 rtx tmp_target;
5403 normal_expr:
5404 /* If we want to use a nontemporal or a reverse order store, force the
5405 value into a register first. */
5406 tmp_target = nontemporal || reverse ? NULL_RTX : target;
5407 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5408 (call_param_p
5409 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5410 &alt_rtl, false);
5412 /* Handle bounds returned by call. */
5413 if (TREE_CODE (exp) == CALL_EXPR)
5415 rtx bounds;
5416 chkp_split_slot (temp, &temp, &bounds);
5417 if (bounds && btarget)
5419 gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5420 rtx tmp = targetm.calls.load_returned_bounds (bounds);
5421 chkp_set_rtl_bounds (btarget, tmp);
5426 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5427 the same as that of TARGET, adjust the constant. This is needed, for
5428 example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5429 only a word-sized value. */
5430 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5431 && TREE_CODE (exp) != ERROR_MARK
5432 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5433 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5434 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5436 /* If value was not generated in the target, store it there.
5437 Convert the value to TARGET's type first if necessary and emit the
5438 pending incrementations that have been queued when expanding EXP.
5439 Note that we cannot emit the whole queue blindly because this will
5440 effectively disable the POST_INC optimization later.
5442 If TEMP and TARGET compare equal according to rtx_equal_p, but
5443 one or both of them are volatile memory refs, we have to distinguish
5444 two cases:
5445 - expand_expr has used TARGET. In this case, we must not generate
5446 another copy. This can be detected by TARGET being equal according
5447 to == .
5448 - expand_expr has not used TARGET - that means that the source just
5449 happens to have the same RTX form. Since temp will have been created
5450 by expand_expr, it will compare unequal according to == .
5451 We must generate a copy in this case, to reach the correct number
5452 of volatile memory references. */
5454 if ((! rtx_equal_p (temp, target)
5455 || (temp != target && (side_effects_p (temp)
5456 || side_effects_p (target))))
5457 && TREE_CODE (exp) != ERROR_MARK
5458 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5459 but TARGET is not valid memory reference, TEMP will differ
5460 from TARGET although it is really the same location. */
5461 && !(alt_rtl
5462 && rtx_equal_p (alt_rtl, target)
5463 && !side_effects_p (alt_rtl)
5464 && !side_effects_p (target))
5465 /* If there's nothing to copy, don't bother. Don't call
5466 expr_size unless necessary, because some front-ends (C++)
5467 expr_size-hook must not be given objects that are not
5468 supposed to be bit-copied or bit-initialized. */
5469 && expr_size (exp) != const0_rtx)
5471 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5473 if (GET_MODE (target) == BLKmode)
5475 /* Handle calls that return BLKmode values in registers. */
5476 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5477 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5478 else
5479 store_bit_field (target,
5480 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5481 0, 0, 0, GET_MODE (temp), temp, reverse);
5483 else
5484 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5487 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5489 /* Handle copying a string constant into an array. The string
5490 constant may be shorter than the array. So copy just the string's
5491 actual length, and clear the rest. First get the size of the data
5492 type of the string, which is actually the size of the target. */
5493 rtx size = expr_size (exp);
5495 if (CONST_INT_P (size)
5496 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5497 emit_block_move (target, temp, size,
5498 (call_param_p
5499 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5500 else
5502 machine_mode pointer_mode
5503 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5504 machine_mode address_mode = get_address_mode (target);
5506 /* Compute the size of the data to copy from the string. */
5507 tree copy_size
5508 = size_binop_loc (loc, MIN_EXPR,
5509 make_tree (sizetype, size),
5510 size_int (TREE_STRING_LENGTH (exp)));
5511 rtx copy_size_rtx
5512 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5513 (call_param_p
5514 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5515 rtx_code_label *label = 0;
5517 /* Copy that much. */
5518 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5519 TYPE_UNSIGNED (sizetype));
5520 emit_block_move (target, temp, copy_size_rtx,
5521 (call_param_p
5522 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5524 /* Figure out how much is left in TARGET that we have to clear.
5525 Do all calculations in pointer_mode. */
5526 if (CONST_INT_P (copy_size_rtx))
5528 size = plus_constant (address_mode, size,
5529 -INTVAL (copy_size_rtx));
5530 target = adjust_address (target, BLKmode,
5531 INTVAL (copy_size_rtx));
5533 else
5535 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5536 copy_size_rtx, NULL_RTX, 0,
5537 OPTAB_LIB_WIDEN);
5539 if (GET_MODE (copy_size_rtx) != address_mode)
5540 copy_size_rtx = convert_to_mode (address_mode,
5541 copy_size_rtx,
5542 TYPE_UNSIGNED (sizetype));
5544 target = offset_address (target, copy_size_rtx,
5545 highest_pow2_factor (copy_size));
5546 label = gen_label_rtx ();
5547 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5548 GET_MODE (size), 0, label);
5551 if (size != const0_rtx)
5552 clear_storage (target, size, BLOCK_OP_NORMAL);
5554 if (label)
5555 emit_label (label);
5558 /* Handle calls that return values in multiple non-contiguous locations.
5559 The Irix 6 ABI has examples of this. */
5560 else if (GET_CODE (target) == PARALLEL)
5562 if (GET_CODE (temp) == PARALLEL)
5563 emit_group_move (target, temp);
5564 else
5565 emit_group_load (target, temp, TREE_TYPE (exp),
5566 int_size_in_bytes (TREE_TYPE (exp)));
5568 else if (GET_CODE (temp) == PARALLEL)
5569 emit_group_store (target, temp, TREE_TYPE (exp),
5570 int_size_in_bytes (TREE_TYPE (exp)));
5571 else if (GET_MODE (temp) == BLKmode)
5572 emit_block_move (target, temp, expr_size (exp),
5573 (call_param_p
5574 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5575 /* If we emit a nontemporal store, there is nothing else to do. */
5576 else if (nontemporal && emit_storent_insn (target, temp))
5578 else
5580 if (reverse)
5581 temp = flip_storage_order (GET_MODE (target), temp);
5582 temp = force_operand (temp, target);
5583 if (temp != target)
5584 emit_move_insn (target, temp);
5588 return NULL_RTX;
5591 /* Same as store_expr_with_bounds but ignoring bounds of EXP. */
5593 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5594 bool reverse)
5596 return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5597 reverse, NULL);
5600 /* Return true if field F of structure TYPE is a flexible array. */
5602 static bool
5603 flexible_array_member_p (const_tree f, const_tree type)
5605 const_tree tf;
5607 tf = TREE_TYPE (f);
5608 return (DECL_CHAIN (f) == NULL
5609 && TREE_CODE (tf) == ARRAY_TYPE
5610 && TYPE_DOMAIN (tf)
5611 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5612 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5613 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5614 && int_size_in_bytes (type) >= 0);
5617 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5618 must have in order for it to completely initialize a value of type TYPE.
5619 Return -1 if the number isn't known.
5621 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5623 static HOST_WIDE_INT
5624 count_type_elements (const_tree type, bool for_ctor_p)
5626 switch (TREE_CODE (type))
5628 case ARRAY_TYPE:
5630 tree nelts;
5632 nelts = array_type_nelts (type);
5633 if (nelts && tree_fits_uhwi_p (nelts))
5635 unsigned HOST_WIDE_INT n;
5637 n = tree_to_uhwi (nelts) + 1;
5638 if (n == 0 || for_ctor_p)
5639 return n;
5640 else
5641 return n * count_type_elements (TREE_TYPE (type), false);
5643 return for_ctor_p ? -1 : 1;
5646 case RECORD_TYPE:
5648 unsigned HOST_WIDE_INT n;
5649 tree f;
5651 n = 0;
5652 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5653 if (TREE_CODE (f) == FIELD_DECL)
5655 if (!for_ctor_p)
5656 n += count_type_elements (TREE_TYPE (f), false);
5657 else if (!flexible_array_member_p (f, type))
5658 /* Don't count flexible arrays, which are not supposed
5659 to be initialized. */
5660 n += 1;
5663 return n;
5666 case UNION_TYPE:
5667 case QUAL_UNION_TYPE:
5669 tree f;
5670 HOST_WIDE_INT n, m;
5672 gcc_assert (!for_ctor_p);
5673 /* Estimate the number of scalars in each field and pick the
5674 maximum. Other estimates would do instead; the idea is simply
5675 to make sure that the estimate is not sensitive to the ordering
5676 of the fields. */
5677 n = 1;
5678 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5679 if (TREE_CODE (f) == FIELD_DECL)
5681 m = count_type_elements (TREE_TYPE (f), false);
5682 /* If the field doesn't span the whole union, add an extra
5683 scalar for the rest. */
5684 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5685 TYPE_SIZE (type)) != 1)
5686 m++;
5687 if (n < m)
5688 n = m;
5690 return n;
5693 case COMPLEX_TYPE:
5694 return 2;
5696 case VECTOR_TYPE:
5697 return TYPE_VECTOR_SUBPARTS (type);
5699 case INTEGER_TYPE:
5700 case REAL_TYPE:
5701 case FIXED_POINT_TYPE:
5702 case ENUMERAL_TYPE:
5703 case BOOLEAN_TYPE:
5704 case POINTER_TYPE:
5705 case OFFSET_TYPE:
5706 case REFERENCE_TYPE:
5707 case NULLPTR_TYPE:
5708 return 1;
5710 case ERROR_MARK:
5711 return 0;
5713 case VOID_TYPE:
5714 case METHOD_TYPE:
5715 case FUNCTION_TYPE:
5716 case LANG_TYPE:
5717 default:
5718 gcc_unreachable ();
5722 /* Helper for categorize_ctor_elements. Identical interface. */
5724 static bool
5725 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5726 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5728 unsigned HOST_WIDE_INT idx;
5729 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5730 tree value, purpose, elt_type;
5732 /* Whether CTOR is a valid constant initializer, in accordance with what
5733 initializer_constant_valid_p does. If inferred from the constructor
5734 elements, true until proven otherwise. */
5735 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5736 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5738 nz_elts = 0;
5739 init_elts = 0;
5740 num_fields = 0;
5741 elt_type = NULL_TREE;
5743 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5745 HOST_WIDE_INT mult = 1;
5747 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5749 tree lo_index = TREE_OPERAND (purpose, 0);
5750 tree hi_index = TREE_OPERAND (purpose, 1);
5752 if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5753 mult = (tree_to_uhwi (hi_index)
5754 - tree_to_uhwi (lo_index) + 1);
5756 num_fields += mult;
5757 elt_type = TREE_TYPE (value);
5759 switch (TREE_CODE (value))
5761 case CONSTRUCTOR:
5763 HOST_WIDE_INT nz = 0, ic = 0;
5765 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5766 p_complete);
5768 nz_elts += mult * nz;
5769 init_elts += mult * ic;
5771 if (const_from_elts_p && const_p)
5772 const_p = const_elt_p;
5774 break;
5776 case INTEGER_CST:
5777 case REAL_CST:
5778 case FIXED_CST:
5779 if (!initializer_zerop (value))
5780 nz_elts += mult;
5781 init_elts += mult;
5782 break;
5784 case STRING_CST:
5785 nz_elts += mult * TREE_STRING_LENGTH (value);
5786 init_elts += mult * TREE_STRING_LENGTH (value);
5787 break;
5789 case COMPLEX_CST:
5790 if (!initializer_zerop (TREE_REALPART (value)))
5791 nz_elts += mult;
5792 if (!initializer_zerop (TREE_IMAGPART (value)))
5793 nz_elts += mult;
5794 init_elts += mult;
5795 break;
5797 case VECTOR_CST:
5799 unsigned i;
5800 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5802 tree v = VECTOR_CST_ELT (value, i);
5803 if (!initializer_zerop (v))
5804 nz_elts += mult;
5805 init_elts += mult;
5808 break;
5810 default:
5812 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5813 nz_elts += mult * tc;
5814 init_elts += mult * tc;
5816 if (const_from_elts_p && const_p)
5817 const_p = initializer_constant_valid_p (value, elt_type)
5818 != NULL_TREE;
5820 break;
5824 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5825 num_fields, elt_type))
5826 *p_complete = false;
5828 *p_nz_elts += nz_elts;
5829 *p_init_elts += init_elts;
5831 return const_p;
5834 /* Examine CTOR to discover:
5835 * how many scalar fields are set to nonzero values,
5836 and place it in *P_NZ_ELTS;
5837 * how many scalar fields in total are in CTOR,
5838 and place it in *P_ELT_COUNT.
5839 * whether the constructor is complete -- in the sense that every
5840 meaningful byte is explicitly given a value --
5841 and place it in *P_COMPLETE.
5843 Return whether or not CTOR is a valid static constant initializer, the same
5844 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5846 bool
5847 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5848 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5850 *p_nz_elts = 0;
5851 *p_init_elts = 0;
5852 *p_complete = true;
5854 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5857 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5858 of which had type LAST_TYPE. Each element was itself a complete
5859 initializer, in the sense that every meaningful byte was explicitly
5860 given a value. Return true if the same is true for the constructor
5861 as a whole. */
5863 bool
5864 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5865 const_tree last_type)
5867 if (TREE_CODE (type) == UNION_TYPE
5868 || TREE_CODE (type) == QUAL_UNION_TYPE)
5870 if (num_elts == 0)
5871 return false;
5873 gcc_assert (num_elts == 1 && last_type);
5875 /* ??? We could look at each element of the union, and find the
5876 largest element. Which would avoid comparing the size of the
5877 initialized element against any tail padding in the union.
5878 Doesn't seem worth the effort... */
5879 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5882 return count_type_elements (type, true) == num_elts;
5885 /* Return 1 if EXP contains mostly (3/4) zeros. */
5887 static int
5888 mostly_zeros_p (const_tree exp)
5890 if (TREE_CODE (exp) == CONSTRUCTOR)
5892 HOST_WIDE_INT nz_elts, init_elts;
5893 bool complete_p;
5895 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5896 return !complete_p || nz_elts < init_elts / 4;
5899 return initializer_zerop (exp);
5902 /* Return 1 if EXP contains all zeros. */
5904 static int
5905 all_zeros_p (const_tree exp)
5907 if (TREE_CODE (exp) == CONSTRUCTOR)
5909 HOST_WIDE_INT nz_elts, init_elts;
5910 bool complete_p;
5912 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5913 return nz_elts == 0;
5916 return initializer_zerop (exp);
5919 /* Helper function for store_constructor.
5920 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5921 CLEARED is as for store_constructor.
5922 ALIAS_SET is the alias set to use for any stores.
5923 If REVERSE is true, the store is to be done in reverse order.
5925 This provides a recursive shortcut back to store_constructor when it isn't
5926 necessary to go through store_field. This is so that we can pass through
5927 the cleared field to let store_constructor know that we may not have to
5928 clear a substructure if the outer structure has already been cleared. */
5930 static void
5931 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5932 HOST_WIDE_INT bitpos, machine_mode mode,
5933 tree exp, int cleared,
5934 alias_set_type alias_set, bool reverse)
5936 if (TREE_CODE (exp) == CONSTRUCTOR
5937 /* We can only call store_constructor recursively if the size and
5938 bit position are on a byte boundary. */
5939 && bitpos % BITS_PER_UNIT == 0
5940 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5941 /* If we have a nonzero bitpos for a register target, then we just
5942 let store_field do the bitfield handling. This is unlikely to
5943 generate unnecessary clear instructions anyways. */
5944 && (bitpos == 0 || MEM_P (target)))
5946 if (MEM_P (target))
5947 target
5948 = adjust_address (target,
5949 GET_MODE (target) == BLKmode
5950 || 0 != (bitpos
5951 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5952 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5955 /* Update the alias set, if required. */
5956 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5957 && MEM_ALIAS_SET (target) != 0)
5959 target = copy_rtx (target);
5960 set_mem_alias_set (target, alias_set);
5963 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5964 reverse);
5966 else
5967 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false,
5968 reverse);
5972 /* Returns the number of FIELD_DECLs in TYPE. */
5974 static int
5975 fields_length (const_tree type)
5977 tree t = TYPE_FIELDS (type);
5978 int count = 0;
5980 for (; t; t = DECL_CHAIN (t))
5981 if (TREE_CODE (t) == FIELD_DECL)
5982 ++count;
5984 return count;
5988 /* Store the value of constructor EXP into the rtx TARGET.
5989 TARGET is either a REG or a MEM; we know it cannot conflict, since
5990 safe_from_p has been called.
5991 CLEARED is true if TARGET is known to have been zero'd.
5992 SIZE is the number of bytes of TARGET we are allowed to modify: this
5993 may not be the same as the size of EXP if we are assigning to a field
5994 which has been packed to exclude padding bits.
5995 If REVERSE is true, the store is to be done in reverse order. */
5997 static void
5998 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
5999 bool reverse)
6001 tree type = TREE_TYPE (exp);
6002 #ifdef WORD_REGISTER_OPERATIONS
6003 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6004 #endif
6006 switch (TREE_CODE (type))
6008 case RECORD_TYPE:
6009 case UNION_TYPE:
6010 case QUAL_UNION_TYPE:
6012 unsigned HOST_WIDE_INT idx;
6013 tree field, value;
6015 /* The storage order is specified for every aggregate type. */
6016 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6018 /* If size is zero or the target is already cleared, do nothing. */
6019 if (size == 0 || cleared)
6020 cleared = 1;
6021 /* We either clear the aggregate or indicate the value is dead. */
6022 else if ((TREE_CODE (type) == UNION_TYPE
6023 || TREE_CODE (type) == QUAL_UNION_TYPE)
6024 && ! CONSTRUCTOR_ELTS (exp))
6025 /* If the constructor is empty, clear the union. */
6027 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6028 cleared = 1;
6031 /* If we are building a static constructor into a register,
6032 set the initial value as zero so we can fold the value into
6033 a constant. But if more than one register is involved,
6034 this probably loses. */
6035 else if (REG_P (target) && TREE_STATIC (exp)
6036 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6038 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6039 cleared = 1;
6042 /* If the constructor has fewer fields than the structure or
6043 if we are initializing the structure to mostly zeros, clear
6044 the whole structure first. Don't do this if TARGET is a
6045 register whose mode size isn't equal to SIZE since
6046 clear_storage can't handle this case. */
6047 else if (size > 0
6048 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6049 != fields_length (type))
6050 || mostly_zeros_p (exp))
6051 && (!REG_P (target)
6052 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6053 == size)))
6055 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6056 cleared = 1;
6059 if (REG_P (target) && !cleared)
6060 emit_clobber (target);
6062 /* Store each element of the constructor into the
6063 corresponding field of TARGET. */
6064 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6066 machine_mode mode;
6067 HOST_WIDE_INT bitsize;
6068 HOST_WIDE_INT bitpos = 0;
6069 tree offset;
6070 rtx to_rtx = target;
6072 /* Just ignore missing fields. We cleared the whole
6073 structure, above, if any fields are missing. */
6074 if (field == 0)
6075 continue;
6077 if (cleared && initializer_zerop (value))
6078 continue;
6080 if (tree_fits_uhwi_p (DECL_SIZE (field)))
6081 bitsize = tree_to_uhwi (DECL_SIZE (field));
6082 else
6083 bitsize = -1;
6085 mode = DECL_MODE (field);
6086 if (DECL_BIT_FIELD (field))
6087 mode = VOIDmode;
6089 offset = DECL_FIELD_OFFSET (field);
6090 if (tree_fits_shwi_p (offset)
6091 && tree_fits_shwi_p (bit_position (field)))
6093 bitpos = int_bit_position (field);
6094 offset = 0;
6096 else
6097 bitpos = tree_to_shwi (DECL_FIELD_BIT_OFFSET (field));
6099 if (offset)
6101 machine_mode address_mode;
6102 rtx offset_rtx;
6104 offset
6105 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
6106 make_tree (TREE_TYPE (exp),
6107 target));
6109 offset_rtx = expand_normal (offset);
6110 gcc_assert (MEM_P (to_rtx));
6112 address_mode = get_address_mode (to_rtx);
6113 if (GET_MODE (offset_rtx) != address_mode)
6114 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
6116 to_rtx = offset_address (to_rtx, offset_rtx,
6117 highest_pow2_factor (offset));
6120 #ifdef WORD_REGISTER_OPERATIONS
6121 /* If this initializes a field that is smaller than a
6122 word, at the start of a word, try to widen it to a full
6123 word. This special case allows us to output C++ member
6124 function initializations in a form that the optimizers
6125 can understand. */
6126 if (REG_P (target)
6127 && bitsize < BITS_PER_WORD
6128 && bitpos % BITS_PER_WORD == 0
6129 && GET_MODE_CLASS (mode) == MODE_INT
6130 && TREE_CODE (value) == INTEGER_CST
6131 && exp_size >= 0
6132 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6134 tree type = TREE_TYPE (value);
6136 if (TYPE_PRECISION (type) < BITS_PER_WORD)
6138 type = lang_hooks.types.type_for_mode
6139 (word_mode, TYPE_UNSIGNED (type));
6140 value = fold_convert (type, value);
6143 if (BYTES_BIG_ENDIAN)
6144 value
6145 = fold_build2 (LSHIFT_EXPR, type, value,
6146 build_int_cst (type,
6147 BITS_PER_WORD - bitsize));
6148 bitsize = BITS_PER_WORD;
6149 mode = word_mode;
6151 #endif
6153 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6154 && DECL_NONADDRESSABLE_P (field))
6156 to_rtx = copy_rtx (to_rtx);
6157 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6160 store_constructor_field (to_rtx, bitsize, bitpos, mode,
6161 value, cleared,
6162 get_alias_set (TREE_TYPE (field)),
6163 reverse);
6165 break;
6167 case ARRAY_TYPE:
6169 tree value, index;
6170 unsigned HOST_WIDE_INT i;
6171 int need_to_clear;
6172 tree domain;
6173 tree elttype = TREE_TYPE (type);
6174 int const_bounds_p;
6175 HOST_WIDE_INT minelt = 0;
6176 HOST_WIDE_INT maxelt = 0;
6178 /* The storage order is specified for every aggregate type. */
6179 reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6181 domain = TYPE_DOMAIN (type);
6182 const_bounds_p = (TYPE_MIN_VALUE (domain)
6183 && TYPE_MAX_VALUE (domain)
6184 && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6185 && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6187 /* If we have constant bounds for the range of the type, get them. */
6188 if (const_bounds_p)
6190 minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6191 maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6194 /* If the constructor has fewer elements than the array, clear
6195 the whole array first. Similarly if this is static
6196 constructor of a non-BLKmode object. */
6197 if (cleared)
6198 need_to_clear = 0;
6199 else if (REG_P (target) && TREE_STATIC (exp))
6200 need_to_clear = 1;
6201 else
6203 unsigned HOST_WIDE_INT idx;
6204 tree index, value;
6205 HOST_WIDE_INT count = 0, zero_count = 0;
6206 need_to_clear = ! const_bounds_p;
6208 /* This loop is a more accurate version of the loop in
6209 mostly_zeros_p (it handles RANGE_EXPR in an index). It
6210 is also needed to check for missing elements. */
6211 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6213 HOST_WIDE_INT this_node_count;
6215 if (need_to_clear)
6216 break;
6218 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6220 tree lo_index = TREE_OPERAND (index, 0);
6221 tree hi_index = TREE_OPERAND (index, 1);
6223 if (! tree_fits_uhwi_p (lo_index)
6224 || ! tree_fits_uhwi_p (hi_index))
6226 need_to_clear = 1;
6227 break;
6230 this_node_count = (tree_to_uhwi (hi_index)
6231 - tree_to_uhwi (lo_index) + 1);
6233 else
6234 this_node_count = 1;
6236 count += this_node_count;
6237 if (mostly_zeros_p (value))
6238 zero_count += this_node_count;
6241 /* Clear the entire array first if there are any missing
6242 elements, or if the incidence of zero elements is >=
6243 75%. */
6244 if (! need_to_clear
6245 && (count < maxelt - minelt + 1
6246 || 4 * zero_count >= 3 * count))
6247 need_to_clear = 1;
6250 if (need_to_clear && size > 0)
6252 if (REG_P (target))
6253 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6254 else
6255 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6256 cleared = 1;
6259 if (!cleared && REG_P (target))
6260 /* Inform later passes that the old value is dead. */
6261 emit_clobber (target);
6263 /* Store each element of the constructor into the
6264 corresponding element of TARGET, determined by counting the
6265 elements. */
6266 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6268 machine_mode mode;
6269 HOST_WIDE_INT bitsize;
6270 HOST_WIDE_INT bitpos;
6271 rtx xtarget = target;
6273 if (cleared && initializer_zerop (value))
6274 continue;
6276 mode = TYPE_MODE (elttype);
6277 if (mode == BLKmode)
6278 bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6279 ? tree_to_uhwi (TYPE_SIZE (elttype))
6280 : -1);
6281 else
6282 bitsize = GET_MODE_BITSIZE (mode);
6284 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6286 tree lo_index = TREE_OPERAND (index, 0);
6287 tree hi_index = TREE_OPERAND (index, 1);
6288 rtx index_r, pos_rtx;
6289 HOST_WIDE_INT lo, hi, count;
6290 tree position;
6292 /* If the range is constant and "small", unroll the loop. */
6293 if (const_bounds_p
6294 && tree_fits_shwi_p (lo_index)
6295 && tree_fits_shwi_p (hi_index)
6296 && (lo = tree_to_shwi (lo_index),
6297 hi = tree_to_shwi (hi_index),
6298 count = hi - lo + 1,
6299 (!MEM_P (target)
6300 || count <= 2
6301 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6302 && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6303 <= 40 * 8)))))
6305 lo -= minelt; hi -= minelt;
6306 for (; lo <= hi; lo++)
6308 bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6310 if (MEM_P (target)
6311 && !MEM_KEEP_ALIAS_SET_P (target)
6312 && TREE_CODE (type) == ARRAY_TYPE
6313 && TYPE_NONALIASED_COMPONENT (type))
6315 target = copy_rtx (target);
6316 MEM_KEEP_ALIAS_SET_P (target) = 1;
6319 store_constructor_field
6320 (target, bitsize, bitpos, mode, value, cleared,
6321 get_alias_set (elttype), reverse);
6324 else
6326 rtx_code_label *loop_start = gen_label_rtx ();
6327 rtx_code_label *loop_end = gen_label_rtx ();
6328 tree exit_cond;
6330 expand_normal (hi_index);
6332 index = build_decl (EXPR_LOCATION (exp),
6333 VAR_DECL, NULL_TREE, domain);
6334 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6335 SET_DECL_RTL (index, index_r);
6336 store_expr (lo_index, index_r, 0, false, reverse);
6338 /* Build the head of the loop. */
6339 do_pending_stack_adjust ();
6340 emit_label (loop_start);
6342 /* Assign value to element index. */
6343 position =
6344 fold_convert (ssizetype,
6345 fold_build2 (MINUS_EXPR,
6346 TREE_TYPE (index),
6347 index,
6348 TYPE_MIN_VALUE (domain)));
6350 position =
6351 size_binop (MULT_EXPR, position,
6352 fold_convert (ssizetype,
6353 TYPE_SIZE_UNIT (elttype)));
6355 pos_rtx = expand_normal (position);
6356 xtarget = offset_address (target, pos_rtx,
6357 highest_pow2_factor (position));
6358 xtarget = adjust_address (xtarget, mode, 0);
6359 if (TREE_CODE (value) == CONSTRUCTOR)
6360 store_constructor (value, xtarget, cleared,
6361 bitsize / BITS_PER_UNIT, reverse);
6362 else
6363 store_expr (value, xtarget, 0, false, reverse);
6365 /* Generate a conditional jump to exit the loop. */
6366 exit_cond = build2 (LT_EXPR, integer_type_node,
6367 index, hi_index);
6368 jumpif (exit_cond, loop_end, -1);
6370 /* Update the loop counter, and jump to the head of
6371 the loop. */
6372 expand_assignment (index,
6373 build2 (PLUS_EXPR, TREE_TYPE (index),
6374 index, integer_one_node),
6375 false);
6377 emit_jump (loop_start);
6379 /* Build the end of the loop. */
6380 emit_label (loop_end);
6383 else if ((index != 0 && ! tree_fits_shwi_p (index))
6384 || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6386 tree position;
6388 if (index == 0)
6389 index = ssize_int (1);
6391 if (minelt)
6392 index = fold_convert (ssizetype,
6393 fold_build2 (MINUS_EXPR,
6394 TREE_TYPE (index),
6395 index,
6396 TYPE_MIN_VALUE (domain)));
6398 position =
6399 size_binop (MULT_EXPR, index,
6400 fold_convert (ssizetype,
6401 TYPE_SIZE_UNIT (elttype)));
6402 xtarget = offset_address (target,
6403 expand_normal (position),
6404 highest_pow2_factor (position));
6405 xtarget = adjust_address (xtarget, mode, 0);
6406 store_expr (value, xtarget, 0, false, reverse);
6408 else
6410 if (index != 0)
6411 bitpos = ((tree_to_shwi (index) - minelt)
6412 * tree_to_uhwi (TYPE_SIZE (elttype)));
6413 else
6414 bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6416 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6417 && TREE_CODE (type) == ARRAY_TYPE
6418 && TYPE_NONALIASED_COMPONENT (type))
6420 target = copy_rtx (target);
6421 MEM_KEEP_ALIAS_SET_P (target) = 1;
6423 store_constructor_field (target, bitsize, bitpos, mode, value,
6424 cleared, get_alias_set (elttype),
6425 reverse);
6428 break;
6431 case VECTOR_TYPE:
6433 unsigned HOST_WIDE_INT idx;
6434 constructor_elt *ce;
6435 int i;
6436 int need_to_clear;
6437 int icode = CODE_FOR_nothing;
6438 tree elttype = TREE_TYPE (type);
6439 int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6440 machine_mode eltmode = TYPE_MODE (elttype);
6441 HOST_WIDE_INT bitsize;
6442 HOST_WIDE_INT bitpos;
6443 rtvec vector = NULL;
6444 unsigned n_elts;
6445 alias_set_type alias;
6447 gcc_assert (eltmode != BLKmode);
6449 n_elts = TYPE_VECTOR_SUBPARTS (type);
6450 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6452 machine_mode mode = GET_MODE (target);
6454 icode = (int) optab_handler (vec_init_optab, mode);
6455 /* Don't use vec_init<mode> if some elements have VECTOR_TYPE. */
6456 if (icode != CODE_FOR_nothing)
6458 tree value;
6460 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6461 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6463 icode = CODE_FOR_nothing;
6464 break;
6467 if (icode != CODE_FOR_nothing)
6469 unsigned int i;
6471 vector = rtvec_alloc (n_elts);
6472 for (i = 0; i < n_elts; i++)
6473 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6477 /* If the constructor has fewer elements than the vector,
6478 clear the whole array first. Similarly if this is static
6479 constructor of a non-BLKmode object. */
6480 if (cleared)
6481 need_to_clear = 0;
6482 else if (REG_P (target) && TREE_STATIC (exp))
6483 need_to_clear = 1;
6484 else
6486 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6487 tree value;
6489 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6491 int n_elts_here = tree_to_uhwi
6492 (int_const_binop (TRUNC_DIV_EXPR,
6493 TYPE_SIZE (TREE_TYPE (value)),
6494 TYPE_SIZE (elttype)));
6496 count += n_elts_here;
6497 if (mostly_zeros_p (value))
6498 zero_count += n_elts_here;
6501 /* Clear the entire vector first if there are any missing elements,
6502 or if the incidence of zero elements is >= 75%. */
6503 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6506 if (need_to_clear && size > 0 && !vector)
6508 if (REG_P (target))
6509 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6510 else
6511 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6512 cleared = 1;
6515 /* Inform later passes that the old value is dead. */
6516 if (!cleared && !vector && REG_P (target))
6517 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6519 if (MEM_P (target))
6520 alias = MEM_ALIAS_SET (target);
6521 else
6522 alias = get_alias_set (elttype);
6524 /* Store each element of the constructor into the corresponding
6525 element of TARGET, determined by counting the elements. */
6526 for (idx = 0, i = 0;
6527 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6528 idx++, i += bitsize / elt_size)
6530 HOST_WIDE_INT eltpos;
6531 tree value = ce->value;
6533 bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6534 if (cleared && initializer_zerop (value))
6535 continue;
6537 if (ce->index)
6538 eltpos = tree_to_uhwi (ce->index);
6539 else
6540 eltpos = i;
6542 if (vector)
6544 /* vec_init<mode> should not be used if there are VECTOR_TYPE
6545 elements. */
6546 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6547 RTVEC_ELT (vector, eltpos)
6548 = expand_normal (value);
6550 else
6552 machine_mode value_mode =
6553 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6554 ? TYPE_MODE (TREE_TYPE (value))
6555 : eltmode;
6556 bitpos = eltpos * elt_size;
6557 store_constructor_field (target, bitsize, bitpos, value_mode,
6558 value, cleared, alias, reverse);
6562 if (vector)
6563 emit_insn (GEN_FCN (icode)
6564 (target,
6565 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6566 break;
6569 default:
6570 gcc_unreachable ();
6574 /* Store the value of EXP (an expression tree)
6575 into a subfield of TARGET which has mode MODE and occupies
6576 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6577 If MODE is VOIDmode, it means that we are storing into a bit-field.
6579 BITREGION_START is bitpos of the first bitfield in this region.
6580 BITREGION_END is the bitpos of the ending bitfield in this region.
6581 These two fields are 0, if the C++ memory model does not apply,
6582 or we are not interested in keeping track of bitfield regions.
6584 Always return const0_rtx unless we have something particular to
6585 return.
6587 ALIAS_SET is the alias set for the destination. This value will
6588 (in general) be different from that for TARGET, since TARGET is a
6589 reference to the containing structure.
6591 If NONTEMPORAL is true, try generating a nontemporal store.
6593 If REVERSE is true, the store is to be done in reverse order. */
6595 static rtx
6596 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6597 unsigned HOST_WIDE_INT bitregion_start,
6598 unsigned HOST_WIDE_INT bitregion_end,
6599 machine_mode mode, tree exp,
6600 alias_set_type alias_set, bool nontemporal, bool reverse)
6602 if (TREE_CODE (exp) == ERROR_MARK)
6603 return const0_rtx;
6605 /* If we have nothing to store, do nothing unless the expression has
6606 side-effects. */
6607 if (bitsize == 0)
6608 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6610 if (GET_CODE (target) == CONCAT)
6612 /* We're storing into a struct containing a single __complex. */
6614 gcc_assert (!bitpos);
6615 return store_expr (exp, target, 0, nontemporal, reverse);
6618 /* If the structure is in a register or if the component
6619 is a bit field, we cannot use addressing to access it.
6620 Use bit-field techniques or SUBREG to store in it. */
6622 if (mode == VOIDmode
6623 || (mode != BLKmode && ! direct_store[(int) mode]
6624 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6625 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6626 || REG_P (target)
6627 || GET_CODE (target) == SUBREG
6628 /* If the field isn't aligned enough to store as an ordinary memref,
6629 store it as a bit field. */
6630 || (mode != BLKmode
6631 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6632 || bitpos % GET_MODE_ALIGNMENT (mode))
6633 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6634 || (bitpos % BITS_PER_UNIT != 0)))
6635 || (bitsize >= 0 && mode != BLKmode
6636 && GET_MODE_BITSIZE (mode) > bitsize)
6637 /* If the RHS and field are a constant size and the size of the
6638 RHS isn't the same size as the bitfield, we must use bitfield
6639 operations. */
6640 || (bitsize >= 0
6641 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6642 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6643 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6644 decl we must use bitfield operations. */
6645 || (bitsize >= 0
6646 && TREE_CODE (exp) == MEM_REF
6647 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6648 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6649 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6650 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6652 rtx temp;
6653 gimple nop_def;
6655 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6656 implies a mask operation. If the precision is the same size as
6657 the field we're storing into, that mask is redundant. This is
6658 particularly common with bit field assignments generated by the
6659 C front end. */
6660 nop_def = get_def_for_expr (exp, NOP_EXPR);
6661 if (nop_def)
6663 tree type = TREE_TYPE (exp);
6664 if (INTEGRAL_TYPE_P (type)
6665 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6666 && bitsize == TYPE_PRECISION (type))
6668 tree op = gimple_assign_rhs1 (nop_def);
6669 type = TREE_TYPE (op);
6670 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6671 exp = op;
6675 temp = expand_normal (exp);
6677 /* If the value has a record type and an integral mode then, if BITSIZE
6678 is narrower than this mode and this is a big-endian machine, we must
6679 first put the value into the low-order bits. Moreover, the field may
6680 be not aligned on a byte boundary; in this case, if it has reverse
6681 storage order, it needs to be accessed as a scalar field with reverse
6682 storage order and we must first put the value into target order. */
6683 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
6684 && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6686 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6688 reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6690 if (reverse)
6691 temp = flip_storage_order (GET_MODE (temp), temp);
6693 if (bitsize < size
6694 && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6695 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6696 size - bitsize, NULL_RTX, 1);
6699 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6700 if (mode != VOIDmode && mode != BLKmode
6701 && mode != TYPE_MODE (TREE_TYPE (exp)))
6702 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6704 /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6705 are both BLKmode, both must be in memory and BITPOS must be aligned
6706 on a byte boundary. If so, we simply do a block copy. Likewise for
6707 a BLKmode-like TARGET. */
6708 if (GET_CODE (temp) != PARALLEL
6709 && GET_MODE (temp) == BLKmode
6710 && (GET_MODE (target) == BLKmode
6711 || (MEM_P (target)
6712 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6713 && (bitpos % BITS_PER_UNIT) == 0
6714 && (bitsize % BITS_PER_UNIT) == 0)))
6716 gcc_assert (MEM_P (target) && MEM_P (temp)
6717 && (bitpos % BITS_PER_UNIT) == 0);
6719 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6720 emit_block_move (target, temp,
6721 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6722 / BITS_PER_UNIT),
6723 BLOCK_OP_NORMAL);
6725 return const0_rtx;
6728 /* Handle calls that return values in multiple non-contiguous locations.
6729 The Irix 6 ABI has examples of this. */
6730 if (GET_CODE (temp) == PARALLEL)
6732 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6733 rtx temp_target;
6734 if (mode == BLKmode || mode == VOIDmode)
6735 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6736 temp_target = gen_reg_rtx (mode);
6737 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6738 temp = temp_target;
6740 else if (mode == BLKmode)
6742 /* Handle calls that return BLKmode values in registers. */
6743 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6745 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6746 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6747 temp = temp_target;
6749 else
6751 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6752 rtx temp_target;
6753 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6754 temp_target = gen_reg_rtx (mode);
6755 temp_target
6756 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6757 temp_target, mode, mode, false);
6758 temp = temp_target;
6762 /* Store the value in the bitfield. */
6763 store_bit_field (target, bitsize, bitpos,
6764 bitregion_start, bitregion_end,
6765 mode, temp, reverse);
6767 return const0_rtx;
6769 else
6771 /* Now build a reference to just the desired component. */
6772 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6774 if (to_rtx == target)
6775 to_rtx = copy_rtx (to_rtx);
6777 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6778 set_mem_alias_set (to_rtx, alias_set);
6780 return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6784 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6785 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6786 codes and find the ultimate containing object, which we return.
6788 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6789 bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6790 storage order of the field.
6791 If the position of the field is variable, we store a tree
6792 giving the variable offset (in units) in *POFFSET.
6793 This offset is in addition to the bit position.
6794 If the position is not variable, we store 0 in *POFFSET.
6796 If any of the extraction expressions is volatile,
6797 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6799 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6800 Otherwise, it is a mode that can be used to access the field.
6802 If the field describes a variable-sized object, *PMODE is set to
6803 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6804 this case, but the address of the object can be found.
6806 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6807 look through nodes that serve as markers of a greater alignment than
6808 the one that can be deduced from the expression. These nodes make it
6809 possible for front-ends to prevent temporaries from being created by
6810 the middle-end on alignment considerations. For that purpose, the
6811 normal operating mode at high-level is to always pass FALSE so that
6812 the ultimate containing object is really returned; moreover, the
6813 associated predicate handled_component_p will always return TRUE
6814 on these nodes, thus indicating that they are essentially handled
6815 by get_inner_reference. TRUE should only be passed when the caller
6816 is scanning the expression in order to build another representation
6817 and specifically knows how to handle these nodes; as such, this is
6818 the normal operating mode in the RTL expanders. */
6820 tree
6821 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6822 HOST_WIDE_INT *pbitpos, tree *poffset,
6823 machine_mode *pmode, int *punsignedp,
6824 int *preversep, int *pvolatilep, bool keep_aligning)
6826 tree size_tree = 0;
6827 machine_mode mode = VOIDmode;
6828 bool blkmode_bitfield = false;
6829 tree offset = size_zero_node;
6830 offset_int bit_offset = 0;
6832 /* First get the mode, signedness, storage order and size. We do this from
6833 just the outermost expression. */
6834 *pbitsize = -1;
6835 if (TREE_CODE (exp) == COMPONENT_REF)
6837 tree field = TREE_OPERAND (exp, 1);
6838 size_tree = DECL_SIZE (field);
6839 if (flag_strict_volatile_bitfields > 0
6840 && TREE_THIS_VOLATILE (exp)
6841 && DECL_BIT_FIELD_TYPE (field)
6842 && DECL_MODE (field) != BLKmode)
6843 /* Volatile bitfields should be accessed in the mode of the
6844 field's type, not the mode computed based on the bit
6845 size. */
6846 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6847 else if (!DECL_BIT_FIELD (field))
6848 mode = DECL_MODE (field);
6849 else if (DECL_MODE (field) == BLKmode)
6850 blkmode_bitfield = true;
6852 *punsignedp = DECL_UNSIGNED (field);
6853 *preversep
6854 = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (exp, 0)))
6855 && !AGGREGATE_TYPE_P (TREE_TYPE (exp));
6857 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6859 size_tree = TREE_OPERAND (exp, 1);
6860 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6861 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6862 *preversep = REF_REVERSE_STORAGE_ORDER (exp);
6864 /* For vector types, with the correct size of access, use the mode of
6865 inner type. */
6866 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6867 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6868 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6869 mode = TYPE_MODE (TREE_TYPE (exp));
6871 else
6873 mode = TYPE_MODE (TREE_TYPE (exp));
6874 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6875 *preversep
6876 = ((TREE_CODE (exp) == ARRAY_REF
6877 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_OPERAND (exp, 0))))
6878 || (TREE_CODE (exp) == MEM_REF
6879 && REF_REVERSE_STORAGE_ORDER (exp)))
6880 && !AGGREGATE_TYPE_P (TREE_TYPE (exp));
6882 if (mode == BLKmode)
6883 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6884 else
6885 *pbitsize = GET_MODE_BITSIZE (mode);
6888 if (size_tree != 0)
6890 if (! tree_fits_uhwi_p (size_tree))
6891 mode = BLKmode, *pbitsize = -1;
6892 else
6893 *pbitsize = tree_to_uhwi (size_tree);
6896 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6897 and find the ultimate containing object. */
6898 while (1)
6900 switch (TREE_CODE (exp))
6902 case BIT_FIELD_REF:
6903 bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6904 break;
6906 case COMPONENT_REF:
6908 tree field = TREE_OPERAND (exp, 1);
6909 tree this_offset = component_ref_field_offset (exp);
6911 /* If this field hasn't been filled in yet, don't go past it.
6912 This should only happen when folding expressions made during
6913 type construction. */
6914 if (this_offset == 0)
6915 break;
6917 offset = size_binop (PLUS_EXPR, offset, this_offset);
6918 bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6920 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6922 break;
6924 case ARRAY_REF:
6925 case ARRAY_RANGE_REF:
6927 tree index = TREE_OPERAND (exp, 1);
6928 tree low_bound = array_ref_low_bound (exp);
6929 tree unit_size = array_ref_element_size (exp);
6931 /* We assume all arrays have sizes that are a multiple of a byte.
6932 First subtract the lower bound, if any, in the type of the
6933 index, then convert to sizetype and multiply by the size of
6934 the array element. */
6935 if (! integer_zerop (low_bound))
6936 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6937 index, low_bound);
6939 offset = size_binop (PLUS_EXPR, offset,
6940 size_binop (MULT_EXPR,
6941 fold_convert (sizetype, index),
6942 unit_size));
6944 break;
6946 case REALPART_EXPR:
6947 break;
6949 case IMAGPART_EXPR:
6950 bit_offset += *pbitsize;
6951 break;
6953 case VIEW_CONVERT_EXPR:
6954 if (keep_aligning && STRICT_ALIGNMENT
6955 && (TYPE_ALIGN (TREE_TYPE (exp))
6956 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6957 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6958 < BIGGEST_ALIGNMENT)
6959 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6960 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6961 goto done;
6962 break;
6964 case MEM_REF:
6965 /* Hand back the decl for MEM[&decl, off]. */
6966 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6968 tree off = TREE_OPERAND (exp, 1);
6969 if (!integer_zerop (off))
6971 offset_int boff, coff = mem_ref_offset (exp);
6972 boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6973 bit_offset += boff;
6975 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6977 goto done;
6979 default:
6980 goto done;
6983 /* If any reference in the chain is volatile, the effect is volatile. */
6984 if (TREE_THIS_VOLATILE (exp))
6985 *pvolatilep = 1;
6987 exp = TREE_OPERAND (exp, 0);
6989 done:
6991 /* If OFFSET is constant, see if we can return the whole thing as a
6992 constant bit position. Make sure to handle overflow during
6993 this conversion. */
6994 if (TREE_CODE (offset) == INTEGER_CST)
6996 offset_int tem = wi::sext (wi::to_offset (offset),
6997 TYPE_PRECISION (sizetype));
6998 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6999 tem += bit_offset;
7000 if (wi::fits_shwi_p (tem))
7002 *pbitpos = tem.to_shwi ();
7003 *poffset = offset = NULL_TREE;
7007 /* Otherwise, split it up. */
7008 if (offset)
7010 /* Avoid returning a negative bitpos as this may wreak havoc later. */
7011 if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7013 offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7014 offset_int tem = bit_offset.and_not (mask);
7015 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7016 Subtract it to BIT_OFFSET and add it (scaled) to OFFSET. */
7017 bit_offset -= tem;
7018 tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7019 offset = size_binop (PLUS_EXPR, offset,
7020 wide_int_to_tree (sizetype, tem));
7023 *pbitpos = bit_offset.to_shwi ();
7024 *poffset = offset;
7027 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
7028 if (mode == VOIDmode
7029 && blkmode_bitfield
7030 && (*pbitpos % BITS_PER_UNIT) == 0
7031 && (*pbitsize % BITS_PER_UNIT) == 0)
7032 *pmode = BLKmode;
7033 else
7034 *pmode = mode;
7036 return exp;
7039 /* Return a tree of sizetype representing the size, in bytes, of the element
7040 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7042 tree
7043 array_ref_element_size (tree exp)
7045 tree aligned_size = TREE_OPERAND (exp, 3);
7046 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7047 location_t loc = EXPR_LOCATION (exp);
7049 /* If a size was specified in the ARRAY_REF, it's the size measured
7050 in alignment units of the element type. So multiply by that value. */
7051 if (aligned_size)
7053 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7054 sizetype from another type of the same width and signedness. */
7055 if (TREE_TYPE (aligned_size) != sizetype)
7056 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
7057 return size_binop_loc (loc, MULT_EXPR, aligned_size,
7058 size_int (TYPE_ALIGN_UNIT (elmt_type)));
7061 /* Otherwise, take the size from that of the element type. Substitute
7062 any PLACEHOLDER_EXPR that we have. */
7063 else
7064 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
7067 /* Return a tree representing the lower bound of the array mentioned in
7068 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7070 tree
7071 array_ref_low_bound (tree exp)
7073 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7075 /* If a lower bound is specified in EXP, use it. */
7076 if (TREE_OPERAND (exp, 2))
7077 return TREE_OPERAND (exp, 2);
7079 /* Otherwise, if there is a domain type and it has a lower bound, use it,
7080 substituting for a PLACEHOLDER_EXPR as needed. */
7081 if (domain_type && TYPE_MIN_VALUE (domain_type))
7082 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7084 /* Otherwise, return a zero of the appropriate type. */
7085 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7088 /* Returns true if REF is an array reference to an array at the end of
7089 a structure. If this is the case, the array may be allocated larger
7090 than its upper bound implies. */
7092 bool
7093 array_at_struct_end_p (tree ref)
7095 if (TREE_CODE (ref) != ARRAY_REF
7096 && TREE_CODE (ref) != ARRAY_RANGE_REF)
7097 return false;
7099 while (handled_component_p (ref))
7101 /* If the reference chain contains a component reference to a
7102 non-union type and there follows another field the reference
7103 is not at the end of a structure. */
7104 if (TREE_CODE (ref) == COMPONENT_REF
7105 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7107 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7108 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7109 nextf = DECL_CHAIN (nextf);
7110 if (nextf)
7111 return false;
7114 ref = TREE_OPERAND (ref, 0);
7117 /* If the reference is based on a declared entity, the size of the array
7118 is constrained by its given domain. */
7119 if (DECL_P (ref))
7120 return false;
7122 return true;
7125 /* Return a tree representing the upper bound of the array mentioned in
7126 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
7128 tree
7129 array_ref_up_bound (tree exp)
7131 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7133 /* If there is a domain type and it has an upper bound, use it, substituting
7134 for a PLACEHOLDER_EXPR as needed. */
7135 if (domain_type && TYPE_MAX_VALUE (domain_type))
7136 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7138 /* Otherwise fail. */
7139 return NULL_TREE;
7142 /* Return a tree representing the offset, in bytes, of the field referenced
7143 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
7145 tree
7146 component_ref_field_offset (tree exp)
7148 tree aligned_offset = TREE_OPERAND (exp, 2);
7149 tree field = TREE_OPERAND (exp, 1);
7150 location_t loc = EXPR_LOCATION (exp);
7152 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7153 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
7154 value. */
7155 if (aligned_offset)
7157 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7158 sizetype from another type of the same width and signedness. */
7159 if (TREE_TYPE (aligned_offset) != sizetype)
7160 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7161 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7162 size_int (DECL_OFFSET_ALIGN (field)
7163 / BITS_PER_UNIT));
7166 /* Otherwise, take the offset from that of the field. Substitute
7167 any PLACEHOLDER_EXPR that we have. */
7168 else
7169 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7172 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
7174 static unsigned HOST_WIDE_INT
7175 target_align (const_tree target)
7177 /* We might have a chain of nested references with intermediate misaligning
7178 bitfields components, so need to recurse to find out. */
7180 unsigned HOST_WIDE_INT this_align, outer_align;
7182 switch (TREE_CODE (target))
7184 case BIT_FIELD_REF:
7185 return 1;
7187 case COMPONENT_REF:
7188 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7189 outer_align = target_align (TREE_OPERAND (target, 0));
7190 return MIN (this_align, outer_align);
7192 case ARRAY_REF:
7193 case ARRAY_RANGE_REF:
7194 this_align = TYPE_ALIGN (TREE_TYPE (target));
7195 outer_align = target_align (TREE_OPERAND (target, 0));
7196 return MIN (this_align, outer_align);
7198 CASE_CONVERT:
7199 case NON_LVALUE_EXPR:
7200 case VIEW_CONVERT_EXPR:
7201 this_align = TYPE_ALIGN (TREE_TYPE (target));
7202 outer_align = target_align (TREE_OPERAND (target, 0));
7203 return MAX (this_align, outer_align);
7205 default:
7206 return TYPE_ALIGN (TREE_TYPE (target));
7211 /* Given an rtx VALUE that may contain additions and multiplications, return
7212 an equivalent value that just refers to a register, memory, or constant.
7213 This is done by generating instructions to perform the arithmetic and
7214 returning a pseudo-register containing the value.
7216 The returned value may be a REG, SUBREG, MEM or constant. */
7219 force_operand (rtx value, rtx target)
7221 rtx op1, op2;
7222 /* Use subtarget as the target for operand 0 of a binary operation. */
7223 rtx subtarget = get_subtarget (target);
7224 enum rtx_code code = GET_CODE (value);
7226 /* Check for subreg applied to an expression produced by loop optimizer. */
7227 if (code == SUBREG
7228 && !REG_P (SUBREG_REG (value))
7229 && !MEM_P (SUBREG_REG (value)))
7231 value
7232 = simplify_gen_subreg (GET_MODE (value),
7233 force_reg (GET_MODE (SUBREG_REG (value)),
7234 force_operand (SUBREG_REG (value),
7235 NULL_RTX)),
7236 GET_MODE (SUBREG_REG (value)),
7237 SUBREG_BYTE (value));
7238 code = GET_CODE (value);
7241 /* Check for a PIC address load. */
7242 if ((code == PLUS || code == MINUS)
7243 && XEXP (value, 0) == pic_offset_table_rtx
7244 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7245 || GET_CODE (XEXP (value, 1)) == LABEL_REF
7246 || GET_CODE (XEXP (value, 1)) == CONST))
7248 if (!subtarget)
7249 subtarget = gen_reg_rtx (GET_MODE (value));
7250 emit_move_insn (subtarget, value);
7251 return subtarget;
7254 if (ARITHMETIC_P (value))
7256 op2 = XEXP (value, 1);
7257 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7258 subtarget = 0;
7259 if (code == MINUS && CONST_INT_P (op2))
7261 code = PLUS;
7262 op2 = negate_rtx (GET_MODE (value), op2);
7265 /* Check for an addition with OP2 a constant integer and our first
7266 operand a PLUS of a virtual register and something else. In that
7267 case, we want to emit the sum of the virtual register and the
7268 constant first and then add the other value. This allows virtual
7269 register instantiation to simply modify the constant rather than
7270 creating another one around this addition. */
7271 if (code == PLUS && CONST_INT_P (op2)
7272 && GET_CODE (XEXP (value, 0)) == PLUS
7273 && REG_P (XEXP (XEXP (value, 0), 0))
7274 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7275 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7277 rtx temp = expand_simple_binop (GET_MODE (value), code,
7278 XEXP (XEXP (value, 0), 0), op2,
7279 subtarget, 0, OPTAB_LIB_WIDEN);
7280 return expand_simple_binop (GET_MODE (value), code, temp,
7281 force_operand (XEXP (XEXP (value,
7282 0), 1), 0),
7283 target, 0, OPTAB_LIB_WIDEN);
7286 op1 = force_operand (XEXP (value, 0), subtarget);
7287 op2 = force_operand (op2, NULL_RTX);
7288 switch (code)
7290 case MULT:
7291 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7292 case DIV:
7293 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7294 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7295 target, 1, OPTAB_LIB_WIDEN);
7296 else
7297 return expand_divmod (0,
7298 FLOAT_MODE_P (GET_MODE (value))
7299 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7300 GET_MODE (value), op1, op2, target, 0);
7301 case MOD:
7302 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7303 target, 0);
7304 case UDIV:
7305 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7306 target, 1);
7307 case UMOD:
7308 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7309 target, 1);
7310 case ASHIFTRT:
7311 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7312 target, 0, OPTAB_LIB_WIDEN);
7313 default:
7314 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7315 target, 1, OPTAB_LIB_WIDEN);
7318 if (UNARY_P (value))
7320 if (!target)
7321 target = gen_reg_rtx (GET_MODE (value));
7322 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7323 switch (code)
7325 case ZERO_EXTEND:
7326 case SIGN_EXTEND:
7327 case TRUNCATE:
7328 case FLOAT_EXTEND:
7329 case FLOAT_TRUNCATE:
7330 convert_move (target, op1, code == ZERO_EXTEND);
7331 return target;
7333 case FIX:
7334 case UNSIGNED_FIX:
7335 expand_fix (target, op1, code == UNSIGNED_FIX);
7336 return target;
7338 case FLOAT:
7339 case UNSIGNED_FLOAT:
7340 expand_float (target, op1, code == UNSIGNED_FLOAT);
7341 return target;
7343 default:
7344 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7348 #ifdef INSN_SCHEDULING
7349 /* On machines that have insn scheduling, we want all memory reference to be
7350 explicit, so we need to deal with such paradoxical SUBREGs. */
7351 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7352 value
7353 = simplify_gen_subreg (GET_MODE (value),
7354 force_reg (GET_MODE (SUBREG_REG (value)),
7355 force_operand (SUBREG_REG (value),
7356 NULL_RTX)),
7357 GET_MODE (SUBREG_REG (value)),
7358 SUBREG_BYTE (value));
7359 #endif
7361 return value;
7364 /* Subroutine of expand_expr: return nonzero iff there is no way that
7365 EXP can reference X, which is being modified. TOP_P is nonzero if this
7366 call is going to be used to determine whether we need a temporary
7367 for EXP, as opposed to a recursive call to this function.
7369 It is always safe for this routine to return zero since it merely
7370 searches for optimization opportunities. */
7373 safe_from_p (const_rtx x, tree exp, int top_p)
7375 rtx exp_rtl = 0;
7376 int i, nops;
7378 if (x == 0
7379 /* If EXP has varying size, we MUST use a target since we currently
7380 have no way of allocating temporaries of variable size
7381 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7382 So we assume here that something at a higher level has prevented a
7383 clash. This is somewhat bogus, but the best we can do. Only
7384 do this when X is BLKmode and when we are at the top level. */
7385 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7386 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7387 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7388 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7389 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7390 != INTEGER_CST)
7391 && GET_MODE (x) == BLKmode)
7392 /* If X is in the outgoing argument area, it is always safe. */
7393 || (MEM_P (x)
7394 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7395 || (GET_CODE (XEXP (x, 0)) == PLUS
7396 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7397 return 1;
7399 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7400 find the underlying pseudo. */
7401 if (GET_CODE (x) == SUBREG)
7403 x = SUBREG_REG (x);
7404 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7405 return 0;
7408 /* Now look at our tree code and possibly recurse. */
7409 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7411 case tcc_declaration:
7412 exp_rtl = DECL_RTL_IF_SET (exp);
7413 break;
7415 case tcc_constant:
7416 return 1;
7418 case tcc_exceptional:
7419 if (TREE_CODE (exp) == TREE_LIST)
7421 while (1)
7423 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7424 return 0;
7425 exp = TREE_CHAIN (exp);
7426 if (!exp)
7427 return 1;
7428 if (TREE_CODE (exp) != TREE_LIST)
7429 return safe_from_p (x, exp, 0);
7432 else if (TREE_CODE (exp) == CONSTRUCTOR)
7434 constructor_elt *ce;
7435 unsigned HOST_WIDE_INT idx;
7437 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7438 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7439 || !safe_from_p (x, ce->value, 0))
7440 return 0;
7441 return 1;
7443 else if (TREE_CODE (exp) == ERROR_MARK)
7444 return 1; /* An already-visited SAVE_EXPR? */
7445 else
7446 return 0;
7448 case tcc_statement:
7449 /* The only case we look at here is the DECL_INITIAL inside a
7450 DECL_EXPR. */
7451 return (TREE_CODE (exp) != DECL_EXPR
7452 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7453 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7454 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7456 case tcc_binary:
7457 case tcc_comparison:
7458 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7459 return 0;
7460 /* Fall through. */
7462 case tcc_unary:
7463 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7465 case tcc_expression:
7466 case tcc_reference:
7467 case tcc_vl_exp:
7468 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7469 the expression. If it is set, we conflict iff we are that rtx or
7470 both are in memory. Otherwise, we check all operands of the
7471 expression recursively. */
7473 switch (TREE_CODE (exp))
7475 case ADDR_EXPR:
7476 /* If the operand is static or we are static, we can't conflict.
7477 Likewise if we don't conflict with the operand at all. */
7478 if (staticp (TREE_OPERAND (exp, 0))
7479 || TREE_STATIC (exp)
7480 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7481 return 1;
7483 /* Otherwise, the only way this can conflict is if we are taking
7484 the address of a DECL a that address if part of X, which is
7485 very rare. */
7486 exp = TREE_OPERAND (exp, 0);
7487 if (DECL_P (exp))
7489 if (!DECL_RTL_SET_P (exp)
7490 || !MEM_P (DECL_RTL (exp)))
7491 return 0;
7492 else
7493 exp_rtl = XEXP (DECL_RTL (exp), 0);
7495 break;
7497 case MEM_REF:
7498 if (MEM_P (x)
7499 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7500 get_alias_set (exp)))
7501 return 0;
7502 break;
7504 case CALL_EXPR:
7505 /* Assume that the call will clobber all hard registers and
7506 all of memory. */
7507 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7508 || MEM_P (x))
7509 return 0;
7510 break;
7512 case WITH_CLEANUP_EXPR:
7513 case CLEANUP_POINT_EXPR:
7514 /* Lowered by gimplify.c. */
7515 gcc_unreachable ();
7517 case SAVE_EXPR:
7518 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7520 default:
7521 break;
7524 /* If we have an rtx, we do not need to scan our operands. */
7525 if (exp_rtl)
7526 break;
7528 nops = TREE_OPERAND_LENGTH (exp);
7529 for (i = 0; i < nops; i++)
7530 if (TREE_OPERAND (exp, i) != 0
7531 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7532 return 0;
7534 break;
7536 case tcc_type:
7537 /* Should never get a type here. */
7538 gcc_unreachable ();
7541 /* If we have an rtl, find any enclosed object. Then see if we conflict
7542 with it. */
7543 if (exp_rtl)
7545 if (GET_CODE (exp_rtl) == SUBREG)
7547 exp_rtl = SUBREG_REG (exp_rtl);
7548 if (REG_P (exp_rtl)
7549 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7550 return 0;
7553 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7554 are memory and they conflict. */
7555 return ! (rtx_equal_p (x, exp_rtl)
7556 || (MEM_P (x) && MEM_P (exp_rtl)
7557 && true_dependence (exp_rtl, VOIDmode, x)));
7560 /* If we reach here, it is safe. */
7561 return 1;
7565 /* Return the highest power of two that EXP is known to be a multiple of.
7566 This is used in updating alignment of MEMs in array references. */
7568 unsigned HOST_WIDE_INT
7569 highest_pow2_factor (const_tree exp)
7571 unsigned HOST_WIDE_INT ret;
7572 int trailing_zeros = tree_ctz (exp);
7573 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7574 return BIGGEST_ALIGNMENT;
7575 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7576 if (ret > BIGGEST_ALIGNMENT)
7577 return BIGGEST_ALIGNMENT;
7578 return ret;
7581 /* Similar, except that the alignment requirements of TARGET are
7582 taken into account. Assume it is at least as aligned as its
7583 type, unless it is a COMPONENT_REF in which case the layout of
7584 the structure gives the alignment. */
7586 static unsigned HOST_WIDE_INT
7587 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7589 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7590 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7592 return MAX (factor, talign);
7595 #ifdef HAVE_conditional_move
7596 /* Convert the tree comparison code TCODE to the rtl one where the
7597 signedness is UNSIGNEDP. */
7599 static enum rtx_code
7600 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7602 enum rtx_code code;
7603 switch (tcode)
7605 case EQ_EXPR:
7606 code = EQ;
7607 break;
7608 case NE_EXPR:
7609 code = NE;
7610 break;
7611 case LT_EXPR:
7612 code = unsignedp ? LTU : LT;
7613 break;
7614 case LE_EXPR:
7615 code = unsignedp ? LEU : LE;
7616 break;
7617 case GT_EXPR:
7618 code = unsignedp ? GTU : GT;
7619 break;
7620 case GE_EXPR:
7621 code = unsignedp ? GEU : GE;
7622 break;
7623 case UNORDERED_EXPR:
7624 code = UNORDERED;
7625 break;
7626 case ORDERED_EXPR:
7627 code = ORDERED;
7628 break;
7629 case UNLT_EXPR:
7630 code = UNLT;
7631 break;
7632 case UNLE_EXPR:
7633 code = UNLE;
7634 break;
7635 case UNGT_EXPR:
7636 code = UNGT;
7637 break;
7638 case UNGE_EXPR:
7639 code = UNGE;
7640 break;
7641 case UNEQ_EXPR:
7642 code = UNEQ;
7643 break;
7644 case LTGT_EXPR:
7645 code = LTGT;
7646 break;
7648 default:
7649 gcc_unreachable ();
7651 return code;
7653 #endif
7655 /* Subroutine of expand_expr. Expand the two operands of a binary
7656 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7657 The value may be stored in TARGET if TARGET is nonzero. The
7658 MODIFIER argument is as documented by expand_expr. */
7660 void
7661 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7662 enum expand_modifier modifier)
7664 if (! safe_from_p (target, exp1, 1))
7665 target = 0;
7666 if (operand_equal_p (exp0, exp1, 0))
7668 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7669 *op1 = copy_rtx (*op0);
7671 else
7673 /* If we need to preserve evaluation order, copy exp0 into its own
7674 temporary variable so that it can't be clobbered by exp1. */
7675 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7676 exp0 = save_expr (exp0);
7677 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7678 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7683 /* Return a MEM that contains constant EXP. DEFER is as for
7684 output_constant_def and MODIFIER is as for expand_expr. */
7686 static rtx
7687 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7689 rtx mem;
7691 mem = output_constant_def (exp, defer);
7692 if (modifier != EXPAND_INITIALIZER)
7693 mem = use_anchored_address (mem);
7694 return mem;
7697 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7698 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7700 static rtx
7701 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7702 enum expand_modifier modifier, addr_space_t as)
7704 rtx result, subtarget;
7705 tree inner, offset;
7706 HOST_WIDE_INT bitsize, bitpos;
7707 int unsignedp, reversep, volatilep = 0;
7708 machine_mode mode1;
7710 /* If we are taking the address of a constant and are at the top level,
7711 we have to use output_constant_def since we can't call force_const_mem
7712 at top level. */
7713 /* ??? This should be considered a front-end bug. We should not be
7714 generating ADDR_EXPR of something that isn't an LVALUE. The only
7715 exception here is STRING_CST. */
7716 if (CONSTANT_CLASS_P (exp))
7718 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7719 if (modifier < EXPAND_SUM)
7720 result = force_operand (result, target);
7721 return result;
7724 /* Everything must be something allowed by is_gimple_addressable. */
7725 switch (TREE_CODE (exp))
7727 case INDIRECT_REF:
7728 /* This case will happen via recursion for &a->b. */
7729 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7731 case MEM_REF:
7733 tree tem = TREE_OPERAND (exp, 0);
7734 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7735 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7736 return expand_expr (tem, target, tmode, modifier);
7739 case CONST_DECL:
7740 /* Expand the initializer like constants above. */
7741 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7742 0, modifier), 0);
7743 if (modifier < EXPAND_SUM)
7744 result = force_operand (result, target);
7745 return result;
7747 case REALPART_EXPR:
7748 /* The real part of the complex number is always first, therefore
7749 the address is the same as the address of the parent object. */
7750 offset = 0;
7751 bitpos = 0;
7752 inner = TREE_OPERAND (exp, 0);
7753 break;
7755 case IMAGPART_EXPR:
7756 /* The imaginary part of the complex number is always second.
7757 The expression is therefore always offset by the size of the
7758 scalar type. */
7759 offset = 0;
7760 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7761 inner = TREE_OPERAND (exp, 0);
7762 break;
7764 case COMPOUND_LITERAL_EXPR:
7765 /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7766 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7767 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7768 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7769 the initializers aren't gimplified. */
7770 if (COMPOUND_LITERAL_EXPR_DECL (exp)
7771 && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7772 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7773 target, tmode, modifier, as);
7774 /* FALLTHRU */
7775 default:
7776 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7777 expand_expr, as that can have various side effects; LABEL_DECLs for
7778 example, may not have their DECL_RTL set yet. Expand the rtl of
7779 CONSTRUCTORs too, which should yield a memory reference for the
7780 constructor's contents. Assume language specific tree nodes can
7781 be expanded in some interesting way. */
7782 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7783 if (DECL_P (exp)
7784 || TREE_CODE (exp) == CONSTRUCTOR
7785 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7787 result = expand_expr (exp, target, tmode,
7788 modifier == EXPAND_INITIALIZER
7789 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7791 /* If the DECL isn't in memory, then the DECL wasn't properly
7792 marked TREE_ADDRESSABLE, which will be either a front-end
7793 or a tree optimizer bug. */
7795 if (TREE_ADDRESSABLE (exp)
7796 && ! MEM_P (result)
7797 && ! targetm.calls.allocate_stack_slots_for_args ())
7799 error ("local frame unavailable (naked function?)");
7800 return result;
7802 else
7803 gcc_assert (MEM_P (result));
7804 result = XEXP (result, 0);
7806 /* ??? Is this needed anymore? */
7807 if (DECL_P (exp))
7808 TREE_USED (exp) = 1;
7810 if (modifier != EXPAND_INITIALIZER
7811 && modifier != EXPAND_CONST_ADDRESS
7812 && modifier != EXPAND_SUM)
7813 result = force_operand (result, target);
7814 return result;
7817 /* Pass FALSE as the last argument to get_inner_reference although
7818 we are expanding to RTL. The rationale is that we know how to
7819 handle "aligning nodes" here: we can just bypass them because
7820 they won't change the final object whose address will be returned
7821 (they actually exist only for that purpose). */
7822 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7823 &unsignedp, &reversep, &volatilep, false);
7824 break;
7827 /* We must have made progress. */
7828 gcc_assert (inner != exp);
7830 subtarget = offset || bitpos ? NULL_RTX : target;
7831 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7832 inner alignment, force the inner to be sufficiently aligned. */
7833 if (CONSTANT_CLASS_P (inner)
7834 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7836 inner = copy_node (inner);
7837 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7838 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7839 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7841 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7843 if (offset)
7845 rtx tmp;
7847 if (modifier != EXPAND_NORMAL)
7848 result = force_operand (result, NULL);
7849 tmp = expand_expr (offset, NULL_RTX, tmode,
7850 modifier == EXPAND_INITIALIZER
7851 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7853 /* expand_expr is allowed to return an object in a mode other
7854 than TMODE. If it did, we need to convert. */
7855 if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7856 tmp = convert_modes (tmode, GET_MODE (tmp),
7857 tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7858 result = convert_memory_address_addr_space (tmode, result, as);
7859 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7861 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7862 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7863 else
7865 subtarget = bitpos ? NULL_RTX : target;
7866 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7867 1, OPTAB_LIB_WIDEN);
7871 if (bitpos)
7873 /* Someone beforehand should have rejected taking the address
7874 of such an object. */
7875 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7877 result = convert_memory_address_addr_space (tmode, result, as);
7878 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7879 if (modifier < EXPAND_SUM)
7880 result = force_operand (result, target);
7883 return result;
7886 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7887 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7889 static rtx
7890 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7891 enum expand_modifier modifier)
7893 addr_space_t as = ADDR_SPACE_GENERIC;
7894 machine_mode address_mode = Pmode;
7895 machine_mode pointer_mode = ptr_mode;
7896 machine_mode rmode;
7897 rtx result;
7899 /* Target mode of VOIDmode says "whatever's natural". */
7900 if (tmode == VOIDmode)
7901 tmode = TYPE_MODE (TREE_TYPE (exp));
7903 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7905 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7906 address_mode = targetm.addr_space.address_mode (as);
7907 pointer_mode = targetm.addr_space.pointer_mode (as);
7910 /* We can get called with some Weird Things if the user does silliness
7911 like "(short) &a". In that case, convert_memory_address won't do
7912 the right thing, so ignore the given target mode. */
7913 if (tmode != address_mode && tmode != pointer_mode)
7914 tmode = address_mode;
7916 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7917 tmode, modifier, as);
7919 /* Despite expand_expr claims concerning ignoring TMODE when not
7920 strictly convenient, stuff breaks if we don't honor it. Note
7921 that combined with the above, we only do this for pointer modes. */
7922 rmode = GET_MODE (result);
7923 if (rmode == VOIDmode)
7924 rmode = tmode;
7925 if (rmode != tmode)
7926 result = convert_memory_address_addr_space (tmode, result, as);
7928 return result;
7931 /* Generate code for computing CONSTRUCTOR EXP.
7932 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7933 is TRUE, instead of creating a temporary variable in memory
7934 NULL is returned and the caller needs to handle it differently. */
7936 static rtx
7937 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7938 bool avoid_temp_mem)
7940 tree type = TREE_TYPE (exp);
7941 machine_mode mode = TYPE_MODE (type);
7943 /* Try to avoid creating a temporary at all. This is possible
7944 if all of the initializer is zero.
7945 FIXME: try to handle all [0..255] initializers we can handle
7946 with memset. */
7947 if (TREE_STATIC (exp)
7948 && !TREE_ADDRESSABLE (exp)
7949 && target != 0 && mode == BLKmode
7950 && all_zeros_p (exp))
7952 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7953 return target;
7956 /* All elts simple constants => refer to a constant in memory. But
7957 if this is a non-BLKmode mode, let it store a field at a time
7958 since that should make a CONST_INT, CONST_WIDE_INT or
7959 CONST_DOUBLE when we fold. Likewise, if we have a target we can
7960 use, it is best to store directly into the target unless the type
7961 is large enough that memcpy will be used. If we are making an
7962 initializer and all operands are constant, put it in memory as
7963 well.
7965 FIXME: Avoid trying to fill vector constructors piece-meal.
7966 Output them with output_constant_def below unless we're sure
7967 they're zeros. This should go away when vector initializers
7968 are treated like VECTOR_CST instead of arrays. */
7969 if ((TREE_STATIC (exp)
7970 && ((mode == BLKmode
7971 && ! (target != 0 && safe_from_p (target, exp, 1)))
7972 || TREE_ADDRESSABLE (exp)
7973 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7974 && (! can_move_by_pieces
7975 (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7976 TYPE_ALIGN (type)))
7977 && ! mostly_zeros_p (exp))))
7978 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7979 && TREE_CONSTANT (exp)))
7981 rtx constructor;
7983 if (avoid_temp_mem)
7984 return NULL_RTX;
7986 constructor = expand_expr_constant (exp, 1, modifier);
7988 if (modifier != EXPAND_CONST_ADDRESS
7989 && modifier != EXPAND_INITIALIZER
7990 && modifier != EXPAND_SUM)
7991 constructor = validize_mem (constructor);
7993 return constructor;
7996 /* Handle calls that pass values in multiple non-contiguous
7997 locations. The Irix 6 ABI has examples of this. */
7998 if (target == 0 || ! safe_from_p (target, exp, 1)
7999 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8001 if (avoid_temp_mem)
8002 return NULL_RTX;
8004 target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8007 store_constructor (exp, target, 0, int_expr_size (exp), false);
8008 return target;
8012 /* expand_expr: generate code for computing expression EXP.
8013 An rtx for the computed value is returned. The value is never null.
8014 In the case of a void EXP, const0_rtx is returned.
8016 The value may be stored in TARGET if TARGET is nonzero.
8017 TARGET is just a suggestion; callers must assume that
8018 the rtx returned may not be the same as TARGET.
8020 If TARGET is CONST0_RTX, it means that the value will be ignored.
8022 If TMODE is not VOIDmode, it suggests generating the
8023 result in mode TMODE. But this is done only when convenient.
8024 Otherwise, TMODE is ignored and the value generated in its natural mode.
8025 TMODE is just a suggestion; callers must assume that
8026 the rtx returned may not have mode TMODE.
8028 Note that TARGET may have neither TMODE nor MODE. In that case, it
8029 probably will not be used.
8031 If MODIFIER is EXPAND_SUM then when EXP is an addition
8032 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8033 or a nest of (PLUS ...) and (MINUS ...) where the terms are
8034 products as above, or REG or MEM, or constant.
8035 Ordinarily in such cases we would output mul or add instructions
8036 and then return a pseudo reg containing the sum.
8038 EXPAND_INITIALIZER is much like EXPAND_SUM except that
8039 it also marks a label as absolutely required (it can't be dead).
8040 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8041 This is used for outputting expressions used in initializers.
8043 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8044 with a constant address even if that address is not normally legitimate.
8045 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8047 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8048 a call parameter. Such targets require special care as we haven't yet
8049 marked TARGET so that it's safe from being trashed by libcalls. We
8050 don't want to use TARGET for anything but the final result;
8051 Intermediate values must go elsewhere. Additionally, calls to
8052 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8054 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8055 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8056 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
8057 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8058 recursively.
8060 If INNER_REFERENCE_P is true, we are expanding an inner reference.
8061 In this case, we don't adjust a returned MEM rtx that wouldn't be
8062 sufficiently aligned for its mode; instead, it's up to the caller
8063 to deal with it afterwards. This is used to make sure that unaligned
8064 base objects for which out-of-bounds accesses are supported, for
8065 example record types with trailing arrays, aren't realigned behind
8066 the back of the caller.
8067 The normal operating mode is to pass FALSE for this parameter. */
8070 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8071 enum expand_modifier modifier, rtx *alt_rtl,
8072 bool inner_reference_p)
8074 rtx ret;
8076 /* Handle ERROR_MARK before anybody tries to access its type. */
8077 if (TREE_CODE (exp) == ERROR_MARK
8078 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8080 ret = CONST0_RTX (tmode);
8081 return ret ? ret : const0_rtx;
8084 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8085 inner_reference_p);
8086 return ret;
8089 /* Try to expand the conditional expression which is represented by
8090 TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves. If succeseds
8091 return the rtl reg which repsents the result. Otherwise return
8092 NULL_RTL. */
8094 static rtx
8095 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8096 tree treeop1 ATTRIBUTE_UNUSED,
8097 tree treeop2 ATTRIBUTE_UNUSED)
8099 #ifdef HAVE_conditional_move
8100 rtx insn;
8101 rtx op00, op01, op1, op2;
8102 enum rtx_code comparison_code;
8103 machine_mode comparison_mode;
8104 gimple srcstmt;
8105 rtx temp;
8106 tree type = TREE_TYPE (treeop1);
8107 int unsignedp = TYPE_UNSIGNED (type);
8108 machine_mode mode = TYPE_MODE (type);
8109 machine_mode orig_mode = mode;
8111 /* If we cannot do a conditional move on the mode, try doing it
8112 with the promoted mode. */
8113 if (!can_conditionally_move_p (mode))
8115 mode = promote_mode (type, mode, &unsignedp);
8116 if (!can_conditionally_move_p (mode))
8117 return NULL_RTX;
8118 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
8120 else
8121 temp = assign_temp (type, 0, 1);
8123 start_sequence ();
8124 expand_operands (treeop1, treeop2,
8125 temp, &op1, &op2, EXPAND_NORMAL);
8127 if (TREE_CODE (treeop0) == SSA_NAME
8128 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8130 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8131 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8132 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8133 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8134 comparison_mode = TYPE_MODE (type);
8135 unsignedp = TYPE_UNSIGNED (type);
8136 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8138 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8140 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8141 enum tree_code cmpcode = TREE_CODE (treeop0);
8142 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8143 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8144 unsignedp = TYPE_UNSIGNED (type);
8145 comparison_mode = TYPE_MODE (type);
8146 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8148 else
8150 op00 = expand_normal (treeop0);
8151 op01 = const0_rtx;
8152 comparison_code = NE;
8153 comparison_mode = GET_MODE (op00);
8154 if (comparison_mode == VOIDmode)
8155 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8158 if (GET_MODE (op1) != mode)
8159 op1 = gen_lowpart (mode, op1);
8161 if (GET_MODE (op2) != mode)
8162 op2 = gen_lowpart (mode, op2);
8164 /* Try to emit the conditional move. */
8165 insn = emit_conditional_move (temp, comparison_code,
8166 op00, op01, comparison_mode,
8167 op1, op2, mode,
8168 unsignedp);
8170 /* If we could do the conditional move, emit the sequence,
8171 and return. */
8172 if (insn)
8174 rtx_insn *seq = get_insns ();
8175 end_sequence ();
8176 emit_insn (seq);
8177 return convert_modes (orig_mode, mode, temp, 0);
8180 /* Otherwise discard the sequence and fall back to code with
8181 branches. */
8182 end_sequence ();
8183 #endif
8184 return NULL_RTX;
8188 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8189 enum expand_modifier modifier)
8191 rtx op0, op1, op2, temp;
8192 tree type;
8193 int unsignedp;
8194 machine_mode mode;
8195 enum tree_code code = ops->code;
8196 optab this_optab;
8197 rtx subtarget, original_target;
8198 int ignore;
8199 bool reduce_bit_field;
8200 location_t loc = ops->location;
8201 tree treeop0, treeop1, treeop2;
8202 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
8203 ? reduce_to_bit_field_precision ((expr), \
8204 target, \
8205 type) \
8206 : (expr))
8208 type = ops->type;
8209 mode = TYPE_MODE (type);
8210 unsignedp = TYPE_UNSIGNED (type);
8212 treeop0 = ops->op0;
8213 treeop1 = ops->op1;
8214 treeop2 = ops->op2;
8216 /* We should be called only on simple (binary or unary) expressions,
8217 exactly those that are valid in gimple expressions that aren't
8218 GIMPLE_SINGLE_RHS (or invalid). */
8219 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8220 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8221 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8223 ignore = (target == const0_rtx
8224 || ((CONVERT_EXPR_CODE_P (code)
8225 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8226 && TREE_CODE (type) == VOID_TYPE));
8228 /* We should be called only if we need the result. */
8229 gcc_assert (!ignore);
8231 /* An operation in what may be a bit-field type needs the
8232 result to be reduced to the precision of the bit-field type,
8233 which is narrower than that of the type's mode. */
8234 reduce_bit_field = (INTEGRAL_TYPE_P (type)
8235 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8237 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8238 target = 0;
8240 /* Use subtarget as the target for operand 0 of a binary operation. */
8241 subtarget = get_subtarget (target);
8242 original_target = target;
8244 switch (code)
8246 case NON_LVALUE_EXPR:
8247 case PAREN_EXPR:
8248 CASE_CONVERT:
8249 if (treeop0 == error_mark_node)
8250 return const0_rtx;
8252 if (TREE_CODE (type) == UNION_TYPE)
8254 tree valtype = TREE_TYPE (treeop0);
8256 /* If both input and output are BLKmode, this conversion isn't doing
8257 anything except possibly changing memory attribute. */
8258 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8260 rtx result = expand_expr (treeop0, target, tmode,
8261 modifier);
8263 result = copy_rtx (result);
8264 set_mem_attributes (result, type, 0);
8265 return result;
8268 if (target == 0)
8270 if (TYPE_MODE (type) != BLKmode)
8271 target = gen_reg_rtx (TYPE_MODE (type));
8272 else
8273 target = assign_temp (type, 1, 1);
8276 if (MEM_P (target))
8277 /* Store data into beginning of memory target. */
8278 store_expr (treeop0,
8279 adjust_address (target, TYPE_MODE (valtype), 0),
8280 modifier == EXPAND_STACK_PARM,
8281 false, TYPE_REVERSE_STORAGE_ORDER (type));
8283 else
8285 gcc_assert (REG_P (target)
8286 && !TYPE_REVERSE_STORAGE_ORDER (type));
8288 /* Store this field into a union of the proper type. */
8289 store_field (target,
8290 MIN ((int_size_in_bytes (TREE_TYPE
8291 (treeop0))
8292 * BITS_PER_UNIT),
8293 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8294 0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8295 false, false);
8298 /* Return the entire union. */
8299 return target;
8302 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8304 op0 = expand_expr (treeop0, target, VOIDmode,
8305 modifier);
8307 /* If the signedness of the conversion differs and OP0 is
8308 a promoted SUBREG, clear that indication since we now
8309 have to do the proper extension. */
8310 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8311 && GET_CODE (op0) == SUBREG)
8312 SUBREG_PROMOTED_VAR_P (op0) = 0;
8314 return REDUCE_BIT_FIELD (op0);
8317 op0 = expand_expr (treeop0, NULL_RTX, mode,
8318 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8319 if (GET_MODE (op0) == mode)
8322 /* If OP0 is a constant, just convert it into the proper mode. */
8323 else if (CONSTANT_P (op0))
8325 tree inner_type = TREE_TYPE (treeop0);
8326 machine_mode inner_mode = GET_MODE (op0);
8328 if (inner_mode == VOIDmode)
8329 inner_mode = TYPE_MODE (inner_type);
8331 if (modifier == EXPAND_INITIALIZER)
8332 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8333 subreg_lowpart_offset (mode,
8334 inner_mode));
8335 else
8336 op0= convert_modes (mode, inner_mode, op0,
8337 TYPE_UNSIGNED (inner_type));
8340 else if (modifier == EXPAND_INITIALIZER)
8341 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8343 else if (target == 0)
8344 op0 = convert_to_mode (mode, op0,
8345 TYPE_UNSIGNED (TREE_TYPE
8346 (treeop0)));
8347 else
8349 convert_move (target, op0,
8350 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8351 op0 = target;
8354 return REDUCE_BIT_FIELD (op0);
8356 case ADDR_SPACE_CONVERT_EXPR:
8358 tree treeop0_type = TREE_TYPE (treeop0);
8359 addr_space_t as_to;
8360 addr_space_t as_from;
8362 gcc_assert (POINTER_TYPE_P (type));
8363 gcc_assert (POINTER_TYPE_P (treeop0_type));
8365 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8366 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8368 /* Conversions between pointers to the same address space should
8369 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8370 gcc_assert (as_to != as_from);
8372 /* Ask target code to handle conversion between pointers
8373 to overlapping address spaces. */
8374 if (targetm.addr_space.subset_p (as_to, as_from)
8375 || targetm.addr_space.subset_p (as_from, as_to))
8377 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8378 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8379 gcc_assert (op0);
8380 return op0;
8383 /* For disjoint address spaces, converting anything but
8384 a null pointer invokes undefined behaviour. We simply
8385 always return a null pointer here. */
8386 return CONST0_RTX (mode);
8389 case POINTER_PLUS_EXPR:
8390 /* Even though the sizetype mode and the pointer's mode can be different
8391 expand is able to handle this correctly and get the correct result out
8392 of the PLUS_EXPR code. */
8393 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8394 if sizetype precision is smaller than pointer precision. */
8395 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8396 treeop1 = fold_convert_loc (loc, type,
8397 fold_convert_loc (loc, ssizetype,
8398 treeop1));
8399 /* If sizetype precision is larger than pointer precision, truncate the
8400 offset to have matching modes. */
8401 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8402 treeop1 = fold_convert_loc (loc, type, treeop1);
8404 case PLUS_EXPR:
8405 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8406 something else, make sure we add the register to the constant and
8407 then to the other thing. This case can occur during strength
8408 reduction and doing it this way will produce better code if the
8409 frame pointer or argument pointer is eliminated.
8411 fold-const.c will ensure that the constant is always in the inner
8412 PLUS_EXPR, so the only case we need to do anything about is if
8413 sp, ap, or fp is our second argument, in which case we must swap
8414 the innermost first argument and our second argument. */
8416 if (TREE_CODE (treeop0) == PLUS_EXPR
8417 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8418 && TREE_CODE (treeop1) == VAR_DECL
8419 && (DECL_RTL (treeop1) == frame_pointer_rtx
8420 || DECL_RTL (treeop1) == stack_pointer_rtx
8421 || DECL_RTL (treeop1) == arg_pointer_rtx))
8423 gcc_unreachable ();
8426 /* If the result is to be ptr_mode and we are adding an integer to
8427 something, we might be forming a constant. So try to use
8428 plus_constant. If it produces a sum and we can't accept it,
8429 use force_operand. This allows P = &ARR[const] to generate
8430 efficient code on machines where a SYMBOL_REF is not a valid
8431 address.
8433 If this is an EXPAND_SUM call, always return the sum. */
8434 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8435 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8437 if (modifier == EXPAND_STACK_PARM)
8438 target = 0;
8439 if (TREE_CODE (treeop0) == INTEGER_CST
8440 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8441 && TREE_CONSTANT (treeop1))
8443 rtx constant_part;
8444 HOST_WIDE_INT wc;
8445 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8447 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8448 EXPAND_SUM);
8449 /* Use wi::shwi to ensure that the constant is
8450 truncated according to the mode of OP1, then sign extended
8451 to a HOST_WIDE_INT. Using the constant directly can result
8452 in non-canonical RTL in a 64x32 cross compile. */
8453 wc = TREE_INT_CST_LOW (treeop0);
8454 constant_part =
8455 immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8456 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8457 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8458 op1 = force_operand (op1, target);
8459 return REDUCE_BIT_FIELD (op1);
8462 else if (TREE_CODE (treeop1) == INTEGER_CST
8463 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8464 && TREE_CONSTANT (treeop0))
8466 rtx constant_part;
8467 HOST_WIDE_INT wc;
8468 machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8470 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8471 (modifier == EXPAND_INITIALIZER
8472 ? EXPAND_INITIALIZER : EXPAND_SUM));
8473 if (! CONSTANT_P (op0))
8475 op1 = expand_expr (treeop1, NULL_RTX,
8476 VOIDmode, modifier);
8477 /* Return a PLUS if modifier says it's OK. */
8478 if (modifier == EXPAND_SUM
8479 || modifier == EXPAND_INITIALIZER)
8480 return simplify_gen_binary (PLUS, mode, op0, op1);
8481 goto binop2;
8483 /* Use wi::shwi to ensure that the constant is
8484 truncated according to the mode of OP1, then sign extended
8485 to a HOST_WIDE_INT. Using the constant directly can result
8486 in non-canonical RTL in a 64x32 cross compile. */
8487 wc = TREE_INT_CST_LOW (treeop1);
8488 constant_part
8489 = immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8490 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8491 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8492 op0 = force_operand (op0, target);
8493 return REDUCE_BIT_FIELD (op0);
8497 /* Use TER to expand pointer addition of a negated value
8498 as pointer subtraction. */
8499 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8500 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8501 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8502 && TREE_CODE (treeop1) == SSA_NAME
8503 && TYPE_MODE (TREE_TYPE (treeop0))
8504 == TYPE_MODE (TREE_TYPE (treeop1)))
8506 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8507 if (def)
8509 treeop1 = gimple_assign_rhs1 (def);
8510 code = MINUS_EXPR;
8511 goto do_minus;
8515 /* No sense saving up arithmetic to be done
8516 if it's all in the wrong mode to form part of an address.
8517 And force_operand won't know whether to sign-extend or
8518 zero-extend. */
8519 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8520 || mode != ptr_mode)
8522 expand_operands (treeop0, treeop1,
8523 subtarget, &op0, &op1, EXPAND_NORMAL);
8524 if (op0 == const0_rtx)
8525 return op1;
8526 if (op1 == const0_rtx)
8527 return op0;
8528 goto binop2;
8531 expand_operands (treeop0, treeop1,
8532 subtarget, &op0, &op1, modifier);
8533 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8535 case MINUS_EXPR:
8536 do_minus:
8537 /* For initializers, we are allowed to return a MINUS of two
8538 symbolic constants. Here we handle all cases when both operands
8539 are constant. */
8540 /* Handle difference of two symbolic constants,
8541 for the sake of an initializer. */
8542 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8543 && really_constant_p (treeop0)
8544 && really_constant_p (treeop1))
8546 expand_operands (treeop0, treeop1,
8547 NULL_RTX, &op0, &op1, modifier);
8549 /* If the last operand is a CONST_INT, use plus_constant of
8550 the negated constant. Else make the MINUS. */
8551 if (CONST_INT_P (op1))
8552 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8553 -INTVAL (op1)));
8554 else
8555 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8558 /* No sense saving up arithmetic to be done
8559 if it's all in the wrong mode to form part of an address.
8560 And force_operand won't know whether to sign-extend or
8561 zero-extend. */
8562 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8563 || mode != ptr_mode)
8564 goto binop;
8566 expand_operands (treeop0, treeop1,
8567 subtarget, &op0, &op1, modifier);
8569 /* Convert A - const to A + (-const). */
8570 if (CONST_INT_P (op1))
8572 op1 = negate_rtx (mode, op1);
8573 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8576 goto binop2;
8578 case WIDEN_MULT_PLUS_EXPR:
8579 case WIDEN_MULT_MINUS_EXPR:
8580 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8581 op2 = expand_normal (treeop2);
8582 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8583 target, unsignedp);
8584 return target;
8586 case WIDEN_MULT_EXPR:
8587 /* If first operand is constant, swap them.
8588 Thus the following special case checks need only
8589 check the second operand. */
8590 if (TREE_CODE (treeop0) == INTEGER_CST)
8592 tree t1 = treeop0;
8593 treeop0 = treeop1;
8594 treeop1 = t1;
8597 /* First, check if we have a multiplication of one signed and one
8598 unsigned operand. */
8599 if (TREE_CODE (treeop1) != INTEGER_CST
8600 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8601 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8603 machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8604 this_optab = usmul_widen_optab;
8605 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8606 != CODE_FOR_nothing)
8608 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8609 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8610 EXPAND_NORMAL);
8611 else
8612 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8613 EXPAND_NORMAL);
8614 /* op0 and op1 might still be constant, despite the above
8615 != INTEGER_CST check. Handle it. */
8616 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8618 op0 = convert_modes (innermode, mode, op0, true);
8619 op1 = convert_modes (innermode, mode, op1, false);
8620 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8621 target, unsignedp));
8623 goto binop3;
8626 /* Check for a multiplication with matching signedness. */
8627 else if ((TREE_CODE (treeop1) == INTEGER_CST
8628 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8629 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8630 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8632 tree op0type = TREE_TYPE (treeop0);
8633 machine_mode innermode = TYPE_MODE (op0type);
8634 bool zextend_p = TYPE_UNSIGNED (op0type);
8635 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8636 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8638 if (TREE_CODE (treeop0) != INTEGER_CST)
8640 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8641 != CODE_FOR_nothing)
8643 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8644 EXPAND_NORMAL);
8645 /* op0 and op1 might still be constant, despite the above
8646 != INTEGER_CST check. Handle it. */
8647 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8649 widen_mult_const:
8650 op0 = convert_modes (innermode, mode, op0, zextend_p);
8652 = convert_modes (innermode, mode, op1,
8653 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8654 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8655 target,
8656 unsignedp));
8658 temp = expand_widening_mult (mode, op0, op1, target,
8659 unsignedp, this_optab);
8660 return REDUCE_BIT_FIELD (temp);
8662 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8663 != CODE_FOR_nothing
8664 && innermode == word_mode)
8666 rtx htem, hipart;
8667 op0 = expand_normal (treeop0);
8668 if (TREE_CODE (treeop1) == INTEGER_CST)
8669 op1 = convert_modes (innermode, mode,
8670 expand_normal (treeop1),
8671 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8672 else
8673 op1 = expand_normal (treeop1);
8674 /* op0 and op1 might still be constant, despite the above
8675 != INTEGER_CST check. Handle it. */
8676 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8677 goto widen_mult_const;
8678 temp = expand_binop (mode, other_optab, op0, op1, target,
8679 unsignedp, OPTAB_LIB_WIDEN);
8680 hipart = gen_highpart (innermode, temp);
8681 htem = expand_mult_highpart_adjust (innermode, hipart,
8682 op0, op1, hipart,
8683 zextend_p);
8684 if (htem != hipart)
8685 emit_move_insn (hipart, htem);
8686 return REDUCE_BIT_FIELD (temp);
8690 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8691 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8692 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8693 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8695 case FMA_EXPR:
8697 optab opt = fma_optab;
8698 gimple def0, def2;
8700 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8701 call. */
8702 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8704 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8705 tree call_expr;
8707 gcc_assert (fn != NULL_TREE);
8708 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8709 return expand_builtin (call_expr, target, subtarget, mode, false);
8712 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8713 /* The multiplication is commutative - look at its 2nd operand
8714 if the first isn't fed by a negate. */
8715 if (!def0)
8717 def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8718 /* Swap operands if the 2nd operand is fed by a negate. */
8719 if (def0)
8721 tree tem = treeop0;
8722 treeop0 = treeop1;
8723 treeop1 = tem;
8726 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8728 op0 = op2 = NULL;
8730 if (def0 && def2
8731 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8733 opt = fnms_optab;
8734 op0 = expand_normal (gimple_assign_rhs1 (def0));
8735 op2 = expand_normal (gimple_assign_rhs1 (def2));
8737 else if (def0
8738 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8740 opt = fnma_optab;
8741 op0 = expand_normal (gimple_assign_rhs1 (def0));
8743 else if (def2
8744 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8746 opt = fms_optab;
8747 op2 = expand_normal (gimple_assign_rhs1 (def2));
8750 if (op0 == NULL)
8751 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8752 if (op2 == NULL)
8753 op2 = expand_normal (treeop2);
8754 op1 = expand_normal (treeop1);
8756 return expand_ternary_op (TYPE_MODE (type), opt,
8757 op0, op1, op2, target, 0);
8760 case MULT_EXPR:
8761 /* If this is a fixed-point operation, then we cannot use the code
8762 below because "expand_mult" doesn't support sat/no-sat fixed-point
8763 multiplications. */
8764 if (ALL_FIXED_POINT_MODE_P (mode))
8765 goto binop;
8767 /* If first operand is constant, swap them.
8768 Thus the following special case checks need only
8769 check the second operand. */
8770 if (TREE_CODE (treeop0) == INTEGER_CST)
8772 tree t1 = treeop0;
8773 treeop0 = treeop1;
8774 treeop1 = t1;
8777 /* Attempt to return something suitable for generating an
8778 indexed address, for machines that support that. */
8780 if (modifier == EXPAND_SUM && mode == ptr_mode
8781 && tree_fits_shwi_p (treeop1))
8783 tree exp1 = treeop1;
8785 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8786 EXPAND_SUM);
8788 if (!REG_P (op0))
8789 op0 = force_operand (op0, NULL_RTX);
8790 if (!REG_P (op0))
8791 op0 = copy_to_mode_reg (mode, op0);
8793 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8794 gen_int_mode (tree_to_shwi (exp1),
8795 TYPE_MODE (TREE_TYPE (exp1)))));
8798 if (modifier == EXPAND_STACK_PARM)
8799 target = 0;
8801 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8802 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8804 case TRUNC_DIV_EXPR:
8805 case FLOOR_DIV_EXPR:
8806 case CEIL_DIV_EXPR:
8807 case ROUND_DIV_EXPR:
8808 case EXACT_DIV_EXPR:
8809 /* If this is a fixed-point operation, then we cannot use the code
8810 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8811 divisions. */
8812 if (ALL_FIXED_POINT_MODE_P (mode))
8813 goto binop;
8815 if (modifier == EXPAND_STACK_PARM)
8816 target = 0;
8817 /* Possible optimization: compute the dividend with EXPAND_SUM
8818 then if the divisor is constant can optimize the case
8819 where some terms of the dividend have coeffs divisible by it. */
8820 expand_operands (treeop0, treeop1,
8821 subtarget, &op0, &op1, EXPAND_NORMAL);
8822 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8824 case RDIV_EXPR:
8825 goto binop;
8827 case MULT_HIGHPART_EXPR:
8828 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8829 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8830 gcc_assert (temp);
8831 return temp;
8833 case TRUNC_MOD_EXPR:
8834 case FLOOR_MOD_EXPR:
8835 case CEIL_MOD_EXPR:
8836 case ROUND_MOD_EXPR:
8837 if (modifier == EXPAND_STACK_PARM)
8838 target = 0;
8839 expand_operands (treeop0, treeop1,
8840 subtarget, &op0, &op1, EXPAND_NORMAL);
8841 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8843 case FIXED_CONVERT_EXPR:
8844 op0 = expand_normal (treeop0);
8845 if (target == 0 || modifier == EXPAND_STACK_PARM)
8846 target = gen_reg_rtx (mode);
8848 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8849 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8850 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8851 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8852 else
8853 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8854 return target;
8856 case FIX_TRUNC_EXPR:
8857 op0 = expand_normal (treeop0);
8858 if (target == 0 || modifier == EXPAND_STACK_PARM)
8859 target = gen_reg_rtx (mode);
8860 expand_fix (target, op0, unsignedp);
8861 return target;
8863 case FLOAT_EXPR:
8864 op0 = expand_normal (treeop0);
8865 if (target == 0 || modifier == EXPAND_STACK_PARM)
8866 target = gen_reg_rtx (mode);
8867 /* expand_float can't figure out what to do if FROM has VOIDmode.
8868 So give it the correct mode. With -O, cse will optimize this. */
8869 if (GET_MODE (op0) == VOIDmode)
8870 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8871 op0);
8872 expand_float (target, op0,
8873 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8874 return target;
8876 case NEGATE_EXPR:
8877 op0 = expand_expr (treeop0, subtarget,
8878 VOIDmode, EXPAND_NORMAL);
8879 if (modifier == EXPAND_STACK_PARM)
8880 target = 0;
8881 temp = expand_unop (mode,
8882 optab_for_tree_code (NEGATE_EXPR, type,
8883 optab_default),
8884 op0, target, 0);
8885 gcc_assert (temp);
8886 return REDUCE_BIT_FIELD (temp);
8888 case ABS_EXPR:
8889 op0 = expand_expr (treeop0, subtarget,
8890 VOIDmode, EXPAND_NORMAL);
8891 if (modifier == EXPAND_STACK_PARM)
8892 target = 0;
8894 /* ABS_EXPR is not valid for complex arguments. */
8895 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8896 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8898 /* Unsigned abs is simply the operand. Testing here means we don't
8899 risk generating incorrect code below. */
8900 if (TYPE_UNSIGNED (type))
8901 return op0;
8903 return expand_abs (mode, op0, target, unsignedp,
8904 safe_from_p (target, treeop0, 1));
8906 case MAX_EXPR:
8907 case MIN_EXPR:
8908 target = original_target;
8909 if (target == 0
8910 || modifier == EXPAND_STACK_PARM
8911 || (MEM_P (target) && MEM_VOLATILE_P (target))
8912 || GET_MODE (target) != mode
8913 || (REG_P (target)
8914 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8915 target = gen_reg_rtx (mode);
8916 expand_operands (treeop0, treeop1,
8917 target, &op0, &op1, EXPAND_NORMAL);
8919 /* First try to do it with a special MIN or MAX instruction.
8920 If that does not win, use a conditional jump to select the proper
8921 value. */
8922 this_optab = optab_for_tree_code (code, type, optab_default);
8923 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8924 OPTAB_WIDEN);
8925 if (temp != 0)
8926 return temp;
8928 /* At this point, a MEM target is no longer useful; we will get better
8929 code without it. */
8931 if (! REG_P (target))
8932 target = gen_reg_rtx (mode);
8934 /* If op1 was placed in target, swap op0 and op1. */
8935 if (target != op0 && target == op1)
8936 std::swap (op0, op1);
8938 /* We generate better code and avoid problems with op1 mentioning
8939 target by forcing op1 into a pseudo if it isn't a constant. */
8940 if (! CONSTANT_P (op1))
8941 op1 = force_reg (mode, op1);
8944 enum rtx_code comparison_code;
8945 rtx cmpop1 = op1;
8947 if (code == MAX_EXPR)
8948 comparison_code = unsignedp ? GEU : GE;
8949 else
8950 comparison_code = unsignedp ? LEU : LE;
8952 /* Canonicalize to comparisons against 0. */
8953 if (op1 == const1_rtx)
8955 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8956 or (a != 0 ? a : 1) for unsigned.
8957 For MIN we are safe converting (a <= 1 ? a : 1)
8958 into (a <= 0 ? a : 1) */
8959 cmpop1 = const0_rtx;
8960 if (code == MAX_EXPR)
8961 comparison_code = unsignedp ? NE : GT;
8963 if (op1 == constm1_rtx && !unsignedp)
8965 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8966 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8967 cmpop1 = const0_rtx;
8968 if (code == MIN_EXPR)
8969 comparison_code = LT;
8971 #ifdef HAVE_conditional_move
8972 /* Use a conditional move if possible. */
8973 if (can_conditionally_move_p (mode))
8975 rtx insn;
8977 start_sequence ();
8979 /* Try to emit the conditional move. */
8980 insn = emit_conditional_move (target, comparison_code,
8981 op0, cmpop1, mode,
8982 op0, op1, mode,
8983 unsignedp);
8985 /* If we could do the conditional move, emit the sequence,
8986 and return. */
8987 if (insn)
8989 rtx_insn *seq = get_insns ();
8990 end_sequence ();
8991 emit_insn (seq);
8992 return target;
8995 /* Otherwise discard the sequence and fall back to code with
8996 branches. */
8997 end_sequence ();
8999 #endif
9000 if (target != op0)
9001 emit_move_insn (target, op0);
9003 temp = gen_label_rtx ();
9004 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9005 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
9006 -1);
9008 emit_move_insn (target, op1);
9009 emit_label (temp);
9010 return target;
9012 case BIT_NOT_EXPR:
9013 op0 = expand_expr (treeop0, subtarget,
9014 VOIDmode, EXPAND_NORMAL);
9015 if (modifier == EXPAND_STACK_PARM)
9016 target = 0;
9017 /* In case we have to reduce the result to bitfield precision
9018 for unsigned bitfield expand this as XOR with a proper constant
9019 instead. */
9020 if (reduce_bit_field && TYPE_UNSIGNED (type))
9022 wide_int mask = wi::mask (TYPE_PRECISION (type),
9023 false, GET_MODE_PRECISION (mode));
9025 temp = expand_binop (mode, xor_optab, op0,
9026 immed_wide_int_const (mask, mode),
9027 target, 1, OPTAB_LIB_WIDEN);
9029 else
9030 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9031 gcc_assert (temp);
9032 return temp;
9034 /* ??? Can optimize bitwise operations with one arg constant.
9035 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9036 and (a bitwise1 b) bitwise2 b (etc)
9037 but that is probably not worth while. */
9039 case BIT_AND_EXPR:
9040 case BIT_IOR_EXPR:
9041 case BIT_XOR_EXPR:
9042 goto binop;
9044 case LROTATE_EXPR:
9045 case RROTATE_EXPR:
9046 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9047 || (GET_MODE_PRECISION (TYPE_MODE (type))
9048 == TYPE_PRECISION (type)));
9049 /* fall through */
9051 case LSHIFT_EXPR:
9052 case RSHIFT_EXPR:
9053 /* If this is a fixed-point operation, then we cannot use the code
9054 below because "expand_shift" doesn't support sat/no-sat fixed-point
9055 shifts. */
9056 if (ALL_FIXED_POINT_MODE_P (mode))
9057 goto binop;
9059 if (! safe_from_p (subtarget, treeop1, 1))
9060 subtarget = 0;
9061 if (modifier == EXPAND_STACK_PARM)
9062 target = 0;
9063 op0 = expand_expr (treeop0, subtarget,
9064 VOIDmode, EXPAND_NORMAL);
9065 temp = expand_variable_shift (code, mode, op0, treeop1, target,
9066 unsignedp);
9067 if (code == LSHIFT_EXPR)
9068 temp = REDUCE_BIT_FIELD (temp);
9069 return temp;
9071 /* Could determine the answer when only additive constants differ. Also,
9072 the addition of one can be handled by changing the condition. */
9073 case LT_EXPR:
9074 case LE_EXPR:
9075 case GT_EXPR:
9076 case GE_EXPR:
9077 case EQ_EXPR:
9078 case NE_EXPR:
9079 case UNORDERED_EXPR:
9080 case ORDERED_EXPR:
9081 case UNLT_EXPR:
9082 case UNLE_EXPR:
9083 case UNGT_EXPR:
9084 case UNGE_EXPR:
9085 case UNEQ_EXPR:
9086 case LTGT_EXPR:
9087 temp = do_store_flag (ops,
9088 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9089 tmode != VOIDmode ? tmode : mode);
9090 if (temp)
9091 return temp;
9093 /* Use a compare and a jump for BLKmode comparisons, or for function
9094 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
9096 if ((target == 0
9097 || modifier == EXPAND_STACK_PARM
9098 || ! safe_from_p (target, treeop0, 1)
9099 || ! safe_from_p (target, treeop1, 1)
9100 /* Make sure we don't have a hard reg (such as function's return
9101 value) live across basic blocks, if not optimizing. */
9102 || (!optimize && REG_P (target)
9103 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9104 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9106 emit_move_insn (target, const0_rtx);
9108 op1 = gen_label_rtx ();
9109 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9111 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9112 emit_move_insn (target, constm1_rtx);
9113 else
9114 emit_move_insn (target, const1_rtx);
9116 emit_label (op1);
9117 return target;
9119 case COMPLEX_EXPR:
9120 /* Get the rtx code of the operands. */
9121 op0 = expand_normal (treeop0);
9122 op1 = expand_normal (treeop1);
9124 if (!target)
9125 target = gen_reg_rtx (TYPE_MODE (type));
9126 else
9127 /* If target overlaps with op1, then either we need to force
9128 op1 into a pseudo (if target also overlaps with op0),
9129 or write the complex parts in reverse order. */
9130 switch (GET_CODE (target))
9132 case CONCAT:
9133 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9135 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9137 complex_expr_force_op1:
9138 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9139 emit_move_insn (temp, op1);
9140 op1 = temp;
9141 break;
9143 complex_expr_swap_order:
9144 /* Move the imaginary (op1) and real (op0) parts to their
9145 location. */
9146 write_complex_part (target, op1, true);
9147 write_complex_part (target, op0, false);
9149 return target;
9151 break;
9152 case MEM:
9153 temp = adjust_address_nv (target,
9154 GET_MODE_INNER (GET_MODE (target)), 0);
9155 if (reg_overlap_mentioned_p (temp, op1))
9157 machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9158 temp = adjust_address_nv (target, imode,
9159 GET_MODE_SIZE (imode));
9160 if (reg_overlap_mentioned_p (temp, op0))
9161 goto complex_expr_force_op1;
9162 goto complex_expr_swap_order;
9164 break;
9165 default:
9166 if (reg_overlap_mentioned_p (target, op1))
9168 if (reg_overlap_mentioned_p (target, op0))
9169 goto complex_expr_force_op1;
9170 goto complex_expr_swap_order;
9172 break;
9175 /* Move the real (op0) and imaginary (op1) parts to their location. */
9176 write_complex_part (target, op0, false);
9177 write_complex_part (target, op1, true);
9179 return target;
9181 case WIDEN_SUM_EXPR:
9183 tree oprnd0 = treeop0;
9184 tree oprnd1 = treeop1;
9186 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9187 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9188 target, unsignedp);
9189 return target;
9192 case REDUC_MAX_EXPR:
9193 case REDUC_MIN_EXPR:
9194 case REDUC_PLUS_EXPR:
9196 op0 = expand_normal (treeop0);
9197 this_optab = optab_for_tree_code (code, type, optab_default);
9198 machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9200 if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9202 struct expand_operand ops[2];
9203 enum insn_code icode = optab_handler (this_optab, vec_mode);
9205 create_output_operand (&ops[0], target, mode);
9206 create_input_operand (&ops[1], op0, vec_mode);
9207 if (maybe_expand_insn (icode, 2, ops))
9209 target = ops[0].value;
9210 if (GET_MODE (target) != mode)
9211 return gen_lowpart (tmode, target);
9212 return target;
9215 /* Fall back to optab with vector result, and then extract scalar. */
9216 this_optab = scalar_reduc_to_vector (this_optab, type);
9217 temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9218 gcc_assert (temp);
9219 /* The tree code produces a scalar result, but (somewhat by convention)
9220 the optab produces a vector with the result in element 0 if
9221 little-endian, or element N-1 if big-endian. So pull the scalar
9222 result out of that element. */
9223 int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9224 int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9225 temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9226 target, mode, mode, false);
9227 gcc_assert (temp);
9228 return temp;
9231 case VEC_UNPACK_HI_EXPR:
9232 case VEC_UNPACK_LO_EXPR:
9234 op0 = expand_normal (treeop0);
9235 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9236 target, unsignedp);
9237 gcc_assert (temp);
9238 return temp;
9241 case VEC_UNPACK_FLOAT_HI_EXPR:
9242 case VEC_UNPACK_FLOAT_LO_EXPR:
9244 op0 = expand_normal (treeop0);
9245 /* The signedness is determined from input operand. */
9246 temp = expand_widen_pattern_expr
9247 (ops, op0, NULL_RTX, NULL_RTX,
9248 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9250 gcc_assert (temp);
9251 return temp;
9254 case VEC_WIDEN_MULT_HI_EXPR:
9255 case VEC_WIDEN_MULT_LO_EXPR:
9256 case VEC_WIDEN_MULT_EVEN_EXPR:
9257 case VEC_WIDEN_MULT_ODD_EXPR:
9258 case VEC_WIDEN_LSHIFT_HI_EXPR:
9259 case VEC_WIDEN_LSHIFT_LO_EXPR:
9260 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9261 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9262 target, unsignedp);
9263 gcc_assert (target);
9264 return target;
9266 case VEC_PACK_TRUNC_EXPR:
9267 case VEC_PACK_SAT_EXPR:
9268 case VEC_PACK_FIX_TRUNC_EXPR:
9269 mode = TYPE_MODE (TREE_TYPE (treeop0));
9270 goto binop;
9272 case VEC_PERM_EXPR:
9273 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9274 op2 = expand_normal (treeop2);
9276 /* Careful here: if the target doesn't support integral vector modes,
9277 a constant selection vector could wind up smooshed into a normal
9278 integral constant. */
9279 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9281 tree sel_type = TREE_TYPE (treeop2);
9282 machine_mode vmode
9283 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9284 TYPE_VECTOR_SUBPARTS (sel_type));
9285 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9286 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9287 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9289 else
9290 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9292 temp = expand_vec_perm (mode, op0, op1, op2, target);
9293 gcc_assert (temp);
9294 return temp;
9296 case DOT_PROD_EXPR:
9298 tree oprnd0 = treeop0;
9299 tree oprnd1 = treeop1;
9300 tree oprnd2 = treeop2;
9301 rtx op2;
9303 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9304 op2 = expand_normal (oprnd2);
9305 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9306 target, unsignedp);
9307 return target;
9310 case SAD_EXPR:
9312 tree oprnd0 = treeop0;
9313 tree oprnd1 = treeop1;
9314 tree oprnd2 = treeop2;
9315 rtx op2;
9317 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9318 op2 = expand_normal (oprnd2);
9319 target = expand_widen_pattern_expr (ops, op0, op1, op2,
9320 target, unsignedp);
9321 return target;
9324 case REALIGN_LOAD_EXPR:
9326 tree oprnd0 = treeop0;
9327 tree oprnd1 = treeop1;
9328 tree oprnd2 = treeop2;
9329 rtx op2;
9331 this_optab = optab_for_tree_code (code, type, optab_default);
9332 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9333 op2 = expand_normal (oprnd2);
9334 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9335 target, unsignedp);
9336 gcc_assert (temp);
9337 return temp;
9340 case COND_EXPR:
9341 /* A COND_EXPR with its type being VOID_TYPE represents a
9342 conditional jump and is handled in
9343 expand_gimple_cond_expr. */
9344 gcc_assert (!VOID_TYPE_P (type));
9346 /* Note that COND_EXPRs whose type is a structure or union
9347 are required to be constructed to contain assignments of
9348 a temporary variable, so that we can evaluate them here
9349 for side effect only. If type is void, we must do likewise. */
9351 gcc_assert (!TREE_ADDRESSABLE (type)
9352 && !ignore
9353 && TREE_TYPE (treeop1) != void_type_node
9354 && TREE_TYPE (treeop2) != void_type_node);
9356 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9357 if (temp)
9358 return temp;
9360 /* If we are not to produce a result, we have no target. Otherwise,
9361 if a target was specified use it; it will not be used as an
9362 intermediate target unless it is safe. If no target, use a
9363 temporary. */
9365 if (modifier != EXPAND_STACK_PARM
9366 && original_target
9367 && safe_from_p (original_target, treeop0, 1)
9368 && GET_MODE (original_target) == mode
9369 && !MEM_P (original_target))
9370 temp = original_target;
9371 else
9372 temp = assign_temp (type, 0, 1);
9374 do_pending_stack_adjust ();
9375 NO_DEFER_POP;
9376 op0 = gen_label_rtx ();
9377 op1 = gen_label_rtx ();
9378 jumpifnot (treeop0, op0, -1);
9379 store_expr (treeop1, temp,
9380 modifier == EXPAND_STACK_PARM,
9381 false, false);
9383 emit_jump_insn (gen_jump (op1));
9384 emit_barrier ();
9385 emit_label (op0);
9386 store_expr (treeop2, temp,
9387 modifier == EXPAND_STACK_PARM,
9388 false, false);
9390 emit_label (op1);
9391 OK_DEFER_POP;
9392 return temp;
9394 case VEC_COND_EXPR:
9395 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9396 return target;
9398 default:
9399 gcc_unreachable ();
9402 /* Here to do an ordinary binary operator. */
9403 binop:
9404 expand_operands (treeop0, treeop1,
9405 subtarget, &op0, &op1, EXPAND_NORMAL);
9406 binop2:
9407 this_optab = optab_for_tree_code (code, type, optab_default);
9408 binop3:
9409 if (modifier == EXPAND_STACK_PARM)
9410 target = 0;
9411 temp = expand_binop (mode, this_optab, op0, op1, target,
9412 unsignedp, OPTAB_LIB_WIDEN);
9413 gcc_assert (temp);
9414 /* Bitwise operations do not need bitfield reduction as we expect their
9415 operands being properly truncated. */
9416 if (code == BIT_XOR_EXPR
9417 || code == BIT_AND_EXPR
9418 || code == BIT_IOR_EXPR)
9419 return temp;
9420 return REDUCE_BIT_FIELD (temp);
9422 #undef REDUCE_BIT_FIELD
9425 /* Return TRUE if expression STMT is suitable for replacement.
9426 Never consider memory loads as replaceable, because those don't ever lead
9427 into constant expressions. */
9429 static bool
9430 stmt_is_replaceable_p (gimple stmt)
9432 if (ssa_is_replaceable_p (stmt))
9434 /* Don't move around loads. */
9435 if (!gimple_assign_single_p (stmt)
9436 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9437 return true;
9439 return false;
9443 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9444 enum expand_modifier modifier, rtx *alt_rtl,
9445 bool inner_reference_p)
9447 rtx op0, op1, temp, decl_rtl;
9448 tree type;
9449 int unsignedp;
9450 machine_mode mode;
9451 enum tree_code code = TREE_CODE (exp);
9452 rtx subtarget, original_target;
9453 int ignore;
9454 tree context;
9455 bool reduce_bit_field;
9456 location_t loc = EXPR_LOCATION (exp);
9457 struct separate_ops ops;
9458 tree treeop0, treeop1, treeop2;
9459 tree ssa_name = NULL_TREE;
9460 gimple g;
9462 type = TREE_TYPE (exp);
9463 mode = TYPE_MODE (type);
9464 unsignedp = TYPE_UNSIGNED (type);
9466 treeop0 = treeop1 = treeop2 = NULL_TREE;
9467 if (!VL_EXP_CLASS_P (exp))
9468 switch (TREE_CODE_LENGTH (code))
9470 default:
9471 case 3: treeop2 = TREE_OPERAND (exp, 2);
9472 case 2: treeop1 = TREE_OPERAND (exp, 1);
9473 case 1: treeop0 = TREE_OPERAND (exp, 0);
9474 case 0: break;
9476 ops.code = code;
9477 ops.type = type;
9478 ops.op0 = treeop0;
9479 ops.op1 = treeop1;
9480 ops.op2 = treeop2;
9481 ops.location = loc;
9483 ignore = (target == const0_rtx
9484 || ((CONVERT_EXPR_CODE_P (code)
9485 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9486 && TREE_CODE (type) == VOID_TYPE));
9488 /* An operation in what may be a bit-field type needs the
9489 result to be reduced to the precision of the bit-field type,
9490 which is narrower than that of the type's mode. */
9491 reduce_bit_field = (!ignore
9492 && INTEGRAL_TYPE_P (type)
9493 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9495 /* If we are going to ignore this result, we need only do something
9496 if there is a side-effect somewhere in the expression. If there
9497 is, short-circuit the most common cases here. Note that we must
9498 not call expand_expr with anything but const0_rtx in case this
9499 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9501 if (ignore)
9503 if (! TREE_SIDE_EFFECTS (exp))
9504 return const0_rtx;
9506 /* Ensure we reference a volatile object even if value is ignored, but
9507 don't do this if all we are doing is taking its address. */
9508 if (TREE_THIS_VOLATILE (exp)
9509 && TREE_CODE (exp) != FUNCTION_DECL
9510 && mode != VOIDmode && mode != BLKmode
9511 && modifier != EXPAND_CONST_ADDRESS)
9513 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9514 if (MEM_P (temp))
9515 copy_to_reg (temp);
9516 return const0_rtx;
9519 if (TREE_CODE_CLASS (code) == tcc_unary
9520 || code == BIT_FIELD_REF
9521 || code == COMPONENT_REF
9522 || code == INDIRECT_REF)
9523 return expand_expr (treeop0, const0_rtx, VOIDmode,
9524 modifier);
9526 else if (TREE_CODE_CLASS (code) == tcc_binary
9527 || TREE_CODE_CLASS (code) == tcc_comparison
9528 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9530 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9531 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9532 return const0_rtx;
9535 target = 0;
9538 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9539 target = 0;
9541 /* Use subtarget as the target for operand 0 of a binary operation. */
9542 subtarget = get_subtarget (target);
9543 original_target = target;
9545 switch (code)
9547 case LABEL_DECL:
9549 tree function = decl_function_context (exp);
9551 temp = label_rtx (exp);
9552 temp = gen_rtx_LABEL_REF (Pmode, temp);
9554 if (function != current_function_decl
9555 && function != 0)
9556 LABEL_REF_NONLOCAL_P (temp) = 1;
9558 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9559 return temp;
9562 case SSA_NAME:
9563 /* ??? ivopts calls expander, without any preparation from
9564 out-of-ssa. So fake instructions as if this was an access to the
9565 base variable. This unnecessarily allocates a pseudo, see how we can
9566 reuse it, if partition base vars have it set already. */
9567 if (!currently_expanding_to_rtl)
9569 tree var = SSA_NAME_VAR (exp);
9570 if (var && DECL_RTL_SET_P (var))
9571 return DECL_RTL (var);
9572 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9573 LAST_VIRTUAL_REGISTER + 1);
9576 g = get_gimple_for_ssa_name (exp);
9577 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9578 if (g == NULL
9579 && modifier == EXPAND_INITIALIZER
9580 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9581 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9582 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9583 g = SSA_NAME_DEF_STMT (exp);
9584 if (g)
9586 rtx r;
9587 ops.code = gimple_assign_rhs_code (g);
9588 switch (get_gimple_rhs_class (ops.code))
9590 case GIMPLE_TERNARY_RHS:
9591 ops.op2 = gimple_assign_rhs3 (g);
9592 /* Fallthru */
9593 case GIMPLE_BINARY_RHS:
9594 ops.op1 = gimple_assign_rhs2 (g);
9596 /* Try to expand conditonal compare. */
9597 if (targetm.gen_ccmp_first)
9599 gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9600 r = expand_ccmp_expr (g);
9601 if (r)
9602 break;
9604 /* Fallthru */
9605 case GIMPLE_UNARY_RHS:
9606 ops.op0 = gimple_assign_rhs1 (g);
9607 ops.type = TREE_TYPE (gimple_assign_lhs (g));
9608 ops.location = gimple_location (g);
9609 r = expand_expr_real_2 (&ops, target, tmode, modifier);
9610 break;
9611 case GIMPLE_SINGLE_RHS:
9613 location_t saved_loc = curr_insn_location ();
9614 set_curr_insn_location (gimple_location (g));
9615 r = expand_expr_real (gimple_assign_rhs1 (g), target,
9616 tmode, modifier, NULL, inner_reference_p);
9617 set_curr_insn_location (saved_loc);
9618 break;
9620 default:
9621 gcc_unreachable ();
9623 if (REG_P (r) && !REG_EXPR (r))
9624 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9625 return r;
9628 ssa_name = exp;
9629 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9630 exp = SSA_NAME_VAR (ssa_name);
9631 goto expand_decl_rtl;
9633 case PARM_DECL:
9634 case VAR_DECL:
9635 /* If a static var's type was incomplete when the decl was written,
9636 but the type is complete now, lay out the decl now. */
9637 if (DECL_SIZE (exp) == 0
9638 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9639 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9640 layout_decl (exp, 0);
9642 /* ... fall through ... */
9644 case FUNCTION_DECL:
9645 case RESULT_DECL:
9646 decl_rtl = DECL_RTL (exp);
9647 expand_decl_rtl:
9648 gcc_assert (decl_rtl);
9649 decl_rtl = copy_rtx (decl_rtl);
9650 /* Record writes to register variables. */
9651 if (modifier == EXPAND_WRITE
9652 && REG_P (decl_rtl)
9653 && HARD_REGISTER_P (decl_rtl))
9654 add_to_hard_reg_set (&crtl->asm_clobbers,
9655 GET_MODE (decl_rtl), REGNO (decl_rtl));
9657 /* Ensure variable marked as used even if it doesn't go through
9658 a parser. If it hasn't be used yet, write out an external
9659 definition. */
9660 TREE_USED (exp) = 1;
9662 /* Show we haven't gotten RTL for this yet. */
9663 temp = 0;
9665 /* Variables inherited from containing functions should have
9666 been lowered by this point. */
9667 context = decl_function_context (exp);
9668 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9669 || context == current_function_decl
9670 || TREE_STATIC (exp)
9671 || DECL_EXTERNAL (exp)
9672 /* ??? C++ creates functions that are not TREE_STATIC. */
9673 || TREE_CODE (exp) == FUNCTION_DECL);
9675 /* This is the case of an array whose size is to be determined
9676 from its initializer, while the initializer is still being parsed.
9677 ??? We aren't parsing while expanding anymore. */
9679 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9680 temp = validize_mem (decl_rtl);
9682 /* If DECL_RTL is memory, we are in the normal case and the
9683 address is not valid, get the address into a register. */
9685 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9687 if (alt_rtl)
9688 *alt_rtl = decl_rtl;
9689 decl_rtl = use_anchored_address (decl_rtl);
9690 if (modifier != EXPAND_CONST_ADDRESS
9691 && modifier != EXPAND_SUM
9692 && !memory_address_addr_space_p (DECL_MODE (exp),
9693 XEXP (decl_rtl, 0),
9694 MEM_ADDR_SPACE (decl_rtl)))
9695 temp = replace_equiv_address (decl_rtl,
9696 copy_rtx (XEXP (decl_rtl, 0)));
9699 /* If we got something, return it. But first, set the alignment
9700 if the address is a register. */
9701 if (temp != 0)
9703 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9704 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9706 return temp;
9709 /* If the mode of DECL_RTL does not match that of the decl,
9710 there are two cases: we are dealing with a BLKmode value
9711 that is returned in a register, or we are dealing with
9712 a promoted value. In the latter case, return a SUBREG
9713 of the wanted mode, but mark it so that we know that it
9714 was already extended. */
9715 if (REG_P (decl_rtl)
9716 && DECL_MODE (exp) != BLKmode
9717 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9719 machine_mode pmode;
9721 /* Get the signedness to be used for this variable. Ensure we get
9722 the same mode we got when the variable was declared. */
9723 if (code == SSA_NAME
9724 && (g = SSA_NAME_DEF_STMT (ssa_name))
9725 && gimple_code (g) == GIMPLE_CALL
9726 && !gimple_call_internal_p (g))
9727 pmode = promote_function_mode (type, mode, &unsignedp,
9728 gimple_call_fntype (g),
9730 else
9731 pmode = promote_decl_mode (exp, &unsignedp);
9732 gcc_assert (GET_MODE (decl_rtl) == pmode);
9734 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9735 SUBREG_PROMOTED_VAR_P (temp) = 1;
9736 SUBREG_PROMOTED_SET (temp, unsignedp);
9737 return temp;
9740 return decl_rtl;
9742 case INTEGER_CST:
9743 /* Given that TYPE_PRECISION (type) is not always equal to
9744 GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9745 the former to the latter according to the signedness of the
9746 type. */
9747 temp = immed_wide_int_const (wide_int::from
9748 (exp,
9749 GET_MODE_PRECISION (TYPE_MODE (type)),
9750 TYPE_SIGN (type)),
9751 TYPE_MODE (type));
9752 return temp;
9754 case VECTOR_CST:
9756 tree tmp = NULL_TREE;
9757 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9758 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9759 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9760 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9761 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9762 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9763 return const_vector_from_tree (exp);
9764 if (GET_MODE_CLASS (mode) == MODE_INT)
9766 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9767 if (type_for_mode)
9768 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9770 if (!tmp)
9772 vec<constructor_elt, va_gc> *v;
9773 unsigned i;
9774 vec_alloc (v, VECTOR_CST_NELTS (exp));
9775 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9776 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9777 tmp = build_constructor (type, v);
9779 return expand_expr (tmp, ignore ? const0_rtx : target,
9780 tmode, modifier);
9783 case CONST_DECL:
9784 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9786 case REAL_CST:
9787 /* If optimized, generate immediate CONST_DOUBLE
9788 which will be turned into memory by reload if necessary.
9790 We used to force a register so that loop.c could see it. But
9791 this does not allow gen_* patterns to perform optimizations with
9792 the constants. It also produces two insns in cases like "x = 1.0;".
9793 On most machines, floating-point constants are not permitted in
9794 many insns, so we'd end up copying it to a register in any case.
9796 Now, we do the copying in expand_binop, if appropriate. */
9797 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9798 TYPE_MODE (TREE_TYPE (exp)));
9800 case FIXED_CST:
9801 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9802 TYPE_MODE (TREE_TYPE (exp)));
9804 case COMPLEX_CST:
9805 /* Handle evaluating a complex constant in a CONCAT target. */
9806 if (original_target && GET_CODE (original_target) == CONCAT)
9808 machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9809 rtx rtarg, itarg;
9811 rtarg = XEXP (original_target, 0);
9812 itarg = XEXP (original_target, 1);
9814 /* Move the real and imaginary parts separately. */
9815 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9816 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9818 if (op0 != rtarg)
9819 emit_move_insn (rtarg, op0);
9820 if (op1 != itarg)
9821 emit_move_insn (itarg, op1);
9823 return original_target;
9826 /* ... fall through ... */
9828 case STRING_CST:
9829 temp = expand_expr_constant (exp, 1, modifier);
9831 /* temp contains a constant address.
9832 On RISC machines where a constant address isn't valid,
9833 make some insns to get that address into a register. */
9834 if (modifier != EXPAND_CONST_ADDRESS
9835 && modifier != EXPAND_INITIALIZER
9836 && modifier != EXPAND_SUM
9837 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9838 MEM_ADDR_SPACE (temp)))
9839 return replace_equiv_address (temp,
9840 copy_rtx (XEXP (temp, 0)));
9841 return temp;
9843 case SAVE_EXPR:
9845 tree val = treeop0;
9846 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9847 inner_reference_p);
9849 if (!SAVE_EXPR_RESOLVED_P (exp))
9851 /* We can indeed still hit this case, typically via builtin
9852 expanders calling save_expr immediately before expanding
9853 something. Assume this means that we only have to deal
9854 with non-BLKmode values. */
9855 gcc_assert (GET_MODE (ret) != BLKmode);
9857 val = build_decl (curr_insn_location (),
9858 VAR_DECL, NULL, TREE_TYPE (exp));
9859 DECL_ARTIFICIAL (val) = 1;
9860 DECL_IGNORED_P (val) = 1;
9861 treeop0 = val;
9862 TREE_OPERAND (exp, 0) = treeop0;
9863 SAVE_EXPR_RESOLVED_P (exp) = 1;
9865 if (!CONSTANT_P (ret))
9866 ret = copy_to_reg (ret);
9867 SET_DECL_RTL (val, ret);
9870 return ret;
9874 case CONSTRUCTOR:
9875 /* If we don't need the result, just ensure we evaluate any
9876 subexpressions. */
9877 if (ignore)
9879 unsigned HOST_WIDE_INT idx;
9880 tree value;
9882 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9883 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9885 return const0_rtx;
9888 return expand_constructor (exp, target, modifier, false);
9890 case TARGET_MEM_REF:
9892 addr_space_t as
9893 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9894 enum insn_code icode;
9895 unsigned int align;
9897 op0 = addr_for_mem_ref (exp, as, true);
9898 op0 = memory_address_addr_space (mode, op0, as);
9899 temp = gen_rtx_MEM (mode, op0);
9900 set_mem_attributes (temp, exp, 0);
9901 set_mem_addr_space (temp, as);
9902 align = get_object_alignment (exp);
9903 if (modifier != EXPAND_WRITE
9904 && modifier != EXPAND_MEMORY
9905 && mode != BLKmode
9906 && align < GET_MODE_ALIGNMENT (mode)
9907 /* If the target does not have special handling for unaligned
9908 loads of mode then it can use regular moves for them. */
9909 && ((icode = optab_handler (movmisalign_optab, mode))
9910 != CODE_FOR_nothing))
9912 struct expand_operand ops[2];
9914 /* We've already validated the memory, and we're creating a
9915 new pseudo destination. The predicates really can't fail,
9916 nor can the generator. */
9917 create_output_operand (&ops[0], NULL_RTX, mode);
9918 create_fixed_operand (&ops[1], temp);
9919 expand_insn (icode, 2, ops);
9920 temp = ops[0].value;
9922 return temp;
9925 case MEM_REF:
9927 const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9928 addr_space_t as
9929 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9930 machine_mode address_mode;
9931 tree base = TREE_OPERAND (exp, 0);
9932 gimple def_stmt;
9933 enum insn_code icode;
9934 unsigned align;
9935 /* Handle expansion of non-aliased memory with non-BLKmode. That
9936 might end up in a register. */
9937 if (mem_ref_refers_to_non_mem_p (exp))
9939 HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9940 base = TREE_OPERAND (base, 0);
9941 if (offset == 0
9942 && !reverse
9943 && tree_fits_uhwi_p (TYPE_SIZE (type))
9944 && (GET_MODE_BITSIZE (DECL_MODE (base))
9945 == tree_to_uhwi (TYPE_SIZE (type))))
9946 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9947 target, tmode, modifier);
9948 if (TYPE_MODE (type) == BLKmode)
9950 temp = assign_stack_temp (DECL_MODE (base),
9951 GET_MODE_SIZE (DECL_MODE (base)));
9952 store_expr (base, temp, 0, false, false);
9953 temp = adjust_address (temp, BLKmode, offset);
9954 set_mem_size (temp, int_size_in_bytes (type));
9955 return temp;
9957 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9958 bitsize_int (offset * BITS_PER_UNIT));
9959 REF_REVERSE_STORAGE_ORDER (exp) = reverse;
9960 return expand_expr (exp, target, tmode, modifier);
9962 address_mode = targetm.addr_space.address_mode (as);
9963 base = TREE_OPERAND (exp, 0);
9964 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9966 tree mask = gimple_assign_rhs2 (def_stmt);
9967 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9968 gimple_assign_rhs1 (def_stmt), mask);
9969 TREE_OPERAND (exp, 0) = base;
9971 align = get_object_alignment (exp);
9972 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9973 op0 = memory_address_addr_space (mode, op0, as);
9974 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9976 rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9977 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9978 op0 = memory_address_addr_space (mode, op0, as);
9980 temp = gen_rtx_MEM (mode, op0);
9981 set_mem_attributes (temp, exp, 0);
9982 set_mem_addr_space (temp, as);
9983 if (TREE_THIS_VOLATILE (exp))
9984 MEM_VOLATILE_P (temp) = 1;
9985 if (modifier != EXPAND_WRITE
9986 && modifier != EXPAND_MEMORY
9987 && !inner_reference_p
9988 && mode != BLKmode
9989 && align < GET_MODE_ALIGNMENT (mode))
9991 if ((icode = optab_handler (movmisalign_optab, mode))
9992 != CODE_FOR_nothing)
9994 struct expand_operand ops[2];
9996 /* We've already validated the memory, and we're creating a
9997 new pseudo destination. The predicates really can't fail,
9998 nor can the generator. */
9999 create_output_operand (&ops[0], NULL_RTX, mode);
10000 create_fixed_operand (&ops[1], temp);
10001 expand_insn (icode, 2, ops);
10002 temp = ops[0].value;
10004 else if (SLOW_UNALIGNED_ACCESS (mode, align))
10005 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10006 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10007 (modifier == EXPAND_STACK_PARM
10008 ? NULL_RTX : target),
10009 mode, mode, false);
10011 if (reverse && modifier != EXPAND_WRITE)
10012 temp = flip_storage_order (mode, temp);
10013 return temp;
10016 case ARRAY_REF:
10019 tree array = treeop0;
10020 tree index = treeop1;
10021 tree init;
10023 /* Fold an expression like: "foo"[2].
10024 This is not done in fold so it won't happen inside &.
10025 Don't fold if this is for wide characters since it's too
10026 difficult to do correctly and this is a very rare case. */
10028 if (modifier != EXPAND_CONST_ADDRESS
10029 && modifier != EXPAND_INITIALIZER
10030 && modifier != EXPAND_MEMORY)
10032 tree t = fold_read_from_constant_string (exp);
10034 if (t)
10035 return expand_expr (t, target, tmode, modifier);
10038 /* If this is a constant index into a constant array,
10039 just get the value from the array. Handle both the cases when
10040 we have an explicit constructor and when our operand is a variable
10041 that was declared const. */
10043 if (modifier != EXPAND_CONST_ADDRESS
10044 && modifier != EXPAND_INITIALIZER
10045 && modifier != EXPAND_MEMORY
10046 && TREE_CODE (array) == CONSTRUCTOR
10047 && ! TREE_SIDE_EFFECTS (array)
10048 && TREE_CODE (index) == INTEGER_CST)
10050 unsigned HOST_WIDE_INT ix;
10051 tree field, value;
10053 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10054 field, value)
10055 if (tree_int_cst_equal (field, index))
10057 if (!TREE_SIDE_EFFECTS (value))
10058 return expand_expr (fold (value), target, tmode, modifier);
10059 break;
10063 else if (optimize >= 1
10064 && modifier != EXPAND_CONST_ADDRESS
10065 && modifier != EXPAND_INITIALIZER
10066 && modifier != EXPAND_MEMORY
10067 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10068 && TREE_CODE (index) == INTEGER_CST
10069 && (TREE_CODE (array) == VAR_DECL
10070 || TREE_CODE (array) == CONST_DECL)
10071 && (init = ctor_for_folding (array)) != error_mark_node)
10073 if (init == NULL_TREE)
10075 tree value = build_zero_cst (type);
10076 if (TREE_CODE (value) == CONSTRUCTOR)
10078 /* If VALUE is a CONSTRUCTOR, this optimization is only
10079 useful if this doesn't store the CONSTRUCTOR into
10080 memory. If it does, it is more efficient to just
10081 load the data from the array directly. */
10082 rtx ret = expand_constructor (value, target,
10083 modifier, true);
10084 if (ret == NULL_RTX)
10085 value = NULL_TREE;
10088 if (value)
10089 return expand_expr (value, target, tmode, modifier);
10091 else if (TREE_CODE (init) == CONSTRUCTOR)
10093 unsigned HOST_WIDE_INT ix;
10094 tree field, value;
10096 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10097 field, value)
10098 if (tree_int_cst_equal (field, index))
10100 if (TREE_SIDE_EFFECTS (value))
10101 break;
10103 if (TREE_CODE (value) == CONSTRUCTOR)
10105 /* If VALUE is a CONSTRUCTOR, this
10106 optimization is only useful if
10107 this doesn't store the CONSTRUCTOR
10108 into memory. If it does, it is more
10109 efficient to just load the data from
10110 the array directly. */
10111 rtx ret = expand_constructor (value, target,
10112 modifier, true);
10113 if (ret == NULL_RTX)
10114 break;
10117 return
10118 expand_expr (fold (value), target, tmode, modifier);
10121 else if (TREE_CODE (init) == STRING_CST)
10123 tree low_bound = array_ref_low_bound (exp);
10124 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10126 /* Optimize the special case of a zero lower bound.
10128 We convert the lower bound to sizetype to avoid problems
10129 with constant folding. E.g. suppose the lower bound is
10130 1 and its mode is QI. Without the conversion
10131 (ARRAY + (INDEX - (unsigned char)1))
10132 becomes
10133 (ARRAY + (-(unsigned char)1) + INDEX)
10134 which becomes
10135 (ARRAY + 255 + INDEX). Oops! */
10136 if (!integer_zerop (low_bound))
10137 index1 = size_diffop_loc (loc, index1,
10138 fold_convert_loc (loc, sizetype,
10139 low_bound));
10141 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10143 tree type = TREE_TYPE (TREE_TYPE (init));
10144 machine_mode mode = TYPE_MODE (type);
10146 if (GET_MODE_CLASS (mode) == MODE_INT
10147 && GET_MODE_SIZE (mode) == 1)
10148 return gen_int_mode (TREE_STRING_POINTER (init)
10149 [TREE_INT_CST_LOW (index1)],
10150 mode);
10155 goto normal_inner_ref;
10157 case COMPONENT_REF:
10158 /* If the operand is a CONSTRUCTOR, we can just extract the
10159 appropriate field if it is present. */
10160 if (TREE_CODE (treeop0) == CONSTRUCTOR)
10162 unsigned HOST_WIDE_INT idx;
10163 tree field, value;
10165 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10166 idx, field, value)
10167 if (field == treeop1
10168 /* We can normally use the value of the field in the
10169 CONSTRUCTOR. However, if this is a bitfield in
10170 an integral mode that we can fit in a HOST_WIDE_INT,
10171 we must mask only the number of bits in the bitfield,
10172 since this is done implicitly by the constructor. If
10173 the bitfield does not meet either of those conditions,
10174 we can't do this optimization. */
10175 && (! DECL_BIT_FIELD (field)
10176 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10177 && (GET_MODE_PRECISION (DECL_MODE (field))
10178 <= HOST_BITS_PER_WIDE_INT))))
10180 if (DECL_BIT_FIELD (field)
10181 && modifier == EXPAND_STACK_PARM)
10182 target = 0;
10183 op0 = expand_expr (value, target, tmode, modifier);
10184 if (DECL_BIT_FIELD (field))
10186 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10187 machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10189 if (TYPE_UNSIGNED (TREE_TYPE (field)))
10191 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10192 imode);
10193 op0 = expand_and (imode, op0, op1, target);
10195 else
10197 int count = GET_MODE_PRECISION (imode) - bitsize;
10199 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10200 target, 0);
10201 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10202 target, 0);
10206 return op0;
10209 goto normal_inner_ref;
10211 case BIT_FIELD_REF:
10212 case ARRAY_RANGE_REF:
10213 normal_inner_ref:
10215 machine_mode mode1, mode2;
10216 HOST_WIDE_INT bitsize, bitpos;
10217 tree offset;
10218 int reversep, volatilep = 0, must_force_mem;
10219 tree tem
10220 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10221 &unsignedp, &reversep, &volatilep, true);
10222 rtx orig_op0, memloc;
10223 bool clear_mem_expr = false;
10225 /* If we got back the original object, something is wrong. Perhaps
10226 we are evaluating an expression too early. In any event, don't
10227 infinitely recurse. */
10228 gcc_assert (tem != exp);
10230 /* If TEM's type is a union of variable size, pass TARGET to the inner
10231 computation, since it will need a temporary and TARGET is known
10232 to have to do. This occurs in unchecked conversion in Ada. */
10233 orig_op0 = op0
10234 = expand_expr_real (tem,
10235 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10236 && COMPLETE_TYPE_P (TREE_TYPE (tem))
10237 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10238 != INTEGER_CST)
10239 && modifier != EXPAND_STACK_PARM
10240 ? target : NULL_RTX),
10241 VOIDmode,
10242 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10243 NULL, true);
10245 /* If the field has a mode, we want to access it in the
10246 field's mode, not the computed mode.
10247 If a MEM has VOIDmode (external with incomplete type),
10248 use BLKmode for it instead. */
10249 if (MEM_P (op0))
10251 if (mode1 != VOIDmode)
10252 op0 = adjust_address (op0, mode1, 0);
10253 else if (GET_MODE (op0) == VOIDmode)
10254 op0 = adjust_address (op0, BLKmode, 0);
10257 mode2
10258 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10260 /* If we have either an offset, a BLKmode result, or a reference
10261 outside the underlying object, we must force it to memory.
10262 Such a case can occur in Ada if we have unchecked conversion
10263 of an expression from a scalar type to an aggregate type or
10264 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10265 passed a partially uninitialized object or a view-conversion
10266 to a larger size. */
10267 must_force_mem = (offset
10268 || mode1 == BLKmode
10269 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10271 /* Handle CONCAT first. */
10272 if (GET_CODE (op0) == CONCAT && !must_force_mem)
10274 if (bitpos == 0
10275 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
10276 return op0;
10277 if (bitpos == 0
10278 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10279 && bitsize)
10281 op0 = XEXP (op0, 0);
10282 mode2 = GET_MODE (op0);
10284 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10285 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10286 && bitpos
10287 && bitsize)
10289 op0 = XEXP (op0, 1);
10290 bitpos = 0;
10291 mode2 = GET_MODE (op0);
10293 else
10294 /* Otherwise force into memory. */
10295 must_force_mem = 1;
10298 /* If this is a constant, put it in a register if it is a legitimate
10299 constant and we don't need a memory reference. */
10300 if (CONSTANT_P (op0)
10301 && mode2 != BLKmode
10302 && targetm.legitimate_constant_p (mode2, op0)
10303 && !must_force_mem)
10304 op0 = force_reg (mode2, op0);
10306 /* Otherwise, if this is a constant, try to force it to the constant
10307 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
10308 is a legitimate constant. */
10309 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10310 op0 = validize_mem (memloc);
10312 /* Otherwise, if this is a constant or the object is not in memory
10313 and need be, put it there. */
10314 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10316 memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10317 emit_move_insn (memloc, op0);
10318 op0 = memloc;
10319 clear_mem_expr = true;
10322 if (offset)
10324 machine_mode address_mode;
10325 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10326 EXPAND_SUM);
10328 gcc_assert (MEM_P (op0));
10330 address_mode = get_address_mode (op0);
10331 if (GET_MODE (offset_rtx) != address_mode)
10333 /* We cannot be sure that the RTL in offset_rtx is valid outside
10334 of a memory address context, so force it into a register
10335 before attempting to convert it to the desired mode. */
10336 offset_rtx = force_operand (offset_rtx, NULL_RTX);
10337 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10340 /* See the comment in expand_assignment for the rationale. */
10341 if (mode1 != VOIDmode
10342 && bitpos != 0
10343 && bitsize > 0
10344 && (bitpos % bitsize) == 0
10345 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10346 && MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10348 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10349 bitpos = 0;
10352 op0 = offset_address (op0, offset_rtx,
10353 highest_pow2_factor (offset));
10356 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10357 record its alignment as BIGGEST_ALIGNMENT. */
10358 if (MEM_P (op0) && bitpos == 0 && offset != 0
10359 && is_aligning_offset (offset, tem))
10360 set_mem_align (op0, BIGGEST_ALIGNMENT);
10362 /* Don't forget about volatility even if this is a bitfield. */
10363 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10365 if (op0 == orig_op0)
10366 op0 = copy_rtx (op0);
10368 MEM_VOLATILE_P (op0) = 1;
10371 /* In cases where an aligned union has an unaligned object
10372 as a field, we might be extracting a BLKmode value from
10373 an integer-mode (e.g., SImode) object. Handle this case
10374 by doing the extract into an object as wide as the field
10375 (which we know to be the width of a basic mode), then
10376 storing into memory, and changing the mode to BLKmode. */
10377 if (mode1 == VOIDmode
10378 || REG_P (op0) || GET_CODE (op0) == SUBREG
10379 || (mode1 != BLKmode && ! direct_load[(int) mode1]
10380 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10381 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10382 && modifier != EXPAND_CONST_ADDRESS
10383 && modifier != EXPAND_INITIALIZER
10384 && modifier != EXPAND_MEMORY)
10385 /* If the bitfield is volatile and the bitsize
10386 is narrower than the access size of the bitfield,
10387 we need to extract bitfields from the access. */
10388 || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10389 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10390 && mode1 != BLKmode
10391 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10392 /* If the field isn't aligned enough to fetch as a memref,
10393 fetch it as a bit field. */
10394 || (mode1 != BLKmode
10395 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10396 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10397 || (MEM_P (op0)
10398 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10399 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10400 && modifier != EXPAND_MEMORY
10401 && ((modifier == EXPAND_CONST_ADDRESS
10402 || modifier == EXPAND_INITIALIZER)
10403 ? STRICT_ALIGNMENT
10404 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10405 || (bitpos % BITS_PER_UNIT != 0)))
10406 /* If the type and the field are a constant size and the
10407 size of the type isn't the same size as the bitfield,
10408 we must use bitfield operations. */
10409 || (bitsize >= 0
10410 && TYPE_SIZE (TREE_TYPE (exp))
10411 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10412 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10413 bitsize)))
10415 machine_mode ext_mode = mode;
10417 if (ext_mode == BLKmode
10418 && ! (target != 0 && MEM_P (op0)
10419 && MEM_P (target)
10420 && bitpos % BITS_PER_UNIT == 0))
10421 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10423 if (ext_mode == BLKmode)
10425 if (target == 0)
10426 target = assign_temp (type, 1, 1);
10428 /* ??? Unlike the similar test a few lines below, this one is
10429 very likely obsolete. */
10430 if (bitsize == 0)
10431 return target;
10433 /* In this case, BITPOS must start at a byte boundary and
10434 TARGET, if specified, must be a MEM. */
10435 gcc_assert (MEM_P (op0)
10436 && (!target || MEM_P (target))
10437 && !(bitpos % BITS_PER_UNIT));
10439 emit_block_move (target,
10440 adjust_address (op0, VOIDmode,
10441 bitpos / BITS_PER_UNIT),
10442 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10443 / BITS_PER_UNIT),
10444 (modifier == EXPAND_STACK_PARM
10445 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10447 return target;
10450 /* If we have nothing to extract, the result will be 0 for targets
10451 with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise. Always
10452 return 0 for the sake of consistency, as reading a zero-sized
10453 bitfield is valid in Ada and the value is fully specified. */
10454 if (bitsize == 0)
10455 return const0_rtx;
10457 op0 = validize_mem (op0);
10459 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10460 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10462 /* If the result has a record type and the extraction is done in
10463 an integral mode, then the field may be not aligned on a byte
10464 boundary; in this case, if it has reverse storage order, it
10465 needs to be extracted as a scalar field with reverse storage
10466 order and put back into memory order afterwards. */
10467 if (TREE_CODE (type) == RECORD_TYPE
10468 && GET_MODE_CLASS (ext_mode) == MODE_INT)
10469 reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10471 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10472 (modifier == EXPAND_STACK_PARM
10473 ? NULL_RTX : target),
10474 ext_mode, ext_mode, reversep);
10476 /* If the result has a record type and the mode of OP0 is an
10477 integral mode then, if BITSIZE is narrower than this mode
10478 and this is a big-endian machine, we must put the field
10479 into the high-order bits. And we must also put it back
10480 into memory order if it has been previously reversed. */
10481 if (TREE_CODE (type) == RECORD_TYPE
10482 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10484 HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10486 if (bitsize < size
10487 && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10488 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10489 size - bitsize, op0, 1);
10491 if (reversep)
10492 op0 = flip_storage_order (GET_MODE (op0), op0);
10495 /* If the result type is BLKmode, store the data into a temporary
10496 of the appropriate type, but with the mode corresponding to the
10497 mode for the data we have (op0's mode). */
10498 if (mode == BLKmode)
10500 rtx new_rtx
10501 = assign_stack_temp_for_type (ext_mode,
10502 GET_MODE_BITSIZE (ext_mode),
10503 type);
10504 emit_move_insn (new_rtx, op0);
10505 op0 = copy_rtx (new_rtx);
10506 PUT_MODE (op0, BLKmode);
10509 return op0;
10512 /* If the result is BLKmode, use that to access the object
10513 now as well. */
10514 if (mode == BLKmode)
10515 mode1 = BLKmode;
10517 /* Get a reference to just this component. */
10518 if (modifier == EXPAND_CONST_ADDRESS
10519 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10520 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10521 else
10522 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10524 if (op0 == orig_op0)
10525 op0 = copy_rtx (op0);
10527 set_mem_attributes (op0, exp, 0);
10529 if (REG_P (XEXP (op0, 0)))
10530 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10532 /* If op0 is a temporary because the original expressions was forced
10533 to memory, clear MEM_EXPR so that the original expression cannot
10534 be marked as addressable through MEM_EXPR of the temporary. */
10535 if (clear_mem_expr)
10536 set_mem_expr (op0, NULL_TREE);
10538 MEM_VOLATILE_P (op0) |= volatilep;
10540 if (reversep)
10541 op0 = flip_storage_order (mode1, op0);
10543 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10544 || modifier == EXPAND_CONST_ADDRESS
10545 || modifier == EXPAND_INITIALIZER)
10546 return op0;
10548 if (target == 0)
10549 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10551 convert_move (target, op0, unsignedp);
10552 return target;
10555 case OBJ_TYPE_REF:
10556 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10558 case CALL_EXPR:
10559 /* All valid uses of __builtin_va_arg_pack () are removed during
10560 inlining. */
10561 if (CALL_EXPR_VA_ARG_PACK (exp))
10562 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10564 tree fndecl = get_callee_fndecl (exp), attr;
10566 if (fndecl
10567 && (attr = lookup_attribute ("error",
10568 DECL_ATTRIBUTES (fndecl))) != NULL)
10569 error ("%Kcall to %qs declared with attribute error: %s",
10570 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10571 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10572 if (fndecl
10573 && (attr = lookup_attribute ("warning",
10574 DECL_ATTRIBUTES (fndecl))) != NULL)
10575 warning_at (tree_nonartificial_location (exp),
10576 0, "%Kcall to %qs declared with attribute warning: %s",
10577 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10578 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10580 /* Check for a built-in function. */
10581 if (fndecl && DECL_BUILT_IN (fndecl))
10583 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10584 if (CALL_WITH_BOUNDS_P (exp))
10585 return expand_builtin_with_bounds (exp, target, subtarget,
10586 tmode, ignore);
10587 else
10588 return expand_builtin (exp, target, subtarget, tmode, ignore);
10591 return expand_call (exp, target, ignore);
10593 case VIEW_CONVERT_EXPR:
10594 op0 = NULL_RTX;
10596 /* If we are converting to BLKmode, try to avoid an intermediate
10597 temporary by fetching an inner memory reference. */
10598 if (mode == BLKmode
10599 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10600 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10601 && handled_component_p (treeop0))
10603 machine_mode mode1;
10604 HOST_WIDE_INT bitsize, bitpos;
10605 tree offset;
10606 int unsignedp, reversep, volatilep = 0;
10607 tree tem
10608 = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10609 &unsignedp, &reversep, &volatilep, true);
10610 rtx orig_op0;
10612 /* ??? We should work harder and deal with non-zero offsets. */
10613 if (!offset
10614 && (bitpos % BITS_PER_UNIT) == 0
10615 && !reversep
10616 && bitsize >= 0
10617 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10619 /* See the normal_inner_ref case for the rationale. */
10620 orig_op0
10621 = expand_expr_real (tem,
10622 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10623 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10624 != INTEGER_CST)
10625 && modifier != EXPAND_STACK_PARM
10626 ? target : NULL_RTX),
10627 VOIDmode,
10628 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10629 NULL, true);
10631 if (MEM_P (orig_op0))
10633 op0 = orig_op0;
10635 /* Get a reference to just this component. */
10636 if (modifier == EXPAND_CONST_ADDRESS
10637 || modifier == EXPAND_SUM
10638 || modifier == EXPAND_INITIALIZER)
10639 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10640 else
10641 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10643 if (op0 == orig_op0)
10644 op0 = copy_rtx (op0);
10646 set_mem_attributes (op0, treeop0, 0);
10647 if (REG_P (XEXP (op0, 0)))
10648 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10650 MEM_VOLATILE_P (op0) |= volatilep;
10655 if (!op0)
10656 op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10657 NULL, inner_reference_p);
10659 /* If the input and output modes are both the same, we are done. */
10660 if (mode == GET_MODE (op0))
10662 /* If neither mode is BLKmode, and both modes are the same size
10663 then we can use gen_lowpart. */
10664 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10665 && (GET_MODE_PRECISION (mode)
10666 == GET_MODE_PRECISION (GET_MODE (op0)))
10667 && !COMPLEX_MODE_P (GET_MODE (op0)))
10669 if (GET_CODE (op0) == SUBREG)
10670 op0 = force_reg (GET_MODE (op0), op0);
10671 temp = gen_lowpart_common (mode, op0);
10672 if (temp)
10673 op0 = temp;
10674 else
10676 if (!REG_P (op0) && !MEM_P (op0))
10677 op0 = force_reg (GET_MODE (op0), op0);
10678 op0 = gen_lowpart (mode, op0);
10681 /* If both types are integral, convert from one mode to the other. */
10682 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10683 op0 = convert_modes (mode, GET_MODE (op0), op0,
10684 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10685 /* If the output type is a bit-field type, do an extraction. */
10686 else if (reduce_bit_field)
10687 return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10688 TYPE_UNSIGNED (type), NULL_RTX,
10689 mode, mode, false);
10690 /* As a last resort, spill op0 to memory, and reload it in a
10691 different mode. */
10692 else if (!MEM_P (op0))
10694 /* If the operand is not a MEM, force it into memory. Since we
10695 are going to be changing the mode of the MEM, don't call
10696 force_const_mem for constants because we don't allow pool
10697 constants to change mode. */
10698 tree inner_type = TREE_TYPE (treeop0);
10700 gcc_assert (!TREE_ADDRESSABLE (exp));
10702 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10703 target
10704 = assign_stack_temp_for_type
10705 (TYPE_MODE (inner_type),
10706 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10708 emit_move_insn (target, op0);
10709 op0 = target;
10712 /* If OP0 is (now) a MEM, we need to deal with alignment issues. If the
10713 output type is such that the operand is known to be aligned, indicate
10714 that it is. Otherwise, we need only be concerned about alignment for
10715 non-BLKmode results. */
10716 if (MEM_P (op0))
10718 enum insn_code icode;
10720 if (TYPE_ALIGN_OK (type))
10722 /* ??? Copying the MEM without substantially changing it might
10723 run afoul of the code handling volatile memory references in
10724 store_expr, which assumes that TARGET is returned unmodified
10725 if it has been used. */
10726 op0 = copy_rtx (op0);
10727 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10729 else if (modifier != EXPAND_WRITE
10730 && modifier != EXPAND_MEMORY
10731 && !inner_reference_p
10732 && mode != BLKmode
10733 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10735 /* If the target does have special handling for unaligned
10736 loads of mode then use them. */
10737 if ((icode = optab_handler (movmisalign_optab, mode))
10738 != CODE_FOR_nothing)
10740 rtx reg, insn;
10742 op0 = adjust_address (op0, mode, 0);
10743 /* We've already validated the memory, and we're creating a
10744 new pseudo destination. The predicates really can't
10745 fail. */
10746 reg = gen_reg_rtx (mode);
10748 /* Nor can the insn generator. */
10749 insn = GEN_FCN (icode) (reg, op0);
10750 emit_insn (insn);
10751 return reg;
10753 else if (STRICT_ALIGNMENT)
10755 tree inner_type = TREE_TYPE (treeop0);
10756 HOST_WIDE_INT temp_size
10757 = MAX (int_size_in_bytes (inner_type),
10758 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10759 rtx new_rtx
10760 = assign_stack_temp_for_type (mode, temp_size, type);
10761 rtx new_with_op0_mode
10762 = adjust_address (new_rtx, GET_MODE (op0), 0);
10764 gcc_assert (!TREE_ADDRESSABLE (exp));
10766 if (GET_MODE (op0) == BLKmode)
10767 emit_block_move (new_with_op0_mode, op0,
10768 GEN_INT (GET_MODE_SIZE (mode)),
10769 (modifier == EXPAND_STACK_PARM
10770 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10771 else
10772 emit_move_insn (new_with_op0_mode, op0);
10774 op0 = new_rtx;
10778 op0 = adjust_address (op0, mode, 0);
10781 return op0;
10783 case MODIFY_EXPR:
10785 tree lhs = treeop0;
10786 tree rhs = treeop1;
10787 gcc_assert (ignore);
10789 /* Check for |= or &= of a bitfield of size one into another bitfield
10790 of size 1. In this case, (unless we need the result of the
10791 assignment) we can do this more efficiently with a
10792 test followed by an assignment, if necessary.
10794 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10795 things change so we do, this code should be enhanced to
10796 support it. */
10797 if (TREE_CODE (lhs) == COMPONENT_REF
10798 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10799 || TREE_CODE (rhs) == BIT_AND_EXPR)
10800 && TREE_OPERAND (rhs, 0) == lhs
10801 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10802 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10803 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10805 rtx_code_label *label = gen_label_rtx ();
10806 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10807 do_jump (TREE_OPERAND (rhs, 1),
10808 value ? label : 0,
10809 value ? 0 : label, -1);
10810 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10811 false);
10812 do_pending_stack_adjust ();
10813 emit_label (label);
10814 return const0_rtx;
10817 expand_assignment (lhs, rhs, false);
10818 return const0_rtx;
10821 case ADDR_EXPR:
10822 return expand_expr_addr_expr (exp, target, tmode, modifier);
10824 case REALPART_EXPR:
10825 op0 = expand_normal (treeop0);
10826 return read_complex_part (op0, false);
10828 case IMAGPART_EXPR:
10829 op0 = expand_normal (treeop0);
10830 return read_complex_part (op0, true);
10832 case RETURN_EXPR:
10833 case LABEL_EXPR:
10834 case GOTO_EXPR:
10835 case SWITCH_EXPR:
10836 case ASM_EXPR:
10837 /* Expanded in cfgexpand.c. */
10838 gcc_unreachable ();
10840 case TRY_CATCH_EXPR:
10841 case CATCH_EXPR:
10842 case EH_FILTER_EXPR:
10843 case TRY_FINALLY_EXPR:
10844 /* Lowered by tree-eh.c. */
10845 gcc_unreachable ();
10847 case WITH_CLEANUP_EXPR:
10848 case CLEANUP_POINT_EXPR:
10849 case TARGET_EXPR:
10850 case CASE_LABEL_EXPR:
10851 case VA_ARG_EXPR:
10852 case BIND_EXPR:
10853 case INIT_EXPR:
10854 case CONJ_EXPR:
10855 case COMPOUND_EXPR:
10856 case PREINCREMENT_EXPR:
10857 case PREDECREMENT_EXPR:
10858 case POSTINCREMENT_EXPR:
10859 case POSTDECREMENT_EXPR:
10860 case LOOP_EXPR:
10861 case EXIT_EXPR:
10862 case COMPOUND_LITERAL_EXPR:
10863 /* Lowered by gimplify.c. */
10864 gcc_unreachable ();
10866 case FDESC_EXPR:
10867 /* Function descriptors are not valid except for as
10868 initialization constants, and should not be expanded. */
10869 gcc_unreachable ();
10871 case WITH_SIZE_EXPR:
10872 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10873 have pulled out the size to use in whatever context it needed. */
10874 return expand_expr_real (treeop0, original_target, tmode,
10875 modifier, alt_rtl, inner_reference_p);
10877 default:
10878 return expand_expr_real_2 (&ops, target, tmode, modifier);
10882 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10883 signedness of TYPE), possibly returning the result in TARGET. */
10884 static rtx
10885 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10887 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10888 if (target && GET_MODE (target) != GET_MODE (exp))
10889 target = 0;
10890 /* For constant values, reduce using build_int_cst_type. */
10891 if (CONST_INT_P (exp))
10893 HOST_WIDE_INT value = INTVAL (exp);
10894 tree t = build_int_cst_type (type, value);
10895 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10897 else if (TYPE_UNSIGNED (type))
10899 machine_mode mode = GET_MODE (exp);
10900 rtx mask = immed_wide_int_const
10901 (wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10902 return expand_and (mode, exp, mask, target);
10904 else
10906 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10907 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10908 exp, count, target, 0);
10909 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10910 exp, count, target, 0);
10914 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10915 when applied to the address of EXP produces an address known to be
10916 aligned more than BIGGEST_ALIGNMENT. */
10918 static int
10919 is_aligning_offset (const_tree offset, const_tree exp)
10921 /* Strip off any conversions. */
10922 while (CONVERT_EXPR_P (offset))
10923 offset = TREE_OPERAND (offset, 0);
10925 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10926 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10927 if (TREE_CODE (offset) != BIT_AND_EXPR
10928 || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10929 || compare_tree_int (TREE_OPERAND (offset, 1),
10930 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10931 || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10932 return 0;
10934 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10935 It must be NEGATE_EXPR. Then strip any more conversions. */
10936 offset = TREE_OPERAND (offset, 0);
10937 while (CONVERT_EXPR_P (offset))
10938 offset = TREE_OPERAND (offset, 0);
10940 if (TREE_CODE (offset) != NEGATE_EXPR)
10941 return 0;
10943 offset = TREE_OPERAND (offset, 0);
10944 while (CONVERT_EXPR_P (offset))
10945 offset = TREE_OPERAND (offset, 0);
10947 /* This must now be the address of EXP. */
10948 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10951 /* Return the tree node if an ARG corresponds to a string constant or zero
10952 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10953 in bytes within the string that ARG is accessing. The type of the
10954 offset will be `sizetype'. */
10956 tree
10957 string_constant (tree arg, tree *ptr_offset)
10959 tree array, offset, lower_bound;
10960 STRIP_NOPS (arg);
10962 if (TREE_CODE (arg) == ADDR_EXPR)
10964 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10966 *ptr_offset = size_zero_node;
10967 return TREE_OPERAND (arg, 0);
10969 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10971 array = TREE_OPERAND (arg, 0);
10972 offset = size_zero_node;
10974 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10976 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10977 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10978 if (TREE_CODE (array) != STRING_CST
10979 && TREE_CODE (array) != VAR_DECL)
10980 return 0;
10982 /* Check if the array has a nonzero lower bound. */
10983 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10984 if (!integer_zerop (lower_bound))
10986 /* If the offset and base aren't both constants, return 0. */
10987 if (TREE_CODE (lower_bound) != INTEGER_CST)
10988 return 0;
10989 if (TREE_CODE (offset) != INTEGER_CST)
10990 return 0;
10991 /* Adjust offset by the lower bound. */
10992 offset = size_diffop (fold_convert (sizetype, offset),
10993 fold_convert (sizetype, lower_bound));
10996 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10998 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10999 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11000 if (TREE_CODE (array) != ADDR_EXPR)
11001 return 0;
11002 array = TREE_OPERAND (array, 0);
11003 if (TREE_CODE (array) != STRING_CST
11004 && TREE_CODE (array) != VAR_DECL)
11005 return 0;
11007 else
11008 return 0;
11010 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11012 tree arg0 = TREE_OPERAND (arg, 0);
11013 tree arg1 = TREE_OPERAND (arg, 1);
11015 STRIP_NOPS (arg0);
11016 STRIP_NOPS (arg1);
11018 if (TREE_CODE (arg0) == ADDR_EXPR
11019 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11020 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11022 array = TREE_OPERAND (arg0, 0);
11023 offset = arg1;
11025 else if (TREE_CODE (arg1) == ADDR_EXPR
11026 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11027 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11029 array = TREE_OPERAND (arg1, 0);
11030 offset = arg0;
11032 else
11033 return 0;
11035 else
11036 return 0;
11038 if (TREE_CODE (array) == STRING_CST)
11040 *ptr_offset = fold_convert (sizetype, offset);
11041 return array;
11043 else if (TREE_CODE (array) == VAR_DECL
11044 || TREE_CODE (array) == CONST_DECL)
11046 int length;
11047 tree init = ctor_for_folding (array);
11049 /* Variables initialized to string literals can be handled too. */
11050 if (init == error_mark_node
11051 || !init
11052 || TREE_CODE (init) != STRING_CST)
11053 return 0;
11055 /* Avoid const char foo[4] = "abcde"; */
11056 if (DECL_SIZE_UNIT (array) == NULL_TREE
11057 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11058 || (length = TREE_STRING_LENGTH (init)) <= 0
11059 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11060 return 0;
11062 /* If variable is bigger than the string literal, OFFSET must be constant
11063 and inside of the bounds of the string literal. */
11064 offset = fold_convert (sizetype, offset);
11065 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11066 && (! tree_fits_uhwi_p (offset)
11067 || compare_tree_int (offset, length) >= 0))
11068 return 0;
11070 *ptr_offset = offset;
11071 return init;
11074 return 0;
11077 /* Generate code to calculate OPS, and exploded expression
11078 using a store-flag instruction and return an rtx for the result.
11079 OPS reflects a comparison.
11081 If TARGET is nonzero, store the result there if convenient.
11083 Return zero if there is no suitable set-flag instruction
11084 available on this machine.
11086 Once expand_expr has been called on the arguments of the comparison,
11087 we are committed to doing the store flag, since it is not safe to
11088 re-evaluate the expression. We emit the store-flag insn by calling
11089 emit_store_flag, but only expand the arguments if we have a reason
11090 to believe that emit_store_flag will be successful. If we think that
11091 it will, but it isn't, we have to simulate the store-flag with a
11092 set/jump/set sequence. */
11094 static rtx
11095 do_store_flag (sepops ops, rtx target, machine_mode mode)
11097 enum rtx_code code;
11098 tree arg0, arg1, type;
11099 tree tem;
11100 machine_mode operand_mode;
11101 int unsignedp;
11102 rtx op0, op1;
11103 rtx subtarget = target;
11104 location_t loc = ops->location;
11106 arg0 = ops->op0;
11107 arg1 = ops->op1;
11109 /* Don't crash if the comparison was erroneous. */
11110 if (arg0 == error_mark_node || arg1 == error_mark_node)
11111 return const0_rtx;
11113 type = TREE_TYPE (arg0);
11114 operand_mode = TYPE_MODE (type);
11115 unsignedp = TYPE_UNSIGNED (type);
11117 /* We won't bother with BLKmode store-flag operations because it would mean
11118 passing a lot of information to emit_store_flag. */
11119 if (operand_mode == BLKmode)
11120 return 0;
11122 /* We won't bother with store-flag operations involving function pointers
11123 when function pointers must be canonicalized before comparisons. */
11124 #ifdef HAVE_canonicalize_funcptr_for_compare
11125 if (HAVE_canonicalize_funcptr_for_compare
11126 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11127 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11128 == FUNCTION_TYPE))
11129 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11130 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11131 == FUNCTION_TYPE))))
11132 return 0;
11133 #endif
11135 STRIP_NOPS (arg0);
11136 STRIP_NOPS (arg1);
11138 /* For vector typed comparisons emit code to generate the desired
11139 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
11140 expander for this. */
11141 if (TREE_CODE (ops->type) == VECTOR_TYPE)
11143 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11144 tree if_true = constant_boolean_node (true, ops->type);
11145 tree if_false = constant_boolean_node (false, ops->type);
11146 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11149 /* Get the rtx comparison code to use. We know that EXP is a comparison
11150 operation of some type. Some comparisons against 1 and -1 can be
11151 converted to comparisons with zero. Do so here so that the tests
11152 below will be aware that we have a comparison with zero. These
11153 tests will not catch constants in the first operand, but constants
11154 are rarely passed as the first operand. */
11156 switch (ops->code)
11158 case EQ_EXPR:
11159 code = EQ;
11160 break;
11161 case NE_EXPR:
11162 code = NE;
11163 break;
11164 case LT_EXPR:
11165 if (integer_onep (arg1))
11166 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11167 else
11168 code = unsignedp ? LTU : LT;
11169 break;
11170 case LE_EXPR:
11171 if (! unsignedp && integer_all_onesp (arg1))
11172 arg1 = integer_zero_node, code = LT;
11173 else
11174 code = unsignedp ? LEU : LE;
11175 break;
11176 case GT_EXPR:
11177 if (! unsignedp && integer_all_onesp (arg1))
11178 arg1 = integer_zero_node, code = GE;
11179 else
11180 code = unsignedp ? GTU : GT;
11181 break;
11182 case GE_EXPR:
11183 if (integer_onep (arg1))
11184 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11185 else
11186 code = unsignedp ? GEU : GE;
11187 break;
11189 case UNORDERED_EXPR:
11190 code = UNORDERED;
11191 break;
11192 case ORDERED_EXPR:
11193 code = ORDERED;
11194 break;
11195 case UNLT_EXPR:
11196 code = UNLT;
11197 break;
11198 case UNLE_EXPR:
11199 code = UNLE;
11200 break;
11201 case UNGT_EXPR:
11202 code = UNGT;
11203 break;
11204 case UNGE_EXPR:
11205 code = UNGE;
11206 break;
11207 case UNEQ_EXPR:
11208 code = UNEQ;
11209 break;
11210 case LTGT_EXPR:
11211 code = LTGT;
11212 break;
11214 default:
11215 gcc_unreachable ();
11218 /* Put a constant second. */
11219 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11220 || TREE_CODE (arg0) == FIXED_CST)
11222 tem = arg0; arg0 = arg1; arg1 = tem;
11223 code = swap_condition (code);
11226 /* If this is an equality or inequality test of a single bit, we can
11227 do this by shifting the bit being tested to the low-order bit and
11228 masking the result with the constant 1. If the condition was EQ,
11229 we xor it with 1. This does not require an scc insn and is faster
11230 than an scc insn even if we have it.
11232 The code to make this transformation was moved into fold_single_bit_test,
11233 so we just call into the folder and expand its result. */
11235 if ((code == NE || code == EQ)
11236 && integer_zerop (arg1)
11237 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11239 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11240 if (srcstmt
11241 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11243 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11244 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11245 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11246 gimple_assign_rhs1 (srcstmt),
11247 gimple_assign_rhs2 (srcstmt));
11248 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11249 if (temp)
11250 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11254 if (! get_subtarget (target)
11255 || GET_MODE (subtarget) != operand_mode)
11256 subtarget = 0;
11258 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11260 if (target == 0)
11261 target = gen_reg_rtx (mode);
11263 /* Try a cstore if possible. */
11264 return emit_store_flag_force (target, code, op0, op1,
11265 operand_mode, unsignedp,
11266 (TYPE_PRECISION (ops->type) == 1
11267 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11271 /* Stubs in case we haven't got a casesi insn. */
11272 #ifndef HAVE_casesi
11273 # define HAVE_casesi 0
11274 # define gen_casesi(a, b, c, d, e) (0)
11275 # define CODE_FOR_casesi CODE_FOR_nothing
11276 #endif
11278 /* Attempt to generate a casesi instruction. Returns 1 if successful,
11279 0 otherwise (i.e. if there is no casesi instruction).
11281 DEFAULT_PROBABILITY is the probability of jumping to the default
11282 label. */
11284 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11285 rtx table_label, rtx default_label, rtx fallback_label,
11286 int default_probability)
11288 struct expand_operand ops[5];
11289 machine_mode index_mode = SImode;
11290 rtx op1, op2, index;
11292 if (! HAVE_casesi)
11293 return 0;
11295 /* Convert the index to SImode. */
11296 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11298 machine_mode omode = TYPE_MODE (index_type);
11299 rtx rangertx = expand_normal (range);
11301 /* We must handle the endpoints in the original mode. */
11302 index_expr = build2 (MINUS_EXPR, index_type,
11303 index_expr, minval);
11304 minval = integer_zero_node;
11305 index = expand_normal (index_expr);
11306 if (default_label)
11307 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11308 omode, 1, default_label,
11309 default_probability);
11310 /* Now we can safely truncate. */
11311 index = convert_to_mode (index_mode, index, 0);
11313 else
11315 if (TYPE_MODE (index_type) != index_mode)
11317 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11318 index_expr = fold_convert (index_type, index_expr);
11321 index = expand_normal (index_expr);
11324 do_pending_stack_adjust ();
11326 op1 = expand_normal (minval);
11327 op2 = expand_normal (range);
11329 create_input_operand (&ops[0], index, index_mode);
11330 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11331 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11332 create_fixed_operand (&ops[3], table_label);
11333 create_fixed_operand (&ops[4], (default_label
11334 ? default_label
11335 : fallback_label));
11336 expand_jump_insn (CODE_FOR_casesi, 5, ops);
11337 return 1;
11340 /* Attempt to generate a tablejump instruction; same concept. */
11341 #ifndef HAVE_tablejump
11342 #define HAVE_tablejump 0
11343 #define gen_tablejump(x, y) (0)
11344 #endif
11346 /* Subroutine of the next function.
11348 INDEX is the value being switched on, with the lowest value
11349 in the table already subtracted.
11350 MODE is its expected mode (needed if INDEX is constant).
11351 RANGE is the length of the jump table.
11352 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11354 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11355 index value is out of range.
11356 DEFAULT_PROBABILITY is the probability of jumping to
11357 the default label. */
11359 static void
11360 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11361 rtx default_label, int default_probability)
11363 rtx temp, vector;
11365 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11366 cfun->cfg->max_jumptable_ents = INTVAL (range);
11368 /* Do an unsigned comparison (in the proper mode) between the index
11369 expression and the value which represents the length of the range.
11370 Since we just finished subtracting the lower bound of the range
11371 from the index expression, this comparison allows us to simultaneously
11372 check that the original index expression value is both greater than
11373 or equal to the minimum value of the range and less than or equal to
11374 the maximum value of the range. */
11376 if (default_label)
11377 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11378 default_label, default_probability);
11381 /* If index is in range, it must fit in Pmode.
11382 Convert to Pmode so we can index with it. */
11383 if (mode != Pmode)
11384 index = convert_to_mode (Pmode, index, 1);
11386 /* Don't let a MEM slip through, because then INDEX that comes
11387 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11388 and break_out_memory_refs will go to work on it and mess it up. */
11389 #ifdef PIC_CASE_VECTOR_ADDRESS
11390 if (flag_pic && !REG_P (index))
11391 index = copy_to_mode_reg (Pmode, index);
11392 #endif
11394 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11395 GET_MODE_SIZE, because this indicates how large insns are. The other
11396 uses should all be Pmode, because they are addresses. This code
11397 could fail if addresses and insns are not the same size. */
11398 index = simplify_gen_binary (MULT, Pmode, index,
11399 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11400 Pmode));
11401 index = simplify_gen_binary (PLUS, Pmode, index,
11402 gen_rtx_LABEL_REF (Pmode, table_label));
11404 #ifdef PIC_CASE_VECTOR_ADDRESS
11405 if (flag_pic)
11406 index = PIC_CASE_VECTOR_ADDRESS (index);
11407 else
11408 #endif
11409 index = memory_address (CASE_VECTOR_MODE, index);
11410 temp = gen_reg_rtx (CASE_VECTOR_MODE);
11411 vector = gen_const_mem (CASE_VECTOR_MODE, index);
11412 convert_move (temp, vector, 0);
11414 emit_jump_insn (gen_tablejump (temp, table_label));
11416 /* If we are generating PIC code or if the table is PC-relative, the
11417 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
11418 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11419 emit_barrier ();
11423 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11424 rtx table_label, rtx default_label, int default_probability)
11426 rtx index;
11428 if (! HAVE_tablejump)
11429 return 0;
11431 index_expr = fold_build2 (MINUS_EXPR, index_type,
11432 fold_convert (index_type, index_expr),
11433 fold_convert (index_type, minval));
11434 index = expand_normal (index_expr);
11435 do_pending_stack_adjust ();
11437 do_tablejump (index, TYPE_MODE (index_type),
11438 convert_modes (TYPE_MODE (index_type),
11439 TYPE_MODE (TREE_TYPE (range)),
11440 expand_normal (range),
11441 TYPE_UNSIGNED (TREE_TYPE (range))),
11442 table_label, default_label, default_probability);
11443 return 1;
11446 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11447 static rtx
11448 const_vector_from_tree (tree exp)
11450 rtvec v;
11451 unsigned i;
11452 int units;
11453 tree elt;
11454 machine_mode inner, mode;
11456 mode = TYPE_MODE (TREE_TYPE (exp));
11458 if (initializer_zerop (exp))
11459 return CONST0_RTX (mode);
11461 units = GET_MODE_NUNITS (mode);
11462 inner = GET_MODE_INNER (mode);
11464 v = rtvec_alloc (units);
11466 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11468 elt = VECTOR_CST_ELT (exp, i);
11470 if (TREE_CODE (elt) == REAL_CST)
11471 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11472 inner);
11473 else if (TREE_CODE (elt) == FIXED_CST)
11474 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11475 inner);
11476 else
11477 RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11480 return gen_rtx_CONST_VECTOR (mode, v);
11483 /* Build a decl for a personality function given a language prefix. */
11485 tree
11486 build_personality_function (const char *lang)
11488 const char *unwind_and_version;
11489 tree decl, type;
11490 char *name;
11492 switch (targetm_common.except_unwind_info (&global_options))
11494 case UI_NONE:
11495 return NULL;
11496 case UI_SJLJ:
11497 unwind_and_version = "_sj0";
11498 break;
11499 case UI_DWARF2:
11500 case UI_TARGET:
11501 unwind_and_version = "_v0";
11502 break;
11503 case UI_SEH:
11504 unwind_and_version = "_seh0";
11505 break;
11506 default:
11507 gcc_unreachable ();
11510 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11512 type = build_function_type_list (integer_type_node, integer_type_node,
11513 long_long_unsigned_type_node,
11514 ptr_type_node, ptr_type_node, NULL_TREE);
11515 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11516 get_identifier (name), type);
11517 DECL_ARTIFICIAL (decl) = 1;
11518 DECL_EXTERNAL (decl) = 1;
11519 TREE_PUBLIC (decl) = 1;
11521 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11522 are the flags assigned by targetm.encode_section_info. */
11523 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11525 return decl;
11528 /* Extracts the personality function of DECL and returns the corresponding
11529 libfunc. */
11532 get_personality_function (tree decl)
11534 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11535 enum eh_personality_kind pk;
11537 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11538 if (pk == eh_personality_none)
11539 return NULL;
11541 if (!personality
11542 && pk == eh_personality_any)
11543 personality = lang_hooks.eh_personality ();
11545 if (pk == eh_personality_lang)
11546 gcc_assert (personality != NULL_TREE);
11548 return XEXP (DECL_RTL (personality), 0);
11551 /* Returns a tree for the size of EXP in bytes. */
11553 static tree
11554 tree_expr_size (const_tree exp)
11556 if (DECL_P (exp)
11557 && DECL_SIZE_UNIT (exp) != 0)
11558 return DECL_SIZE_UNIT (exp);
11559 else
11560 return size_in_bytes (TREE_TYPE (exp));
11563 /* Return an rtx for the size in bytes of the value of EXP. */
11566 expr_size (tree exp)
11568 tree size;
11570 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11571 size = TREE_OPERAND (exp, 1);
11572 else
11574 size = tree_expr_size (exp);
11575 gcc_assert (size);
11576 gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11579 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11582 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11583 if the size can vary or is larger than an integer. */
11585 static HOST_WIDE_INT
11586 int_expr_size (tree exp)
11588 tree size;
11590 if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11591 size = TREE_OPERAND (exp, 1);
11592 else
11594 size = tree_expr_size (exp);
11595 gcc_assert (size);
11598 if (size == 0 || !tree_fits_shwi_p (size))
11599 return -1;
11601 return tree_to_shwi (size);
11604 #include "gt-expr.h"