gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988-2013 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-config.h"
33 #include "insn-attr.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "langhooks.h"
43 #include "intl.h"
44 #include "tm_p.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "gimple-ssa.h"
48 #include "cgraph.h"
49 #include "tree-ssanames.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "tree-ssa-live.h"
56 #include "tree-outof-ssa.h"
57 #include "target-globals.h"
58 #include "params.h"
59 #include "tree-ssa-address.h"
61 /* Decide whether a function's arguments should be processed
62 from first to last or from last to first.
64 They should if the stack and args grow in opposite directions, but
65 only if we have push insns. */
67 #ifdef PUSH_ROUNDING
69 #ifndef PUSH_ARGS_REVERSED
70 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
71 #define PUSH_ARGS_REVERSED /* If it's last to first. */
72 #endif
73 #endif
75 #endif
77 #ifndef STACK_PUSH_CODE
78 #ifdef STACK_GROWS_DOWNWARD
79 #define STACK_PUSH_CODE PRE_DEC
80 #else
81 #define STACK_PUSH_CODE PRE_INC
82 #endif
83 #endif
86 /* If this is nonzero, we do not bother generating VOLATILE
87 around volatile memory references, and we are willing to
88 output indirect addresses. If cse is to follow, we reject
89 indirect addresses so a useful potential cse is generated;
90 if it is used only once, instruction combination will produce
91 the same indirect address eventually. */
92 int cse_not_expected;
94 /* This structure is used by move_by_pieces to describe the move to
95 be performed. */
 96 struct move_by_pieces_d
 97 {
 98 rtx to;
 99 rtx to_addr;
 100 int autinc_to;
 101 int explicit_inc_to;
 102 rtx from;
 103 rtx from_addr;
 104 int autinc_from;
 105 int explicit_inc_from;
 106 unsigned HOST_WIDE_INT len;
 107 HOST_WIDE_INT offset;
 108 int reverse;
 109 };
111 /* This structure is used by store_by_pieces to describe the clear to
112 be performed. */
 114 struct store_by_pieces_d
 115 {
 116 rtx to;
 117 rtx to_addr;
 118 int autinc_to;
 119 int explicit_inc_to;
 120 unsigned HOST_WIDE_INT len;
 121 HOST_WIDE_INT offset;
 122 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
 123 void *constfundata;
 124 int reverse;
 125 };
127 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
146 unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
147 enum machine_mode, tree, alias_set_type, bool);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
169 #endif
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
177 #endif
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
185 #endif
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
193 #endif
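/* Illustrative sketch (not part of the original file): these predicates
   gate the expansion strategy for a known-size block operation, roughly as
   emit_block_move_hints does further below:

     if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
     else
       ... fall back to a movmem pattern, a memcpy libcall, or a loop ...  */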
195 /* This is run to set up which modes can be used
196 directly in memory and to initialize the block move optab. It is run
197 at the beginning of compilation and when the target is reinitialized. */
199 void
200 init_expr_target (void)
202 rtx insn, pat;
203 enum machine_mode mode;
204 int num_clobbers;
205 rtx mem, mem1;
206 rtx reg;
208 /* Try indexing by frame ptr and try by stack ptr.
209 It is known that on the Convex the stack ptr isn't a valid index.
210 With luck, one or the other is valid on any machine. */
211 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
212 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
214 /* A scratch register we can modify in-place below to avoid
215 useless RTL allocations. */
216 reg = gen_rtx_REG (VOIDmode, -1);
218 insn = rtx_alloc (INSN);
219 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
220 PATTERN (insn) = pat;
222 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
223 mode = (enum machine_mode) ((int) mode + 1))
225 int regno;
227 direct_load[(int) mode] = direct_store[(int) mode] = 0;
228 PUT_MODE (mem, mode);
229 PUT_MODE (mem1, mode);
230 PUT_MODE (reg, mode);
232 /* See if there is some register that can be used in this mode and
233 directly loaded or stored from memory. */
235 if (mode != VOIDmode && mode != BLKmode)
236 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
237 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
238 regno++)
240 if (! HARD_REGNO_MODE_OK (regno, mode))
241 continue;
243 SET_REGNO (reg, regno);
245 SET_SRC (pat) = mem;
246 SET_DEST (pat) = reg;
247 if (recog (pat, insn, &num_clobbers) >= 0)
248 direct_load[(int) mode] = 1;
250 SET_SRC (pat) = mem1;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
255 SET_SRC (pat) = reg;
256 SET_DEST (pat) = mem;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_store[(int) mode] = 1;
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem1;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
267 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
269 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
270 mode = GET_MODE_WIDER_MODE (mode))
272 enum machine_mode srcmode;
273 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
274 srcmode = GET_MODE_WIDER_MODE (srcmode))
276 enum insn_code ic;
278 ic = can_extend_p (mode, srcmode, 0);
279 if (ic == CODE_FOR_nothing)
280 continue;
282 PUT_MODE (mem, srcmode);
284 if (insn_operand_matches (ic, 1, mem))
285 float_extend_from_mem[mode][srcmode] = true;
290 /* This is run at the start of compiling a function. */
292 void
293 init_expr (void)
295 memset (&crtl->expr, 0, sizeof (crtl->expr));
298 /* Copy data from FROM to TO, where the machine modes are not the same.
299 Both modes may be integer, or both may be floating, or both may be
300 fixed-point.
301 UNSIGNEDP should be nonzero if FROM is an unsigned type.
302 This causes zero-extension instead of sign-extension. */
304 void
305 convert_move (rtx to, rtx from, int unsignedp)
307 enum machine_mode to_mode = GET_MODE (to);
308 enum machine_mode from_mode = GET_MODE (from);
309 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
310 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
311 enum insn_code code;
312 rtx libcall;
314 /* rtx code for making an equivalent value. */
315 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
316 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
319 gcc_assert (to_real == from_real);
320 gcc_assert (to_mode != BLKmode);
321 gcc_assert (from_mode != BLKmode);
323 /* If the source and destination are already the same, then there's
324 nothing to do. */
325 if (to == from)
326 return;
328 /* If FROM is a SUBREG that indicates that we have already done at least
329 the required extension, strip it. We don't handle such SUBREGs as
330 TO here. */
332 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
333 && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
334 >= GET_MODE_PRECISION (to_mode))
335 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
336 from = gen_lowpart (to_mode, from), from_mode = to_mode;
338 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
340 if (to_mode == from_mode
341 || (from_mode == VOIDmode && CONSTANT_P (from)))
343 emit_move_insn (to, from);
344 return;
347 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
349 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
351 if (VECTOR_MODE_P (to_mode))
352 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
353 else
354 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
356 emit_move_insn (to, from);
357 return;
360 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
362 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
363 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
364 return;
367 if (to_real)
369 rtx value, insns;
370 convert_optab tab;
372 gcc_assert ((GET_MODE_PRECISION (from_mode)
373 != GET_MODE_PRECISION (to_mode))
374 || (DECIMAL_FLOAT_MODE_P (from_mode)
375 != DECIMAL_FLOAT_MODE_P (to_mode)));
377 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
378 /* Conversion between decimal float and binary float, same size. */
379 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
380 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
381 tab = sext_optab;
382 else
383 tab = trunc_optab;
385 /* Try converting directly if the insn is supported. */
387 code = convert_optab_handler (tab, to_mode, from_mode);
388 if (code != CODE_FOR_nothing)
390 emit_unop_insn (code, to, from,
391 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
392 return;
395 /* Otherwise use a libcall. */
396 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
398 /* Is this conversion implemented yet? */
399 gcc_assert (libcall);
401 start_sequence ();
402 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
403 1, from, from_mode);
404 insns = get_insns ();
405 end_sequence ();
406 emit_libcall_block (insns, to, value,
407 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
408 from)
409 : gen_rtx_FLOAT_EXTEND (to_mode, from));
410 return;
413 /* Handle pointer conversion. */ /* SPEE 900220. */
414 /* Targets are expected to provide conversion insns between PxImode and
415 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
416 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
418 enum machine_mode full_mode
419 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
421 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
422 != CODE_FOR_nothing);
424 if (full_mode != from_mode)
425 from = convert_to_mode (full_mode, from, unsignedp);
426 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
427 to, from, UNKNOWN);
428 return;
430 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
432 rtx new_from;
433 enum machine_mode full_mode
434 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
435 convert_optab ctab = unsignedp ? zext_optab : sext_optab;
436 enum insn_code icode;
438 icode = convert_optab_handler (ctab, full_mode, from_mode);
439 gcc_assert (icode != CODE_FOR_nothing);
441 if (to_mode == full_mode)
443 emit_unop_insn (icode, to, from, UNKNOWN);
444 return;
447 new_from = gen_reg_rtx (full_mode);
448 emit_unop_insn (icode, new_from, from, UNKNOWN);
450 /* else proceed to integer conversions below. */
451 from_mode = full_mode;
452 from = new_from;
455 /* Make sure both are fixed-point modes or both are not. */
456 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
457 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
458 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
460 /* If we widen from_mode to to_mode and they are in the same class,
461 we won't saturate the result.
462 Otherwise, always saturate the result to play safe. */
463 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
464 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
465 expand_fixed_convert (to, from, 0, 0);
466 else
467 expand_fixed_convert (to, from, 0, 1);
468 return;
471 /* Now both modes are integers. */
473 /* Handle expanding beyond a word. */
474 if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
475 && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
477 rtx insns;
478 rtx lowpart;
479 rtx fill_value;
480 rtx lowfrom;
481 int i;
482 enum machine_mode lowpart_mode;
483 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
485 /* Try converting directly if the insn is supported. */
486 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
487 != CODE_FOR_nothing)
489 /* If FROM is a SUBREG, put it into a register. Do this
490 so that we always generate the same set of insns for
491 better cse'ing; if an intermediate assignment occurred,
492 we won't be doing the operation directly on the SUBREG. */
493 if (optimize > 0 && GET_CODE (from) == SUBREG)
494 from = force_reg (from_mode, from);
495 emit_unop_insn (code, to, from, equiv_code);
496 return;
498 /* Next, try converting via full word. */
499 else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
500 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
501 != CODE_FOR_nothing))
503 rtx word_to = gen_reg_rtx (word_mode);
504 if (REG_P (to))
506 if (reg_overlap_mentioned_p (to, from))
507 from = force_reg (from_mode, from);
508 emit_clobber (to);
510 convert_move (word_to, from, unsignedp);
511 emit_unop_insn (code, to, word_to, equiv_code);
512 return;
515 /* No special multiword conversion insn; do it by hand. */
516 start_sequence ();
 518 /* Since we will turn this into a no conflict block, we must ensure that
 519 the source does not overlap the target, so force it into an isolated
 520 register if it might. Likewise for any MEM input, since the
521 conversion sequence might require several references to it and we
522 must ensure we're getting the same value every time. */
524 if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
525 from = force_reg (from_mode, from);
527 /* Get a copy of FROM widened to a word, if necessary. */
528 if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
529 lowpart_mode = word_mode;
530 else
531 lowpart_mode = from_mode;
533 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
535 lowpart = gen_lowpart (lowpart_mode, to);
536 emit_move_insn (lowpart, lowfrom);
538 /* Compute the value to put in each remaining word. */
539 if (unsignedp)
540 fill_value = const0_rtx;
541 else
542 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
543 LT, lowfrom, const0_rtx,
544 VOIDmode, 0, -1);
546 /* Fill the remaining words. */
547 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
549 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
550 rtx subword = operand_subword (to, index, 1, to_mode);
552 gcc_assert (subword);
554 if (fill_value != subword)
555 emit_move_insn (subword, fill_value);
558 insns = get_insns ();
559 end_sequence ();
561 emit_insn (insns);
562 return;
565 /* Truncating multi-word to a word or less. */
566 if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
567 && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
569 if (!((MEM_P (from)
570 && ! MEM_VOLATILE_P (from)
571 && direct_load[(int) to_mode]
572 && ! mode_dependent_address_p (XEXP (from, 0),
573 MEM_ADDR_SPACE (from)))
574 || REG_P (from)
575 || GET_CODE (from) == SUBREG))
576 from = force_reg (from_mode, from);
577 convert_move (to, gen_lowpart (word_mode, from), 0);
578 return;
581 /* Now follow all the conversions between integers
582 no more than a word long. */
584 /* For truncation, usually we can just refer to FROM in a narrower mode. */
585 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
586 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
588 if (!((MEM_P (from)
589 && ! MEM_VOLATILE_P (from)
590 && direct_load[(int) to_mode]
591 && ! mode_dependent_address_p (XEXP (from, 0),
592 MEM_ADDR_SPACE (from)))
593 || REG_P (from)
594 || GET_CODE (from) == SUBREG))
595 from = force_reg (from_mode, from);
596 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
597 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
598 from = copy_to_reg (from);
599 emit_move_insn (to, gen_lowpart (to_mode, from));
600 return;
603 /* Handle extension. */
604 if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
606 /* Convert directly if that works. */
607 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
608 != CODE_FOR_nothing)
610 emit_unop_insn (code, to, from, equiv_code);
611 return;
613 else
615 enum machine_mode intermediate;
616 rtx tmp;
617 int shift_amount;
619 /* Search for a mode to convert via. */
620 for (intermediate = from_mode; intermediate != VOIDmode;
621 intermediate = GET_MODE_WIDER_MODE (intermediate))
622 if (((can_extend_p (to_mode, intermediate, unsignedp)
623 != CODE_FOR_nothing)
624 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
625 && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
626 && (can_extend_p (intermediate, from_mode, unsignedp)
627 != CODE_FOR_nothing))
629 convert_move (to, convert_to_mode (intermediate, from,
630 unsignedp), unsignedp);
631 return;
634 /* No suitable intermediate mode.
635 Generate what we need with shifts. */
636 shift_amount = (GET_MODE_PRECISION (to_mode)
637 - GET_MODE_PRECISION (from_mode));
638 from = gen_lowpart (to_mode, force_reg (from_mode, from));
639 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
640 to, unsignedp);
641 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
642 to, unsignedp);
643 if (tmp != to)
644 emit_move_insn (to, tmp);
645 return;
649 /* Support special truncate insns for certain modes. */
650 if (convert_optab_handler (trunc_optab, to_mode,
651 from_mode) != CODE_FOR_nothing)
653 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
654 to, from, UNKNOWN);
655 return;
658 /* Handle truncation of volatile memrefs, and so on;
659 the things that couldn't be truncated directly,
660 and for which there was no special instruction.
662 ??? Code above formerly short-circuited this, for most integer
663 mode pairs, with a force_reg in from_mode followed by a recursive
664 call to this routine. Appears always to have been wrong. */
665 if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
667 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
668 emit_move_insn (to, temp);
669 return;
672 /* Mode combination is not recognized. */
673 gcc_unreachable ();
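/* Illustrative sketch (not part of the original file): a typical caller
   widens a value by allocating a destination in the wider mode and letting
   convert_move choose the extension strategy:

     rtx narrow = ...;                   /* assumed to be an SImode value  */
     rtx wide = gen_reg_rtx (DImode);
     convert_move (wide, narrow, 1);     /* UNSIGNEDP == 1 => zero-extend  */  */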
676 /* Return an rtx for a value that would result
677 from converting X to mode MODE.
678 Both X and MODE may be floating, or both integer.
679 UNSIGNEDP is nonzero if X is an unsigned value.
680 This can be done by referring to a part of X in place
681 or by copying to a new temporary with conversion. */
 683 rtx
 684 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
686 return convert_modes (mode, VOIDmode, x, unsignedp);
689 /* Return an rtx for a value that would result
690 from converting X from mode OLDMODE to mode MODE.
691 Both modes may be floating, or both integer.
692 UNSIGNEDP is nonzero if X is an unsigned value.
694 This can be done by referring to a part of X in place
695 or by copying to a new temporary with conversion.
697 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
 699 rtx
 700 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
702 rtx temp;
704 /* If FROM is a SUBREG that indicates that we have already done at least
705 the required extension, strip it. */
707 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
708 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
709 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
710 x = gen_lowpart (mode, x);
712 if (GET_MODE (x) != VOIDmode)
713 oldmode = GET_MODE (x);
715 if (mode == oldmode)
716 return x;
718 /* There is one case that we must handle specially: If we are converting
719 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
720 we are to interpret the constant as unsigned, gen_lowpart will do
 721 the wrong thing if the constant appears negative. What we want to do is
722 make the high-order word of the constant zero, not all ones. */
724 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
725 && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
726 && CONST_INT_P (x) && INTVAL (x) < 0)
728 double_int val = double_int::from_uhwi (INTVAL (x));
730 /* We need to zero extend VAL. */
731 if (oldmode != VOIDmode)
732 val = val.zext (GET_MODE_BITSIZE (oldmode));
734 return immed_double_int_const (val, mode);
737 /* We can do this with a gen_lowpart if both desired and current modes
738 are integer, and this is either a constant integer, a register, or a
739 non-volatile MEM. Except for the constant case where MODE is no
740 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
742 if ((CONST_INT_P (x)
743 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
744 || (GET_MODE_CLASS (mode) == MODE_INT
745 && GET_MODE_CLASS (oldmode) == MODE_INT
746 && (CONST_DOUBLE_AS_INT_P (x)
747 || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
748 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
749 && direct_load[(int) mode])
750 || (REG_P (x)
751 && (! HARD_REGISTER_P (x)
752 || HARD_REGNO_MODE_OK (REGNO (x), mode))
753 && TRULY_NOOP_TRUNCATION_MODES_P (mode,
754 GET_MODE (x))))))))
 756 /* ??? If we don't know OLDMODE, we have to assume here that
757 X does not need sign- or zero-extension. This may not be
758 the case, but it's the best we can do. */
759 if (CONST_INT_P (x) && oldmode != VOIDmode
760 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
762 HOST_WIDE_INT val = INTVAL (x);
764 /* We must sign or zero-extend in this case. Start by
765 zero-extending, then sign extend if we need to. */
766 val &= GET_MODE_MASK (oldmode);
767 if (! unsignedp
768 && val_signbit_known_set_p (oldmode, val))
769 val |= ~GET_MODE_MASK (oldmode);
771 return gen_int_mode (val, mode);
774 return gen_lowpart (mode, x);
 777 /* Converting an integer constant into MODE is always equivalent to a
 778 subreg operation. */
779 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
781 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
782 return simplify_gen_subreg (mode, x, oldmode, 0);
785 temp = gen_reg_rtx (mode);
786 convert_move (temp, x, unsignedp);
787 return temp;
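/* Illustrative sketch (not part of the original file): convert_to_mode is
   simply convert_modes with OLDMODE left as VOIDmode, so

     rtx r = convert_to_mode (SImode, x, 0);

   either reuses X in place (a lowpart or constant rewrite) or allocates a
   fresh pseudo and emits the conversion via convert_move, as above.  */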
790 /* Return the largest alignment we can use for doing a move (or store)
791 of MAX_PIECES. ALIGN is the largest alignment we could use. */
793 static unsigned int
794 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
796 enum machine_mode tmode;
798 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
799 if (align >= GET_MODE_ALIGNMENT (tmode))
800 align = GET_MODE_ALIGNMENT (tmode);
801 else
803 enum machine_mode tmode, xmode;
805 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
806 tmode != VOIDmode;
807 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
808 if (GET_MODE_SIZE (tmode) > max_pieces
809 || SLOW_UNALIGNED_ACCESS (tmode, align))
810 break;
812 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
815 return align;
818 /* Return the widest integer mode no wider than SIZE. If no such mode
819 can be found, return VOIDmode. */
821 static enum machine_mode
822 widest_int_mode_for_size (unsigned int size)
824 enum machine_mode tmode, mode = VOIDmode;
826 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
827 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
828 if (GET_MODE_SIZE (tmode) < size)
829 mode = tmode;
831 return mode;
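/* Illustrative note (not part of the original file): only modes strictly
   narrower than SIZE are considered, so with the usual QI/HI/SI/DImode
   (1/2/4/8 bytes) widest_int_mode_for_size (5) yields SImode; callers
   therefore pass MOVE_MAX_PIECES + 1 (or STORE_MAX_PIECES + 1) so that
   pieces of up to the maximum size remain eligible.  */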
834 /* STORE_MAX_PIECES is the number of bytes at a time that we can
835 store efficiently. Due to internal GCC limitations, this is
836 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
837 for an immediate constant. */
839 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
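/* Illustrative note (not part of the original file): on a host where
   HOST_WIDE_INT is 64 bits, 2 * sizeof (HOST_WIDE_INT) is 16, so this
   evaluates to MIN (MOVE_MAX_PIECES, 16) -- stores are limited to
   immediates that fit in two host words.  */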
841 /* Determine whether the LEN bytes can be moved by using several move
842 instructions. Return nonzero if a call to move_by_pieces should
843 succeed. */
 845 int
 846 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
847 unsigned int align ATTRIBUTE_UNUSED)
849 return MOVE_BY_PIECES_P (len, align);
852 /* Generate several move instructions to copy LEN bytes from block FROM to
853 block TO. (These are MEM rtx's with BLKmode).
855 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
856 used to push FROM to the stack.
858 ALIGN is maximum stack alignment we can assume.
 860 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
 861 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
862 stpcpy. */
 864 rtx
 865 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
866 unsigned int align, int endp)
868 struct move_by_pieces_d data;
869 enum machine_mode to_addr_mode;
870 enum machine_mode from_addr_mode = get_address_mode (from);
871 rtx to_addr, from_addr = XEXP (from, 0);
872 unsigned int max_size = MOVE_MAX_PIECES + 1;
873 enum insn_code icode;
875 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
877 data.offset = 0;
878 data.from_addr = from_addr;
879 if (to)
881 to_addr_mode = get_address_mode (to);
882 to_addr = XEXP (to, 0);
883 data.to = to;
884 data.autinc_to
885 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
886 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
887 data.reverse
888 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
890 else
892 to_addr_mode = VOIDmode;
893 to_addr = NULL_RTX;
894 data.to = NULL_RTX;
895 data.autinc_to = 1;
896 #ifdef STACK_GROWS_DOWNWARD
897 data.reverse = 1;
898 #else
899 data.reverse = 0;
900 #endif
902 data.to_addr = to_addr;
903 data.from = from;
904 data.autinc_from
905 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
906 || GET_CODE (from_addr) == POST_INC
907 || GET_CODE (from_addr) == POST_DEC);
909 data.explicit_inc_from = 0;
910 data.explicit_inc_to = 0;
911 if (data.reverse) data.offset = len;
912 data.len = len;
914 /* If copying requires more than two move insns,
915 copy addresses to registers (to make displacements shorter)
916 and use post-increment if available. */
917 if (!(data.autinc_from && data.autinc_to)
918 && move_by_pieces_ninsns (len, align, max_size) > 2)
920 /* Find the mode of the largest move...
921 MODE might not be used depending on the definitions of the
922 USE_* macros below. */
923 enum machine_mode mode ATTRIBUTE_UNUSED
924 = widest_int_mode_for_size (max_size);
926 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
928 data.from_addr = copy_to_mode_reg (from_addr_mode,
929 plus_constant (from_addr_mode,
930 from_addr, len));
931 data.autinc_from = 1;
932 data.explicit_inc_from = -1;
934 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
936 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
937 data.autinc_from = 1;
938 data.explicit_inc_from = 1;
940 if (!data.autinc_from && CONSTANT_P (from_addr))
941 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
942 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
944 data.to_addr = copy_to_mode_reg (to_addr_mode,
945 plus_constant (to_addr_mode,
946 to_addr, len));
947 data.autinc_to = 1;
948 data.explicit_inc_to = -1;
950 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
952 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
953 data.autinc_to = 1;
954 data.explicit_inc_to = 1;
956 if (!data.autinc_to && CONSTANT_P (to_addr))
957 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
960 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
962 /* First move what we can in the largest integer mode, then go to
963 successively smaller modes. */
965 while (max_size > 1 && data.len > 0)
967 enum machine_mode mode = widest_int_mode_for_size (max_size);
969 if (mode == VOIDmode)
970 break;
972 icode = optab_handler (mov_optab, mode);
973 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
974 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
976 max_size = GET_MODE_SIZE (mode);
979 /* The code above should have handled everything. */
980 gcc_assert (!data.len);
982 if (endp)
984 rtx to1;
986 gcc_assert (!data.reverse);
987 if (data.autinc_to)
989 if (endp == 2)
991 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
992 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
993 else
994 data.to_addr = copy_to_mode_reg (to_addr_mode,
995 plus_constant (to_addr_mode,
996 data.to_addr,
997 -1));
999 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1000 data.offset);
1002 else
1004 if (endp == 2)
1005 --data.offset;
1006 to1 = adjust_address (data.to, QImode, data.offset);
1008 return to1;
1010 else
1011 return data.to;
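/* Illustrative sketch (not part of the original file): with ENDP == 1 the
   caller receives a QImode MEM just past the last byte written, mirroring
   mempcpy:

     rtx end = move_by_pieces (dst_mem, src_mem, 16, align, 1);

   dst_mem and src_mem are assumed to be BLKmode MEMs; with ENDP == 0 the
   return value is simply the destination block.  */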
1014 /* Return number of insns required to move L bytes by pieces.
1015 ALIGN (in bits) is maximum alignment we can assume. */
1017 unsigned HOST_WIDE_INT
1018 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1019 unsigned int max_size)
1021 unsigned HOST_WIDE_INT n_insns = 0;
1023 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1025 while (max_size > 1 && l > 0)
1027 enum machine_mode mode;
1028 enum insn_code icode;
1030 mode = widest_int_mode_for_size (max_size);
1032 if (mode == VOIDmode)
1033 break;
1035 icode = optab_handler (mov_optab, mode);
1036 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1037 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1039 max_size = GET_MODE_SIZE (mode);
1042 gcc_assert (!l);
1043 return n_insns;
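/* Illustrative example (not part of the original file): assuming
   MOVE_MAX_PIECES is 8, the alignment permits every mode, and QI/HI/SI/DI
   moves all exist, move_by_pieces_ninsns (13, align, 9) splits the length
   greedily as 13 = 8 + 4 + 1, i.e. one DImode, one SImode and one QImode
   move, for a total of 3 insns.  */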
1046 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1047 with move instructions for mode MODE. GENFUN is the gen_... function
1048 to make a move insn for that mode. DATA has all the other info. */
1050 static void
1051 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1052 struct move_by_pieces_d *data)
1054 unsigned int size = GET_MODE_SIZE (mode);
1055 rtx to1 = NULL_RTX, from1;
1057 while (data->len >= size)
1059 if (data->reverse)
1060 data->offset -= size;
1062 if (data->to)
1064 if (data->autinc_to)
1065 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1066 data->offset);
1067 else
1068 to1 = adjust_address (data->to, mode, data->offset);
1071 if (data->autinc_from)
1072 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1073 data->offset);
1074 else
1075 from1 = adjust_address (data->from, mode, data->offset);
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1078 emit_insn (gen_add2_insn (data->to_addr,
1079 gen_int_mode (-(HOST_WIDE_INT) size,
1080 GET_MODE (data->to_addr))));
1081 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1082 emit_insn (gen_add2_insn (data->from_addr,
1083 gen_int_mode (-(HOST_WIDE_INT) size,
1084 GET_MODE (data->from_addr))));
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr,
1099 gen_int_mode (size,
1100 GET_MODE (data->to_addr))));
1101 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1102 emit_insn (gen_add2_insn (data->from_addr,
1103 gen_int_mode (size,
1104 GET_MODE (data->from_addr))));
1106 if (! data->reverse)
1107 data->offset += size;
1109 data->len -= size;
1113 /* Emit code to move a block Y to a block X. This may be done with
1114 string-move instructions, with multiple scalar move instructions,
1115 or with a library call.
1117 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1118 SIZE is an rtx that says how long they are.
1119 ALIGN is the maximum alignment we can assume they have.
1120 METHOD describes what kind of copy this is, and what mechanisms may be used.
1122 Return the address of the new block, if memcpy is called and returns it,
1123 0 otherwise. */
 1125 rtx
 1126 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1127 unsigned int expected_align, HOST_WIDE_INT expected_size)
1129 bool may_use_call;
1130 rtx retval = 0;
1131 unsigned int align;
1133 gcc_assert (size);
1134 if (CONST_INT_P (size)
1135 && INTVAL (size) == 0)
1136 return 0;
1138 switch (method)
1140 case BLOCK_OP_NORMAL:
1141 case BLOCK_OP_TAILCALL:
1142 may_use_call = true;
1143 break;
1145 case BLOCK_OP_CALL_PARM:
1146 may_use_call = block_move_libcall_safe_for_call_parm ();
1148 /* Make inhibit_defer_pop nonzero around the library call
1149 to force it to pop the arguments right away. */
1150 NO_DEFER_POP;
1151 break;
1153 case BLOCK_OP_NO_LIBCALL:
1154 may_use_call = false;
1155 break;
1157 default:
1158 gcc_unreachable ();
1161 gcc_assert (MEM_P (x) && MEM_P (y));
1162 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1163 gcc_assert (align >= BITS_PER_UNIT);
1165 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1166 block copy is more efficient for other large modes, e.g. DCmode. */
1167 x = adjust_address (x, BLKmode, 0);
1168 y = adjust_address (y, BLKmode, 0);
1170 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1171 can be incorrect is coming from __builtin_memcpy. */
1172 if (CONST_INT_P (size))
1174 x = shallow_copy_rtx (x);
1175 y = shallow_copy_rtx (y);
1176 set_mem_size (x, INTVAL (size));
1177 set_mem_size (y, INTVAL (size));
1180 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1181 move_by_pieces (x, y, INTVAL (size), align, 0);
1182 else if (emit_block_move_via_movmem (x, y, size, align,
1183 expected_align, expected_size))
1185 else if (may_use_call
1186 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1187 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1189 /* Since x and y are passed to a libcall, mark the corresponding
1190 tree EXPR as addressable. */
1191 tree y_expr = MEM_EXPR (y);
1192 tree x_expr = MEM_EXPR (x);
1193 if (y_expr)
1194 mark_addressable (y_expr);
1195 if (x_expr)
1196 mark_addressable (x_expr);
1197 retval = emit_block_move_via_libcall (x, y, size,
1198 method == BLOCK_OP_TAILCALL);
1201 else
1202 emit_block_move_via_loop (x, y, size, align);
1204 if (method == BLOCK_OP_CALL_PARM)
1205 OK_DEFER_POP;
1207 return retval;
 1210 rtx
 1211 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1213 return emit_block_move_hints (x, y, size, method, 0, -1);
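/* Illustrative sketch (not part of the original file): the wrapper just
   above is the common entry point, e.g.

     rtx ret = emit_block_move (dst_mem, src_mem, GEN_INT (n),
                                BLOCK_OP_NORMAL);

   where dst_mem and src_mem are BLKmode MEMs and n is a byte count; RET is
   the value returned by memcpy when a libcall was emitted, 0 otherwise.  */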
1216 /* A subroutine of emit_block_move. Returns true if calling the
1217 block move libcall will not clobber any parameters which may have
1218 already been placed on the stack. */
1220 static bool
1221 block_move_libcall_safe_for_call_parm (void)
1223 #if defined (REG_PARM_STACK_SPACE)
1224 tree fn;
1225 #endif
1227 /* If arguments are pushed on the stack, then they're safe. */
1228 if (PUSH_ARGS)
1229 return true;
1231 /* If registers go on the stack anyway, any argument is sure to clobber
1232 an outgoing argument. */
1233 #if defined (REG_PARM_STACK_SPACE)
1234 fn = emit_block_move_libcall_fn (false);
1235 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1236 depend on its argument. */
1237 (void) fn;
1238 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1239 && REG_PARM_STACK_SPACE (fn) != 0)
1240 return false;
1241 #endif
1243 /* If any argument goes in memory, then it might clobber an outgoing
1244 argument. */
1246 CUMULATIVE_ARGS args_so_far_v;
1247 cumulative_args_t args_so_far;
1248 tree fn, arg;
1250 fn = emit_block_move_libcall_fn (false);
1251 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1252 args_so_far = pack_cumulative_args (&args_so_far_v);
1254 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1255 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1257 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1258 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1259 NULL_TREE, true);
1260 if (!tmp || !REG_P (tmp))
1261 return false;
1262 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1263 return false;
1264 targetm.calls.function_arg_advance (args_so_far, mode,
1265 NULL_TREE, true);
1268 return true;
1271 /* A subroutine of emit_block_move. Expand a movmem pattern;
1272 return true if successful. */
1274 static bool
1275 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1276 unsigned int expected_align, HOST_WIDE_INT expected_size)
1278 int save_volatile_ok = volatile_ok;
1279 enum machine_mode mode;
1281 if (expected_align < align)
1282 expected_align = align;
1284 /* Since this is a move insn, we don't care about volatility. */
1285 volatile_ok = 1;
1287 /* Try the most limited insn first, because there's no point
1288 including more than one in the machine description unless
1289 the more limited one has some advantage. */
1291 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1292 mode = GET_MODE_WIDER_MODE (mode))
1294 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1296 if (code != CODE_FOR_nothing
1297 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1298 here because if SIZE is less than the mode mask, as it is
1299 returned by the macro, it will definitely be less than the
1300 actual mode mask. Since SIZE is within the Pmode address
1301 space, we limit MODE to Pmode. */
1302 && ((CONST_INT_P (size)
1303 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1304 <= (GET_MODE_MASK (mode) >> 1)))
1305 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1307 struct expand_operand ops[6];
1308 unsigned int nops;
1310 /* ??? When called via emit_block_move_for_call, it'd be
1311 nice if there were some way to inform the backend, so
1312 that it doesn't fail the expansion because it thinks
1313 emitting the libcall would be more efficient. */
1314 nops = insn_data[(int) code].n_generator_args;
1315 gcc_assert (nops == 4 || nops == 6);
1317 create_fixed_operand (&ops[0], x);
1318 create_fixed_operand (&ops[1], y);
1319 /* The check above guarantees that this size conversion is valid. */
1320 create_convert_operand_to (&ops[2], size, mode, true);
1321 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1322 if (nops == 6)
1324 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1325 create_integer_operand (&ops[5], expected_size);
1327 if (maybe_expand_insn (code, nops, ops))
1329 volatile_ok = save_volatile_ok;
1330 return true;
1335 volatile_ok = save_volatile_ok;
1336 return false;
1339 /* A subroutine of emit_block_move. Expand a call to memcpy.
1340 Return the return value from memcpy, 0 otherwise. */
 1342 rtx
 1343 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1345 rtx dst_addr, src_addr;
1346 tree call_expr, fn, src_tree, dst_tree, size_tree;
1347 enum machine_mode size_mode;
1348 rtx retval;
1350 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1351 pseudos. We can then place those new pseudos into a VAR_DECL and
1352 use them later. */
1354 dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1355 src_addr = copy_addr_to_reg (XEXP (src, 0));
1357 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1358 src_addr = convert_memory_address (ptr_mode, src_addr);
1360 dst_tree = make_tree (ptr_type_node, dst_addr);
1361 src_tree = make_tree (ptr_type_node, src_addr);
1363 size_mode = TYPE_MODE (sizetype);
1365 size = convert_to_mode (size_mode, size, 1);
1366 size = copy_to_mode_reg (size_mode, size);
1368 /* It is incorrect to use the libcall calling conventions to call
1369 memcpy in this context. This could be a user call to memcpy and
1370 the user may wish to examine the return value from memcpy. For
1371 targets where libcalls and normal calls have different conventions
1372 for returning pointers, we could end up generating incorrect code. */
1374 size_tree = make_tree (sizetype, size);
1376 fn = emit_block_move_libcall_fn (true);
1377 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1378 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1380 retval = expand_normal (call_expr);
1382 return retval;
1385 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1386 for the function we use for block copies. */
1388 static GTY(()) tree block_move_fn;
1390 void
1391 init_block_move_fn (const char *asmspec)
1393 if (!block_move_fn)
1395 tree args, fn, attrs, attr_args;
1397 fn = get_identifier ("memcpy");
1398 args = build_function_type_list (ptr_type_node, ptr_type_node,
1399 const_ptr_type_node, sizetype,
1400 NULL_TREE);
1402 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1403 DECL_EXTERNAL (fn) = 1;
1404 TREE_PUBLIC (fn) = 1;
1405 DECL_ARTIFICIAL (fn) = 1;
1406 TREE_NOTHROW (fn) = 1;
1407 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1408 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1410 attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1411 attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1413 decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1415 block_move_fn = fn;
1418 if (asmspec)
1419 set_user_assembler_name (block_move_fn, asmspec);
1422 static tree
1423 emit_block_move_libcall_fn (int for_call)
1425 static bool emitted_extern;
1427 if (!block_move_fn)
1428 init_block_move_fn (NULL);
1430 if (for_call && !emitted_extern)
1432 emitted_extern = true;
1433 make_decl_rtl (block_move_fn);
1436 return block_move_fn;
1439 /* A subroutine of emit_block_move. Copy the data via an explicit
1440 loop. This is used only when libcalls are forbidden. */
1441 /* ??? It'd be nice to copy in hunks larger than QImode. */
1443 static void
1444 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1445 unsigned int align ATTRIBUTE_UNUSED)
1447 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1448 enum machine_mode x_addr_mode = get_address_mode (x);
1449 enum machine_mode y_addr_mode = get_address_mode (y);
1450 enum machine_mode iter_mode;
1452 iter_mode = GET_MODE (size);
1453 if (iter_mode == VOIDmode)
1454 iter_mode = word_mode;
1456 top_label = gen_label_rtx ();
1457 cmp_label = gen_label_rtx ();
1458 iter = gen_reg_rtx (iter_mode);
1460 emit_move_insn (iter, const0_rtx);
1462 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1463 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1464 do_pending_stack_adjust ();
1466 emit_jump (cmp_label);
1467 emit_label (top_label);
1469 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1470 x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1472 if (x_addr_mode != y_addr_mode)
1473 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1474 y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1476 x = change_address (x, QImode, x_addr);
1477 y = change_address (y, QImode, y_addr);
1479 emit_move_insn (x, y);
1481 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1482 true, OPTAB_LIB_WIDEN);
1483 if (tmp != iter)
1484 emit_move_insn (iter, tmp);
1486 emit_label (cmp_label);
1488 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1489 true, top_label, REG_BR_PROB_BASE * 90 / 100);
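/* Illustrative sketch (not part of the original file): the RTL emitted by
   emit_block_move_via_loop corresponds to this byte-copy loop in C:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size) goto top;  */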
1492 /* Copy all or part of a value X into registers starting at REGNO.
1493 The number of registers to be filled is NREGS. */
1495 void
1496 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1498 int i;
1499 #ifdef HAVE_load_multiple
1500 rtx pat;
1501 rtx last;
1502 #endif
1504 if (nregs == 0)
1505 return;
1507 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1508 x = validize_mem (force_const_mem (mode, x));
1510 /* See if the machine can do this with a load multiple insn. */
1511 #ifdef HAVE_load_multiple
1512 if (HAVE_load_multiple)
1514 last = get_last_insn ();
1515 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1516 GEN_INT (nregs));
1517 if (pat)
1519 emit_insn (pat);
1520 return;
1522 else
1523 delete_insns_since (last);
1525 #endif
1527 for (i = 0; i < nregs; i++)
1528 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1529 operand_subword_force (x, i, mode));
1532 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1533 The number of registers to be filled is NREGS. */
1535 void
1536 move_block_from_reg (int regno, rtx x, int nregs)
1538 int i;
1540 if (nregs == 0)
1541 return;
1543 /* See if the machine can do this with a store multiple insn. */
1544 #ifdef HAVE_store_multiple
1545 if (HAVE_store_multiple)
1547 rtx last = get_last_insn ();
1548 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1549 GEN_INT (nregs));
1550 if (pat)
1552 emit_insn (pat);
1553 return;
1555 else
1556 delete_insns_since (last);
1558 #endif
1560 for (i = 0; i < nregs; i++)
1562 rtx tem = operand_subword (x, i, 1, BLKmode);
1564 gcc_assert (tem);
1566 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1570 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1571 ORIG, where ORIG is a non-consecutive group of registers represented by
1572 a PARALLEL. The clone is identical to the original except in that the
1573 original set of registers is replaced by a new set of pseudo registers.
1574 The new set has the same modes as the original set. */
 1576 rtx
 1577 gen_group_rtx (rtx orig)
1579 int i, length;
1580 rtx *tmps;
1582 gcc_assert (GET_CODE (orig) == PARALLEL);
1584 length = XVECLEN (orig, 0);
1585 tmps = XALLOCAVEC (rtx, length);
1587 /* Skip a NULL entry in first slot. */
1588 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1590 if (i)
1591 tmps[0] = 0;
1593 for (; i < length; i++)
1595 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1596 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1598 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1601 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
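/* Illustrative sketch (not part of the original file): a register group
   such as

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   says that bytes 0-7 of the value live in one register and bytes 8-15 in
   another; gen_group_rtx clones such a PARALLEL, replacing each register
   with a fresh pseudo of the same mode at the same byte offset.  */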
1604 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1605 except that values are placed in TMPS[i], and must later be moved
1606 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1608 static void
1609 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1611 rtx src;
1612 int start, i;
1613 enum machine_mode m = GET_MODE (orig_src);
1615 gcc_assert (GET_CODE (dst) == PARALLEL);
1617 if (m != VOIDmode
1618 && !SCALAR_INT_MODE_P (m)
1619 && !MEM_P (orig_src)
1620 && GET_CODE (orig_src) != CONCAT)
1622 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1623 if (imode == BLKmode)
1624 src = assign_stack_temp (GET_MODE (orig_src), ssize);
1625 else
1626 src = gen_reg_rtx (imode);
1627 if (imode != BLKmode)
1628 src = gen_lowpart (GET_MODE (orig_src), src);
1629 emit_move_insn (src, orig_src);
1630 /* ...and back again. */
1631 if (imode != BLKmode)
1632 src = gen_lowpart (imode, src);
1633 emit_group_load_1 (tmps, dst, src, type, ssize);
1634 return;
1637 /* Check for a NULL entry, used to indicate that the parameter goes
1638 both on the stack and in registers. */
1639 if (XEXP (XVECEXP (dst, 0, 0), 0))
1640 start = 0;
1641 else
1642 start = 1;
1644 /* Process the pieces. */
1645 for (i = start; i < XVECLEN (dst, 0); i++)
1647 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1648 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1649 unsigned int bytelen = GET_MODE_SIZE (mode);
1650 int shift = 0;
1652 /* Handle trailing fragments that run over the size of the struct. */
1653 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1655 /* Arrange to shift the fragment to where it belongs.
1656 extract_bit_field loads to the lsb of the reg. */
1657 if (
1658 #ifdef BLOCK_REG_PADDING
1659 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1660 == (BYTES_BIG_ENDIAN ? upward : downward)
1661 #else
1662 BYTES_BIG_ENDIAN
1663 #endif
1665 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1666 bytelen = ssize - bytepos;
1667 gcc_assert (bytelen > 0);
1670 /* If we won't be loading directly from memory, protect the real source
1671 from strange tricks we might play; but make sure that the source can
1672 be loaded directly into the destination. */
1673 src = orig_src;
1674 if (!MEM_P (orig_src)
1675 && (!CONSTANT_P (orig_src)
1676 || (GET_MODE (orig_src) != mode
1677 && GET_MODE (orig_src) != VOIDmode)))
1679 if (GET_MODE (orig_src) == VOIDmode)
1680 src = gen_reg_rtx (mode);
1681 else
1682 src = gen_reg_rtx (GET_MODE (orig_src));
1684 emit_move_insn (src, orig_src);
1687 /* Optimize the access just a bit. */
1688 if (MEM_P (src)
1689 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1690 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1691 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1692 && bytelen == GET_MODE_SIZE (mode))
1694 tmps[i] = gen_reg_rtx (mode);
1695 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1697 else if (COMPLEX_MODE_P (mode)
1698 && GET_MODE (src) == mode
1699 && bytelen == GET_MODE_SIZE (mode))
1700 /* Let emit_move_complex do the bulk of the work. */
1701 tmps[i] = src;
1702 else if (GET_CODE (src) == CONCAT)
1704 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1705 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1707 if ((bytepos == 0 && bytelen == slen0)
1708 || (bytepos != 0 && bytepos + bytelen <= slen))
1710 /* The following assumes that the concatenated objects all
1711 have the same size. In this case, a simple calculation
1712 can be used to determine the object and the bit field
1713 to be extracted. */
1714 tmps[i] = XEXP (src, bytepos / slen0);
1715 if (! CONSTANT_P (tmps[i])
1716 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1717 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1718 (bytepos % slen0) * BITS_PER_UNIT,
1719 1, NULL_RTX, mode, mode);
1721 else
1723 rtx mem;
1725 gcc_assert (!bytepos);
1726 mem = assign_stack_temp (GET_MODE (src), slen);
1727 emit_move_insn (mem, src);
1728 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1729 0, 1, NULL_RTX, mode, mode);
1732 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
 1733 SIMD register, which is currently broken. Until we get GCC
1734 to emit proper RTL for these cases, let's dump to memory. */
1735 else if (VECTOR_MODE_P (GET_MODE (dst))
1736 && REG_P (src))
1738 int slen = GET_MODE_SIZE (GET_MODE (src));
1739 rtx mem;
1741 mem = assign_stack_temp (GET_MODE (src), slen);
1742 emit_move_insn (mem, src);
1743 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1745 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1746 && XVECLEN (dst, 0) > 1)
1747 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1748 else if (CONSTANT_P (src))
1750 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1752 if (len == ssize)
1753 tmps[i] = src;
1754 else
1756 rtx first, second;
1758 gcc_assert (2 * len == ssize);
1759 split_double (src, &first, &second);
1760 if (i)
1761 tmps[i] = second;
1762 else
1763 tmps[i] = first;
1766 else if (REG_P (src) && GET_MODE (src) == mode)
1767 tmps[i] = src;
1768 else
1769 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1770 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1771 mode, mode);
1773 if (shift)
1774 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1775 shift, tmps[i], 0);
1779 /* Emit code to move a block SRC of type TYPE to a block DST,
1780 where DST is non-consecutive registers represented by a PARALLEL.
1781 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1782 if not known. */
1784 void
1785 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1787 rtx *tmps;
1788 int i;
1790 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1791 emit_group_load_1 (tmps, dst, src, type, ssize);
1793 /* Copy the extracted pieces into the proper (probable) hard regs. */
1794 for (i = 0; i < XVECLEN (dst, 0); i++)
1796 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1797 if (d == NULL)
1798 continue;
1799 emit_move_insn (d, tmps[i]);
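/* Illustrative sketch (not part of the original file): given a PARALLEL
   destination like the one sketched above, loading a 16-byte aggregate
   from a BLKmode MEM is

     emit_group_load (dst_parallel, src_mem, type, 16);

   dst_parallel, src_mem and type are hypothetical; SSIZE may be -1 when
   the total size of the source block is not known.  */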
1803 /* Similar, but load SRC into new pseudos in a format that looks like
1804 PARALLEL. This can later be fed to emit_group_move to get things
1805 in the right place. */
 1807 rtx
 1808 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1810 rtvec vec;
1811 int i;
1813 vec = rtvec_alloc (XVECLEN (parallel, 0));
1814 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1816 /* Convert the vector to look just like the original PARALLEL, except
1817 with the computed values. */
1818 for (i = 0; i < XVECLEN (parallel, 0); i++)
1820 rtx e = XVECEXP (parallel, 0, i);
1821 rtx d = XEXP (e, 0);
1823 if (d)
1825 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1826 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1828 RTVEC_ELT (vec, i) = e;
1831 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1834 /* Emit code to move a block SRC to block DST, where SRC and DST are
1835 non-consecutive groups of registers, each represented by a PARALLEL. */
1837 void
1838 emit_group_move (rtx dst, rtx src)
1840 int i;
1842 gcc_assert (GET_CODE (src) == PARALLEL
1843 && GET_CODE (dst) == PARALLEL
1844 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1846 /* Skip first entry if NULL. */
1847 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1848 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1849 XEXP (XVECEXP (src, 0, i), 0));
1852 /* Move a group of registers represented by a PARALLEL into pseudos. */
 1854 rtx
 1855 emit_group_move_into_temps (rtx src)
1857 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1858 int i;
1860 for (i = 0; i < XVECLEN (src, 0); i++)
1862 rtx e = XVECEXP (src, 0, i);
1863 rtx d = XEXP (e, 0);
1865 if (d)
1866 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1867 RTVEC_ELT (vec, i) = e;
1870 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1873 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1874 where SRC is non-consecutive registers represented by a PARALLEL.
1875 SSIZE represents the total size of block ORIG_DST, or -1 if not
1876 known. */
1878 void
1879 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1881 rtx *tmps, dst;
1882 int start, finish, i;
1883 enum machine_mode m = GET_MODE (orig_dst);
1885 gcc_assert (GET_CODE (src) == PARALLEL);
1887 if (!SCALAR_INT_MODE_P (m)
1888 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1890 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1891 if (imode == BLKmode)
1892 dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1893 else
1894 dst = gen_reg_rtx (imode);
1895 emit_group_store (dst, src, type, ssize);
1896 if (imode != BLKmode)
1897 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1898 emit_move_insn (orig_dst, dst);
1899 return;
1902 /* Check for a NULL entry, used to indicate that the parameter goes
1903 both on the stack and in registers. */
1904 if (XEXP (XVECEXP (src, 0, 0), 0))
1905 start = 0;
1906 else
1907 start = 1;
1908 finish = XVECLEN (src, 0);
1910 tmps = XALLOCAVEC (rtx, finish);
1912 /* Copy the (probable) hard regs into pseudos. */
1913 for (i = start; i < finish; i++)
1915 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1916 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1918 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1919 emit_move_insn (tmps[i], reg);
1921 else
1922 tmps[i] = reg;
1925 /* If we won't be storing directly into memory, protect the real destination
1926 from strange tricks we might play. */
1927 dst = orig_dst;
1928 if (GET_CODE (dst) == PARALLEL)
1930 rtx temp;
1932 /* We can get a PARALLEL dst if there is a conditional expression in
1933 a return statement. In that case, the dst and src are the same,
1934 so no action is necessary. */
1935 if (rtx_equal_p (dst, src))
1936 return;
1938 /* It is unclear if we can ever reach here, but we may as well handle
1939 it. Allocate a temporary, and split this into a store/load to/from
1940 the temporary. */
1942 temp = assign_stack_temp (GET_MODE (dst), ssize);
1943 emit_group_store (temp, src, type, ssize);
1944 emit_group_load (dst, temp, type, ssize);
1945 return;
1947 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1949 enum machine_mode outer = GET_MODE (dst);
1950 enum machine_mode inner;
1951 HOST_WIDE_INT bytepos;
1952 bool done = false;
1953 rtx temp;
1955 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1956 dst = gen_reg_rtx (outer);
1958 /* Make life a bit easier for combine. */
1959 /* If the first element of the vector is the low part
1960 of the destination mode, use a paradoxical subreg to
1961 initialize the destination. */
1962 if (start < finish)
1964 inner = GET_MODE (tmps[start]);
1965 bytepos = subreg_lowpart_offset (inner, outer);
1966 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1968 temp = simplify_gen_subreg (outer, tmps[start],
1969 inner, 0);
1970 if (temp)
1972 emit_move_insn (dst, temp);
1973 done = true;
1974 start++;
1979 /* If the first element wasn't the low part, try the last. */
1980 if (!done
1981 && start < finish - 1)
1983 inner = GET_MODE (tmps[finish - 1]);
1984 bytepos = subreg_lowpart_offset (inner, outer);
1985 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1987 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1988 inner, 0);
1989 if (temp)
1991 emit_move_insn (dst, temp);
1992 done = true;
1993 finish--;
1998 /* Otherwise, simply initialize the result to zero. */
1999 if (!done)
2000 emit_move_insn (dst, CONST0_RTX (outer));
2003 /* Process the pieces. */
2004 for (i = start; i < finish; i++)
2006 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2007 enum machine_mode mode = GET_MODE (tmps[i]);
2008 unsigned int bytelen = GET_MODE_SIZE (mode);
2009 unsigned int adj_bytelen = bytelen;
2010 rtx dest = dst;
2012 /* Handle trailing fragments that run over the size of the struct. */
2013 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2014 adj_bytelen = ssize - bytepos;
2016 if (GET_CODE (dst) == CONCAT)
2018 if (bytepos + adj_bytelen
2019 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2020 dest = XEXP (dst, 0);
2021 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2023 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2024 dest = XEXP (dst, 1);
2026 else
2028 enum machine_mode dest_mode = GET_MODE (dest);
2029 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2033 if (GET_MODE_ALIGNMENT (dest_mode)
2034 >= GET_MODE_ALIGNMENT (tmp_mode))
2036 dest = assign_stack_temp (dest_mode,
2037 GET_MODE_SIZE (dest_mode));
2038 emit_move_insn (adjust_address (dest,
2039 tmp_mode,
2040 bytepos),
2041 tmps[i]);
2042 dst = dest;
2044 else
2046 dest = assign_stack_temp (tmp_mode,
2047 GET_MODE_SIZE (tmp_mode));
2048 emit_move_insn (dest, tmps[i]);
2049 dst = adjust_address (dest, dest_mode, bytepos);
2051 break;
2055 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2057 /* store_bit_field always takes its value from the lsb.
2058 Move the fragment to the lsb if it's not already there. */
2059 if (
2060 #ifdef BLOCK_REG_PADDING
2061 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2062 == (BYTES_BIG_ENDIAN ? upward : downward)
2063 #else
2064 BYTES_BIG_ENDIAN
2065 #endif
2068 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2069 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2070 shift, tmps[i], 0);
2072 bytelen = adj_bytelen;
2075 /* Optimize the access just a bit. */
2076 if (MEM_P (dest)
2077 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2078 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2079 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2080 && bytelen == GET_MODE_SIZE (mode))
2081 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2082 else
2083 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 0, 0, mode, tmps[i]);
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (orig_dst != dst)
2089 emit_move_insn (orig_dst, dst);
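/* Illustrative sketch, not part of the original source: spilling a value
   that came back from a call in a PARALLEL into a fresh stack slot.  The
   function and parameter names are hypothetical; compare
   maybe_emit_group_store below, which does the analogous thing with a
   pseudo.  */
#if 0
static rtx
example_spill_group (rtx retval_parallel, tree valtype)
{
  HOST_WIDE_INT size = int_size_in_bytes (valtype);
  rtx slot = assign_stack_temp (TYPE_MODE (valtype), size);

  /* Scatter the registers of RETVAL_PARALLEL into the stack slot.  */
  emit_group_store (slot, retval_parallel, valtype, size);
  return slot;
}
#endif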
2092 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2093 of the value stored in X. */
2096 maybe_emit_group_store (rtx x, tree type)
2098 enum machine_mode mode = TYPE_MODE (type);
2099 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2100 if (GET_CODE (x) == PARALLEL)
2102 rtx result = gen_reg_rtx (mode);
2103 emit_group_store (result, x, type, int_size_in_bytes (type));
2104 return result;
2106 return x;
2109 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2111 This is used on targets that return BLKmode values in registers. */
2113 void
2114 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode mode = GET_MODE (srcreg);
2121 enum machine_mode tmode = GET_MODE (target);
2122 enum machine_mode copy_mode;
2124 /* BLKmode registers created in the back-end shouldn't have survived. */
2125 gcc_assert (mode != BLKmode);
2127 /* If the structure doesn't take up a whole number of words, see whether
2128 SRCREG is padded on the left or on the right. If it's on the left,
2129 set PADDING_CORRECTION to the number of bits to skip.
2131 In most ABIs, the structure will be returned at the least significant
2132 end of the register, which translates to right padding on little-endian
2133 targets and left padding on big-endian targets. The opposite
2134 holds if the structure is returned at the most significant
2135 end of the register. */
2136 if (bytes % UNITS_PER_WORD != 0
2137 && (targetm.calls.return_in_msb (type)
2138 ? !BYTES_BIG_ENDIAN
2139 : BYTES_BIG_ENDIAN))
2140 padding_correction
2141 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2143 /* We can use a single move if we have an exact mode for the size. */
2144 else if (MEM_P (target)
2145 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2146 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2147 && bytes == GET_MODE_SIZE (mode))
2149 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2150 return;
2153 /* And if we additionally have the same mode for a register. */
2154 else if (REG_P (target)
2155 && GET_MODE (target) == mode
2156 && bytes == GET_MODE_SIZE (mode))
2158 emit_move_insn (target, srcreg);
2159 return;
2162 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2163 into a new pseudo which is a full word. */
2164 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2166 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2167 mode = word_mode;
2170 /* Copy the structure BITSIZE bits at a time. If the target lives in
2171 memory, take care of not reading/writing past its end by selecting
2172 a copy mode suited to BITSIZE. This should always be possible given
2173 how it is computed.
2175 If the target lives in a register, make sure not to select a copy mode
2176 larger than the mode of the register.
2178 We could probably emit more efficient code for machines which do not use
2179 strict alignment, but it doesn't seem worth the effort at the current
2180 time. */
2182 copy_mode = word_mode;
2183 if (MEM_P (target))
2185 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2186 if (mem_mode != BLKmode)
2187 copy_mode = mem_mode;
2189 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2190 copy_mode = tmode;
2192 for (bitpos = 0, xbitpos = padding_correction;
2193 bitpos < bytes * BITS_PER_UNIT;
2194 bitpos += bitsize, xbitpos += bitsize)
2196 /* We need a new source operand each time xbitpos is on a
2197 word boundary and when xbitpos == padding_correction
2198 (the first time through). */
2199 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2200 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2202 /* We need a new destination operand each time bitpos is on
2203 a word boundary. */
2204 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2205 dst = target;
2206 else if (bitpos % BITS_PER_WORD == 0)
2207 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2209 /* Use xbitpos for the source extraction (right justified) and
2210 bitpos for the destination store (left justified). */
2211 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2212 extract_bit_field (src, bitsize,
2213 xbitpos % BITS_PER_WORD, 1,
2214 NULL_RTX, copy_mode, copy_mode));
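/* Illustrative sketch, not part of the original source: after expanding a
   call whose BLKmode result came back in a register, the caller might copy
   it into its memory destination as below.  The function and parameter
   names are hypothetical; DEST_MEM is a BLKmode MEM large enough for
   VALTYPE and RET_REG is the (non-BLKmode) register chosen by the ABI.  */
#if 0
static void
example_copy_blk_result (rtx dest_mem, rtx ret_reg, tree valtype)
{
  copy_blkmode_from_reg (dest_mem, ret_reg, valtype);
}
#endif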
2218 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2219 register if it contains any data, otherwise return null.
2221 This is used on targets that return BLKmode values in registers. */
2224 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2226 int i, n_regs;
2227 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2228 unsigned int bitsize;
2229 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2230 enum machine_mode dst_mode;
2232 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2234 x = expand_normal (src);
2236 bytes = int_size_in_bytes (TREE_TYPE (src));
2237 if (bytes == 0)
2238 return NULL_RTX;
2240 /* If the structure doesn't take up a whole number of words, see
2241 whether the register value should be padded on the left or on
2242 the right. Set PADDING_CORRECTION to the number of padding
2243 bits needed on the left side.
2245 In most ABIs, the structure will be returned at the least significant
2246 end of the register, which translates to right padding on little-endian
2247 targets and left padding on big-endian targets. The opposite
2248 holds if the structure is returned at the most significant
2249 end of the register. */
2250 if (bytes % UNITS_PER_WORD != 0
2251 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2252 ? !BYTES_BIG_ENDIAN
2253 : BYTES_BIG_ENDIAN))
2254 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2255 * BITS_PER_UNIT));
2257 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2258 dst_words = XALLOCAVEC (rtx, n_regs);
2259 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2261 /* Copy the structure BITSIZE bits at a time. */
2262 for (bitpos = 0, xbitpos = padding_correction;
2263 bitpos < bytes * BITS_PER_UNIT;
2264 bitpos += bitsize, xbitpos += bitsize)
2266 /* We need a new destination pseudo each time xbitpos is
2267 on a word boundary and when xbitpos == padding_correction
2268 (the first time through). */
2269 if (xbitpos % BITS_PER_WORD == 0
2270 || xbitpos == padding_correction)
2272 /* Generate an appropriate register. */
2273 dst_word = gen_reg_rtx (word_mode);
2274 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2276 /* Clear the destination before we move anything into it. */
2277 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2280 /* We need a new source operand each time bitpos is on a word
2281 boundary. */
2282 if (bitpos % BITS_PER_WORD == 0)
2283 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2285 /* Use bitpos for the source extraction (left justified) and
2286 xbitpos for the destination store (right justified). */
2287 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2288 0, 0, word_mode,
2289 extract_bit_field (src_word, bitsize,
2290 bitpos % BITS_PER_WORD, 1,
2291 NULL_RTX, word_mode, word_mode));
2294 if (mode == BLKmode)
2296 /* Find the smallest integer mode large enough to hold the
2297 entire structure. */
2298 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2299 mode != VOIDmode;
2300 mode = GET_MODE_WIDER_MODE (mode))
2301 /* Have we found a large enough mode? */
2302 if (GET_MODE_SIZE (mode) >= bytes)
2303 break;
2305 /* A suitable mode should have been found. */
2306 gcc_assert (mode != VOIDmode);
2309 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2310 dst_mode = word_mode;
2311 else
2312 dst_mode = mode;
2313 dst = gen_reg_rtx (dst_mode);
2315 for (i = 0; i < n_regs; i++)
2316 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2318 if (mode != dst_mode)
2319 dst = gen_lowpart (mode, dst);
2321 return dst;
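/* Illustrative sketch, not part of the original source: expanding a return
   of a small aggregate on a target that returns BLKmode values in
   registers.  The names are hypothetical; FUNCTION_VALUE_REG is assumed to
   be a register of mode ABI_MODE chosen by the target.  */
#if 0
static void
example_return_blk_in_reg (enum machine_mode abi_mode, tree retval_expr,
                           rtx function_value_reg)
{
  rtx reg = copy_blkmode_to_reg (abi_mode, retval_expr);

  /* A null result means the aggregate is empty and nothing needs
     to be copied.  */
  if (reg)
    emit_move_insn (function_value_reg, reg);
}
#endif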
2324 /* Add a USE expression for REG to the (possibly empty) list pointed
2325 to by CALL_FUSAGE. REG must denote a hard register. */
2327 void
2328 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2330 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2332 *call_fusage
2333 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2336 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2337 starting at REGNO. All of these registers must be hard registers. */
2339 void
2340 use_regs (rtx *call_fusage, int regno, int nregs)
2342 int i;
2344 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2346 for (i = 0; i < nregs; i++)
2347 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2350 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2351 PARALLEL REGS. This is for calls that pass values in multiple
2352 non-contiguous locations. The Irix 6 ABI has examples of this. */
2354 void
2355 use_group_regs (rtx *call_fusage, rtx regs)
2357 int i;
2359 for (i = 0; i < XVECLEN (regs, 0); i++)
2361 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2363 /* A NULL entry means the parameter goes both on the stack and in
2364 registers. This can also be a MEM for targets that pass values
2365 partially on the stack and partially in registers. */
2366 if (reg != 0 && REG_P (reg))
2367 use_reg (call_fusage, reg);
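/* Illustrative sketch, not part of the original source: building up the
   CALL_INSN_FUNCTION_USAGE list for a call whose arguments were loaded
   into hard registers.  The function and parameter names are
   hypothetical.  */
#if 0
static void
example_record_arg_uses (rtx *call_fusage, rtx arg_reg, rtx arg_parallel)
{
  /* An argument living in a single hard register.  */
  use_reg (call_fusage, arg_reg);

  /* An argument passed in several non-contiguous registers.  */
  use_group_regs (call_fusage, arg_parallel);
}
#endif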
2371 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2372 assignment and the code of the expression on the RHS is CODE. Return
2373 NULL otherwise. */
2375 static gimple
2376 get_def_for_expr (tree name, enum tree_code code)
2378 gimple def_stmt;
2380 if (TREE_CODE (name) != SSA_NAME)
2381 return NULL;
2383 def_stmt = get_gimple_for_ssa_name (name);
2384 if (!def_stmt
2385 || gimple_assign_rhs_code (def_stmt) != code)
2386 return NULL;
2388 return def_stmt;
2391 #ifdef HAVE_conditional_move
2392 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2393 assignment and the class of the expression on the RHS is CLASS. Return
2394 NULL otherwise. */
2396 static gimple
2397 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2399 gimple def_stmt;
2401 if (TREE_CODE (name) != SSA_NAME)
2402 return NULL;
2404 def_stmt = get_gimple_for_ssa_name (name);
2405 if (!def_stmt
2406 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2407 return NULL;
2409 return def_stmt;
2411 #endif
2414 /* Determine whether the LEN bytes generated by CONSTFUN can be
2415 stored to memory using several move instructions. CONSTFUNDATA is
2416 a pointer which will be passed as an argument in every CONSTFUN call.
2417 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2418 a memset operation and false if it's a copy of a constant string.
2419 Return nonzero if a call to store_by_pieces should succeed. */
2422 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2423 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2424 void *constfundata, unsigned int align, bool memsetp)
2426 unsigned HOST_WIDE_INT l;
2427 unsigned int max_size;
2428 HOST_WIDE_INT offset = 0;
2429 enum machine_mode mode;
2430 enum insn_code icode;
2431 int reverse;
2432 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2433 rtx cst ATTRIBUTE_UNUSED;
2435 if (len == 0)
2436 return 1;
2438 if (! (memsetp
2439 ? SET_BY_PIECES_P (len, align)
2440 : STORE_BY_PIECES_P (len, align)))
2441 return 0;
2443 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2445 /* We would first store what we can in the largest integer mode, then go to
2446 successively smaller modes. */
2448 for (reverse = 0;
2449 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2450 reverse++)
2452 l = len;
2453 max_size = STORE_MAX_PIECES + 1;
2454 while (max_size > 1 && l > 0)
2456 mode = widest_int_mode_for_size (max_size);
2458 if (mode == VOIDmode)
2459 break;
2461 icode = optab_handler (mov_optab, mode);
2462 if (icode != CODE_FOR_nothing
2463 && align >= GET_MODE_ALIGNMENT (mode))
2465 unsigned int size = GET_MODE_SIZE (mode);
2467 while (l >= size)
2469 if (reverse)
2470 offset -= size;
2472 cst = (*constfun) (constfundata, offset, mode);
2473 if (!targetm.legitimate_constant_p (mode, cst))
2474 return 0;
2476 if (!reverse)
2477 offset += size;
2479 l -= size;
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
2487 gcc_assert (!l);
2490 return 1;
2493 /* Generate several move instructions to store LEN bytes generated by
2494 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2495 pointer which will be passed as an argument in every CONSTFUN call.
2496 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2497 a memset operation and false if it's a copy of a constant string.
2498 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2499 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2500 stpcpy. */
2503 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2504 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2505 void *constfundata, unsigned int align, bool memsetp, int endp)
2507 enum machine_mode to_addr_mode = get_address_mode (to);
2508 struct store_by_pieces_d data;
2510 if (len == 0)
2512 gcc_assert (endp != 2);
2513 return to;
2516 gcc_assert (memsetp
2517 ? SET_BY_PIECES_P (len, align)
2518 : STORE_BY_PIECES_P (len, align));
2519 data.constfun = constfun;
2520 data.constfundata = constfundata;
2521 data.len = len;
2522 data.to = to;
2523 store_by_pieces_1 (&data, align);
2524 if (endp)
2526 rtx to1;
2528 gcc_assert (!data.reverse);
2529 if (data.autinc_to)
2531 if (endp == 2)
2533 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2535 else
2536 data.to_addr = copy_to_mode_reg (to_addr_mode,
2537 plus_constant (to_addr_mode,
2538 data.to_addr,
2539 -1));
2541 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2542 data.offset);
2544 else
2546 if (endp == 2)
2547 --data.offset;
2548 to1 = adjust_address (data.to, QImode, data.offset);
2550 return to1;
2552 else
2553 return data.to;
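/* Illustrative sketch, not part of the original source: a caller copying a
   constant string supplies a callback that materializes MODE-sized chunks
   of it, asks can_store_by_pieces whether that is possible, and then lets
   store_by_pieces emit the moves.  The names are hypothetical, and
   c_readstr is assumed to be the usual helper the string builtins use to
   read a chunk of a C string into an rtx.  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;
  return c_readstr (str + offset, mode);
}

static bool
example_copy_cst_str (rtx dest_mem, const char *str,
                      unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (!can_store_by_pieces (len, example_read_str, (void *) str,
                            align, false))
    return false;
  store_by_pieces (dest_mem, len, example_read_str, (void *) str,
                   align, false, 0);
  return true;
}
#endif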
2556 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2557 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2559 static void
2560 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2562 struct store_by_pieces_d data;
2564 if (len == 0)
2565 return;
2567 data.constfun = clear_by_pieces_1;
2568 data.constfundata = NULL;
2569 data.len = len;
2570 data.to = to;
2571 store_by_pieces_1 (&data, align);
2574 /* Callback routine for clear_by_pieces.
2575 Return const0_rtx unconditionally. */
2577 static rtx
2578 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2579 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2580 enum machine_mode mode ATTRIBUTE_UNUSED)
2582 return const0_rtx;
2585 /* Subroutine of clear_by_pieces and store_by_pieces.
2586 Generate several move instructions to store LEN bytes of block TO. (A MEM
2587 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2589 static void
2590 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2591 unsigned int align ATTRIBUTE_UNUSED)
2593 enum machine_mode to_addr_mode = get_address_mode (data->to);
2594 rtx to_addr = XEXP (data->to, 0);
2595 unsigned int max_size = STORE_MAX_PIECES + 1;
2596 enum insn_code icode;
2598 data->offset = 0;
2599 data->to_addr = to_addr;
2600 data->autinc_to
2601 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2602 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2604 data->explicit_inc_to = 0;
2605 data->reverse
2606 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2607 if (data->reverse)
2608 data->offset = data->len;
2610 /* If storing requires more than two move insns,
2611 copy addresses to registers (to make displacements shorter)
2612 and use post-increment if available. */
2613 if (!data->autinc_to
2614 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2616 /* Determine the main mode we'll be using.
2617 MODE might not be used depending on the definitions of the
2618 USE_* macros below. */
2619 enum machine_mode mode ATTRIBUTE_UNUSED
2620 = widest_int_mode_for_size (max_size);
2622 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2624 data->to_addr = copy_to_mode_reg (to_addr_mode,
2625 plus_constant (to_addr_mode,
2626 to_addr,
2627 data->len));
2628 data->autinc_to = 1;
2629 data->explicit_inc_to = -1;
2632 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2633 && ! data->autinc_to)
2635 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2636 data->autinc_to = 1;
2637 data->explicit_inc_to = 1;
2640 if ( !data->autinc_to && CONSTANT_P (to_addr))
2641 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2644 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2646 /* First store what we can in the largest integer mode, then go to
2647 successively smaller modes. */
2649 while (max_size > 1 && data->len > 0)
2651 enum machine_mode mode = widest_int_mode_for_size (max_size);
2653 if (mode == VOIDmode)
2654 break;
2656 icode = optab_handler (mov_optab, mode);
2657 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2658 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2660 max_size = GET_MODE_SIZE (mode);
2663 /* The code above should have handled everything. */
2664 gcc_assert (!data->len);
2667 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2668 with move instructions for mode MODE. GENFUN is the gen_... function
2669 to make a move insn for that mode. DATA has all the other info. */
2671 static void
2672 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2673 struct store_by_pieces_d *data)
2675 unsigned int size = GET_MODE_SIZE (mode);
2676 rtx to1, cst;
2678 while (data->len >= size)
2680 if (data->reverse)
2681 data->offset -= size;
2683 if (data->autinc_to)
2684 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2685 data->offset);
2686 else
2687 to1 = adjust_address (data->to, mode, data->offset);
2689 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2690 emit_insn (gen_add2_insn (data->to_addr,
2691 gen_int_mode (-(HOST_WIDE_INT) size,
2692 GET_MODE (data->to_addr))));
2694 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2695 emit_insn ((*genfun) (to1, cst));
2697 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2698 emit_insn (gen_add2_insn (data->to_addr,
2699 gen_int_mode (size,
2700 GET_MODE (data->to_addr))));
2702 if (! data->reverse)
2703 data->offset += size;
2705 data->len -= size;
2709 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2710 its length in bytes. */
2713 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2714 unsigned int expected_align, HOST_WIDE_INT expected_size)
2716 enum machine_mode mode = GET_MODE (object);
2717 unsigned int align;
2719 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2721 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2722 just move a zero. Otherwise, do this a piece at a time. */
2723 if (mode != BLKmode
2724 && CONST_INT_P (size)
2725 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2727 rtx zero = CONST0_RTX (mode);
2728 if (zero != NULL)
2730 emit_move_insn (object, zero);
2731 return NULL;
2734 if (COMPLEX_MODE_P (mode))
2736 zero = CONST0_RTX (GET_MODE_INNER (mode));
2737 if (zero != NULL)
2739 write_complex_part (object, zero, 0);
2740 write_complex_part (object, zero, 1);
2741 return NULL;
2746 if (size == const0_rtx)
2747 return NULL;
2749 align = MEM_ALIGN (object);
2751 if (CONST_INT_P (size)
2752 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2753 clear_by_pieces (object, INTVAL (size), align);
2754 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2755 expected_align, expected_size))
2757 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2758 return set_storage_via_libcall (object, size, const0_rtx,
2759 method == BLOCK_OP_TAILCALL);
2760 else
2761 gcc_unreachable ();
2763 return NULL;
2767 clear_storage (rtx object, rtx size, enum block_op_methods method)
2769 return clear_storage_hints (object, size, method, 0, -1);
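/* Illustrative sketch, not part of the original source: zeroing a
   fixed-size BLKmode MEM.  The function and parameter names are
   hypothetical.  */
#if 0
static void
example_clear_object (rtx obj, HOST_WIDE_INT nbytes)
{
  /* clear_storage picks clear_by_pieces, a setmem pattern or a memset
     libcall depending on the size, the alignment and the target.  */
  clear_storage (obj, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif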
2773 /* A subroutine of clear_storage. Expand a call to memset.
2774 Return the return value of memset, 0 otherwise. */
2777 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2779 tree call_expr, fn, object_tree, size_tree, val_tree;
2780 enum machine_mode size_mode;
2781 rtx retval;
2783 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2784 place those pseudos into a VAR_DECL and use them later. */
2786 object = copy_addr_to_reg (XEXP (object, 0));
2788 size_mode = TYPE_MODE (sizetype);
2789 size = convert_to_mode (size_mode, size, 1);
2790 size = copy_to_mode_reg (size_mode, size);
2792 /* It is incorrect to use the libcall calling conventions to call
2793 memset in this context. This could be a user call to memset and
2794 the user may wish to examine the return value from memset. For
2795 targets where libcalls and normal calls have different conventions
2796 for returning pointers, we could end up generating incorrect code. */
2798 object_tree = make_tree (ptr_type_node, object);
2799 if (!CONST_INT_P (val))
2800 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2801 size_tree = make_tree (sizetype, size);
2802 val_tree = make_tree (integer_type_node, val);
2804 fn = clear_storage_libcall_fn (true);
2805 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2806 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2808 retval = expand_normal (call_expr);
2810 return retval;
2813 /* A subroutine of set_storage_via_libcall. Create the tree node
2814 for the function we use for block clears. */
2816 tree block_clear_fn;
2818 void
2819 init_block_clear_fn (const char *asmspec)
2821 if (!block_clear_fn)
2823 tree fn, args;
2825 fn = get_identifier ("memset");
2826 args = build_function_type_list (ptr_type_node, ptr_type_node,
2827 integer_type_node, sizetype,
2828 NULL_TREE);
2830 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2831 DECL_EXTERNAL (fn) = 1;
2832 TREE_PUBLIC (fn) = 1;
2833 DECL_ARTIFICIAL (fn) = 1;
2834 TREE_NOTHROW (fn) = 1;
2835 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2836 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2838 block_clear_fn = fn;
2841 if (asmspec)
2842 set_user_assembler_name (block_clear_fn, asmspec);
2845 static tree
2846 clear_storage_libcall_fn (int for_call)
2848 static bool emitted_extern;
2850 if (!block_clear_fn)
2851 init_block_clear_fn (NULL);
2853 if (for_call && !emitted_extern)
2855 emitted_extern = true;
2856 make_decl_rtl (block_clear_fn);
2859 return block_clear_fn;
2862 /* Expand a setmem pattern; return true if successful. */
2864 bool
2865 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2866 unsigned int expected_align, HOST_WIDE_INT expected_size)
2868 /* Try the most limited insn first, because there's no point
2869 including more than one in the machine description unless
2870 the more limited one has some advantage. */
2872 enum machine_mode mode;
2874 if (expected_align < align)
2875 expected_align = align;
2877 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2878 mode = GET_MODE_WIDER_MODE (mode))
2880 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2882 if (code != CODE_FOR_nothing
2883 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2884 here because if SIZE is less than the mode mask, as it is
2885 returned by the macro, it will definitely be less than the
2886 actual mode mask. Since SIZE is within the Pmode address
2887 space, we limit MODE to Pmode. */
2888 && ((CONST_INT_P (size)
2889 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2890 <= (GET_MODE_MASK (mode) >> 1)))
2891 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2893 struct expand_operand ops[6];
2894 unsigned int nops;
2896 nops = insn_data[(int) code].n_generator_args;
2897 gcc_assert (nops == 4 || nops == 6);
2899 create_fixed_operand (&ops[0], object);
2900 /* The check above guarantees that this size conversion is valid. */
2901 create_convert_operand_to (&ops[1], size, mode, true);
2902 create_convert_operand_from (&ops[2], val, byte_mode, true);
2903 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2904 if (nops == 6)
2906 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2907 create_integer_operand (&ops[5], expected_size);
2909 if (maybe_expand_insn (code, nops, ops))
2910 return true;
2914 return false;
2918 /* Write to one of the components of the complex value CPLX. Write VAL to
2919 the real part if IMAG_P is false, and the imaginary part if it's true. */
2921 static void
2922 write_complex_part (rtx cplx, rtx val, bool imag_p)
2924 enum machine_mode cmode;
2925 enum machine_mode imode;
2926 unsigned ibitsize;
2928 if (GET_CODE (cplx) == CONCAT)
2930 emit_move_insn (XEXP (cplx, imag_p), val);
2931 return;
2934 cmode = GET_MODE (cplx);
2935 imode = GET_MODE_INNER (cmode);
2936 ibitsize = GET_MODE_BITSIZE (imode);
2938 /* For MEMs simplify_gen_subreg may generate an invalid new address
2939 because, e.g., the original address is considered mode-dependent
2940 by the target, which restricts simplify_subreg from invoking
2941 adjust_address_nv. Instead of preparing fallback support for an
2942 invalid address, we call adjust_address_nv directly. */
2943 if (MEM_P (cplx))
2945 emit_move_insn (adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0),
2947 val);
2948 return;
2951 /* If the sub-object is at least word sized, then we know that subregging
2952 will work. This special case is important, since store_bit_field
2953 wants to operate on integer modes, and there's rarely an OImode to
2954 correspond to TCmode. */
2955 if (ibitsize >= BITS_PER_WORD
2956 /* For hard regs we have exact predicates. Assume we can split
2957 the original object if it spans an even number of hard regs.
2958 This special case is important for SCmode on 64-bit platforms
2959 where the natural size of floating-point regs is 32-bit. */
2960 || (REG_P (cplx)
2961 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2962 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2964 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2965 imag_p ? GET_MODE_SIZE (imode) : 0);
2966 if (part)
2968 emit_move_insn (part, val);
2969 return;
2971 else
2972 /* simplify_gen_subreg may fail for sub-word MEMs. */
2973 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2976 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2979 /* Extract one of the components of the complex value CPLX. Extract the
2980 real part if IMAG_P is false, and the imaginary part if it's true. */
2982 static rtx
2983 read_complex_part (rtx cplx, bool imag_p)
2985 enum machine_mode cmode, imode;
2986 unsigned ibitsize;
2988 if (GET_CODE (cplx) == CONCAT)
2989 return XEXP (cplx, imag_p);
2991 cmode = GET_MODE (cplx);
2992 imode = GET_MODE_INNER (cmode);
2993 ibitsize = GET_MODE_BITSIZE (imode);
2995 /* Special case reads from complex constants that got spilled to memory. */
2996 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2998 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2999 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3001 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3002 if (CONSTANT_CLASS_P (part))
3003 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 return adjust_address_nv (cplx, imode,
3014 imag_p ? GET_MODE_SIZE (imode) : 0);
3016 /* If the sub-object is at least word sized, then we know that subregging
3017 will work. This special case is important, since extract_bit_field
3018 wants to operate on integer modes, and there's rarely an OImode to
3019 correspond to TCmode. */
3020 if (ibitsize >= BITS_PER_WORD
3021 /* For hard regs we have exact predicates. Assume we can split
3022 the original object if it spans an even number of hard regs.
3023 This special case is important for SCmode on 64-bit platforms
3024 where the natural size of floating-point regs is 32-bit. */
3025 || (REG_P (cplx)
3026 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3027 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3029 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3030 imag_p ? GET_MODE_SIZE (imode) : 0);
3031 if (ret)
3032 return ret;
3033 else
3034 /* simplify_gen_subreg may fail for sub-word MEMs. */
3035 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3038 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3039 true, NULL_RTX, imode, imode);
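/* Illustrative sketch, not part of the original source: within this file a
   complex conjugate could be expanded with the two helpers above.  The
   names are hypothetical, TARGET and OP are assumed to share the same
   complex mode, and expand_unop is assumed to be the optabs helper for
   unary operations.  */
#if 0
static void
example_expand_conj (rtx target, rtx op)
{
  enum machine_mode imode = GET_MODE_INNER (GET_MODE (op));

  /* Copy the real part and negate the imaginary part.  */
  write_complex_part (target, read_complex_part (op, false), false);
  write_complex_part (target,
                      expand_unop (imode, neg_optab,
                                   read_complex_part (op, true),
                                   NULL_RTX, 0),
                      true);
}
#endif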
3042 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3043 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3044 represented in NEW_MODE. If FORCE is true, this will never happen, as
3045 we'll force-create a SUBREG if needed. */
3047 static rtx
3048 emit_move_change_mode (enum machine_mode new_mode,
3049 enum machine_mode old_mode, rtx x, bool force)
3051 rtx ret;
3053 if (push_operand (x, GET_MODE (x)))
3055 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3056 MEM_COPY_ATTRIBUTES (ret, x);
3058 else if (MEM_P (x))
3060 /* We don't have to worry about changing the address since the
3061 size in bytes is supposed to be the same. */
3062 if (reload_in_progress)
3064 /* Copy the MEM to change the mode and move any
3065 substitutions from the old MEM to the new one. */
3066 ret = adjust_address_nv (x, new_mode, 0);
3067 copy_replacements (x, ret);
3069 else
3070 ret = adjust_address (x, new_mode, 0);
3072 else
3074 /* Note that we do want simplify_subreg's behavior of validating
3075 that the new mode is ok for a hard register. If we were to use
3076 simplify_gen_subreg, we would create the subreg, but would
3077 probably run into the target not being able to implement it. */
3078 /* Except, of course, when FORCE is true, when this is exactly what
3079 we want. Which is needed for CCmodes on some targets. */
3080 if (force)
3081 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3082 else
3083 ret = simplify_subreg (new_mode, x, old_mode, 0);
3086 return ret;
3089 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3090 an integer mode of the same size as MODE. Returns the instruction
3091 emitted, or NULL if such a move could not be generated. */
3093 static rtx
3094 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3096 enum machine_mode imode;
3097 enum insn_code code;
3099 /* There must exist a mode of the exact size we require. */
3100 imode = int_mode_for_mode (mode);
3101 if (imode == BLKmode)
3102 return NULL_RTX;
3104 /* The target must support moves in this mode. */
3105 code = optab_handler (mov_optab, imode);
3106 if (code == CODE_FOR_nothing)
3107 return NULL_RTX;
3109 x = emit_move_change_mode (imode, mode, x, force);
3110 if (x == NULL_RTX)
3111 return NULL_RTX;
3112 y = emit_move_change_mode (imode, mode, y, force);
3113 if (y == NULL_RTX)
3114 return NULL_RTX;
3115 return emit_insn (GEN_FCN (code) (x, y));
3118 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3119 Return an equivalent MEM that does not use an auto-increment. */
3121 static rtx
3122 emit_move_resolve_push (enum machine_mode mode, rtx x)
3124 enum rtx_code code = GET_CODE (XEXP (x, 0));
3125 HOST_WIDE_INT adjust;
3126 rtx temp;
3128 adjust = GET_MODE_SIZE (mode);
3129 #ifdef PUSH_ROUNDING
3130 adjust = PUSH_ROUNDING (adjust);
3131 #endif
3132 if (code == PRE_DEC || code == POST_DEC)
3133 adjust = -adjust;
3134 else if (code == PRE_MODIFY || code == POST_MODIFY)
3136 rtx expr = XEXP (XEXP (x, 0), 1);
3137 HOST_WIDE_INT val;
3139 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3140 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3141 val = INTVAL (XEXP (expr, 1));
3142 if (GET_CODE (expr) == MINUS)
3143 val = -val;
3144 gcc_assert (adjust == val || adjust == -val);
3145 adjust = val;
3148 /* Do not use anti_adjust_stack, since we don't want to update
3149 stack_pointer_delta. */
3150 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3151 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3152 0, OPTAB_LIB_WIDEN);
3153 if (temp != stack_pointer_rtx)
3154 emit_move_insn (stack_pointer_rtx, temp);
3156 switch (code)
3158 case PRE_INC:
3159 case PRE_DEC:
3160 case PRE_MODIFY:
3161 temp = stack_pointer_rtx;
3162 break;
3163 case POST_INC:
3164 case POST_DEC:
3165 case POST_MODIFY:
3166 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3167 break;
3168 default:
3169 gcc_unreachable ();
3172 return replace_equiv_address (x, temp);
3175 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3176 X is known to satisfy push_operand, and MODE is known to be complex.
3177 Returns the last instruction emitted. */
3180 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3182 enum machine_mode submode = GET_MODE_INNER (mode);
3183 bool imag_first;
3185 #ifdef PUSH_ROUNDING
3186 unsigned int submodesize = GET_MODE_SIZE (submode);
3188 /* In case we push to the stack, but the size is smaller than what the
3189 machine can push exactly, we need to use move instructions. */
3190 if (PUSH_ROUNDING (submodesize) != submodesize)
3192 x = emit_move_resolve_push (mode, x);
3193 return emit_move_insn (x, y);
3195 #endif
3197 /* Note that the real part always precedes the imag part in memory
3198 regardless of the machine's endianness. */
3199 switch (GET_CODE (XEXP (x, 0)))
3201 case PRE_DEC:
3202 case POST_DEC:
3203 imag_first = true;
3204 break;
3205 case PRE_INC:
3206 case POST_INC:
3207 imag_first = false;
3208 break;
3209 default:
3210 gcc_unreachable ();
3213 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3214 read_complex_part (y, imag_first));
3215 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3216 read_complex_part (y, !imag_first));
3219 /* A subroutine of emit_move_complex. Perform the move from Y to X
3220 via two moves of the parts. Returns the last instruction emitted. */
3223 emit_move_complex_parts (rtx x, rtx y)
3225 /* Show the output dies here. This is necessary for SUBREGs
3226 of pseudos since we cannot track their lifetimes correctly;
3227 hard regs shouldn't appear here except as return values. */
3228 if (!reload_completed && !reload_in_progress
3229 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3230 emit_clobber (x);
3232 write_complex_part (x, read_complex_part (y, false), false);
3233 write_complex_part (x, read_complex_part (y, true), true);
3235 return get_last_insn ();
3238 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3239 MODE is known to be complex. Returns the last instruction emitted. */
3241 static rtx
3242 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3244 bool try_int;
3246 /* Need to take special care for pushes, to maintain proper ordering
3247 of the data, and possibly extra padding. */
3248 if (push_operand (x, mode))
3249 return emit_move_complex_push (mode, x, y);
3251 /* See if we can coerce the target into moving both values at once, except
3252 for floating point where we favor moving as parts if this is easy. */
3253 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3254 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3255 && !(REG_P (x)
3256 && HARD_REGISTER_P (x)
3257 && hard_regno_nregs[REGNO (x)][mode] == 1)
3258 && !(REG_P (y)
3259 && HARD_REGISTER_P (y)
3260 && hard_regno_nregs[REGNO (y)][mode] == 1))
3261 try_int = false;
3262 /* Not possible if the values are inherently not adjacent. */
3263 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3264 try_int = false;
3265 /* It is possible if both are registers (or subregs of registers). */
3266 else if (register_operand (x, mode) && register_operand (y, mode))
3267 try_int = true;
3268 /* If one of the operands is a memory, and alignment constraints
3269 are friendly enough, we may be able to do combined memory operations.
3270 We do not attempt this if Y is a constant because that combination is
3271 usually better with the by-parts thing below. */
3272 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3273 && (!STRICT_ALIGNMENT
3274 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3275 try_int = true;
3276 else
3277 try_int = false;
3279 if (try_int)
3281 rtx ret;
3283 /* For memory to memory moves, optimal behavior can be had with the
3284 existing block move logic. */
3285 if (MEM_P (x) && MEM_P (y))
3287 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3288 BLOCK_OP_NO_LIBCALL);
3289 return get_last_insn ();
3292 ret = emit_move_via_integer (mode, x, y, true);
3293 if (ret)
3294 return ret;
3297 return emit_move_complex_parts (x, y);
3300 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3301 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3303 static rtx
3304 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3306 rtx ret;
3308 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3309 if (mode != CCmode)
3311 enum insn_code code = optab_handler (mov_optab, CCmode);
3312 if (code != CODE_FOR_nothing)
3314 x = emit_move_change_mode (CCmode, mode, x, true);
3315 y = emit_move_change_mode (CCmode, mode, y, true);
3316 return emit_insn (GEN_FCN (code) (x, y));
3320 /* Otherwise, find the MODE_INT mode of the same width. */
3321 ret = emit_move_via_integer (mode, x, y, false);
3322 gcc_assert (ret != NULL);
3323 return ret;
3326 /* Return true if word I of OP lies entirely in the
3327 undefined bits of a paradoxical subreg. */
3329 static bool
3330 undefined_operand_subword_p (const_rtx op, int i)
3332 enum machine_mode innermode, innermostmode;
3333 int offset;
3334 if (GET_CODE (op) != SUBREG)
3335 return false;
3336 innermode = GET_MODE (op);
3337 innermostmode = GET_MODE (SUBREG_REG (op));
3338 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3339 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3340 memory, except for a paradoxical subreg where we define
3341 SUBREG_BYTE to be 0; undo this exception as in
3342 simplify_subreg. */
3343 if (SUBREG_BYTE (op) == 0
3344 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3346 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3347 if (WORDS_BIG_ENDIAN)
3348 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3349 if (BYTES_BIG_ENDIAN)
3350 offset += difference % UNITS_PER_WORD;
3352 if (offset >= GET_MODE_SIZE (innermostmode)
3353 || offset <= -GET_MODE_SIZE (word_mode))
3354 return true;
3355 return false;
3358 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3359 MODE is any multi-word or full-word mode that lacks a move_insn
3360 pattern. Note that you will get better code if you define such
3361 patterns, even if they must turn into multiple assembler instructions. */
3363 static rtx
3364 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3366 rtx last_insn = 0;
3367 rtx seq, inner;
3368 bool need_clobber;
3369 int i;
3371 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3373 /* If X is a push on the stack, do the push now and replace
3374 X with a reference to the stack pointer. */
3375 if (push_operand (x, mode))
3376 x = emit_move_resolve_push (mode, x);
3378 /* If we are in reload, see if either operand is a MEM whose address
3379 is scheduled for replacement. */
3380 if (reload_in_progress && MEM_P (x)
3381 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3382 x = replace_equiv_address_nv (x, inner);
3383 if (reload_in_progress && MEM_P (y)
3384 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3385 y = replace_equiv_address_nv (y, inner);
3387 start_sequence ();
3389 need_clobber = false;
3390 for (i = 0;
3391 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3392 i++)
3394 rtx xpart = operand_subword (x, i, 1, mode);
3395 rtx ypart;
3397 /* Do not generate code for a move if it would come entirely
3398 from the undefined bits of a paradoxical subreg. */
3399 if (undefined_operand_subword_p (y, i))
3400 continue;
3402 ypart = operand_subword (y, i, 1, mode);
3404 /* If we can't get a part of Y, put Y into memory if it is a
3405 constant. Otherwise, force it into a register. Then we must
3406 be able to get a part of Y. */
3407 if (ypart == 0 && CONSTANT_P (y))
3409 y = use_anchored_address (force_const_mem (mode, y));
3410 ypart = operand_subword (y, i, 1, mode);
3412 else if (ypart == 0)
3413 ypart = operand_subword_force (y, i, mode);
3415 gcc_assert (xpart && ypart);
3417 need_clobber |= (GET_CODE (xpart) == SUBREG);
3419 last_insn = emit_move_insn (xpart, ypart);
3422 seq = get_insns ();
3423 end_sequence ();
3425 /* Show the output dies here. This is necessary for SUBREGs
3426 of pseudos since we cannot track their lifetimes correctly;
3427 hard regs shouldn't appear here except as return values.
3428 We never want to emit such a clobber after reload. */
3429 if (x != y
3430 && ! (reload_in_progress || reload_completed)
3431 && need_clobber != 0)
3432 emit_clobber (x);
3434 emit_insn (seq);
3436 return last_insn;
3439 /* Low level part of emit_move_insn.
3440 Called just like emit_move_insn, but assumes X and Y
3441 are basically valid. */
3444 emit_move_insn_1 (rtx x, rtx y)
3446 enum machine_mode mode = GET_MODE (x);
3447 enum insn_code code;
3449 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3451 code = optab_handler (mov_optab, mode);
3452 if (code != CODE_FOR_nothing)
3453 return emit_insn (GEN_FCN (code) (x, y));
3455 /* Expand complex moves by moving real part and imag part. */
3456 if (COMPLEX_MODE_P (mode))
3457 return emit_move_complex (mode, x, y);
3459 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3460 || ALL_FIXED_POINT_MODE_P (mode))
3462 rtx result = emit_move_via_integer (mode, x, y, true);
3464 /* If we can't find an integer mode, use multi words. */
3465 if (result)
3466 return result;
3467 else
3468 return emit_move_multi_word (mode, x, y);
3471 if (GET_MODE_CLASS (mode) == MODE_CC)
3472 return emit_move_ccmode (mode, x, y);
3474 /* Try using a move pattern for the corresponding integer mode. This is
3475 only safe when simplify_subreg can convert MODE constants into integer
3476 constants. At present, it can only do this reliably if the value
3477 fits within a HOST_WIDE_INT. */
3478 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3480 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3482 if (ret)
3484 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3485 return ret;
3489 return emit_move_multi_word (mode, x, y);
3492 /* Generate code to copy Y into X.
3493 Both Y and X must have the same mode, except that
3494 Y can be a constant with VOIDmode.
3495 This mode cannot be BLKmode; use emit_block_move for that.
3497 Return the last instruction emitted. */
3500 emit_move_insn (rtx x, rtx y)
3502 enum machine_mode mode = GET_MODE (x);
3503 rtx y_cst = NULL_RTX;
3504 rtx last_insn, set;
3506 gcc_assert (mode != BLKmode
3507 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3509 if (CONSTANT_P (y))
3511 if (optimize
3512 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3513 && (last_insn = compress_float_constant (x, y)))
3514 return last_insn;
3516 y_cst = y;
3518 if (!targetm.legitimate_constant_p (mode, y))
3520 y = force_const_mem (mode, y);
3522 /* If the target's cannot_force_const_mem prevented the spill,
3523 assume that the target's move expanders will also take care
3524 of the non-legitimate constant. */
3525 if (!y)
3526 y = y_cst;
3527 else
3528 y = use_anchored_address (y);
3532 /* If X or Y are memory references, verify that their addresses are valid
3533 for the machine. */
3534 if (MEM_P (x)
3535 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3536 MEM_ADDR_SPACE (x))
3537 && ! push_operand (x, GET_MODE (x))))
3538 x = validize_mem (x);
3540 if (MEM_P (y)
3541 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3542 MEM_ADDR_SPACE (y)))
3543 y = validize_mem (y);
3545 gcc_assert (mode != BLKmode);
3547 last_insn = emit_move_insn_1 (x, y);
3549 if (y_cst && REG_P (x)
3550 && (set = single_set (last_insn)) != NULL_RTX
3551 && SET_DEST (set) == x
3552 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3553 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3555 return last_insn;
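/* Illustrative sketch, not part of the original source: the common way to
   materialize a constant is to move it into a fresh pseudo with
   emit_move_insn, which legitimizes the constant (possibly spilling it to
   the constant pool) and records a REG_EQUAL note.  The function name is
   hypothetical; force_reg wraps essentially this pattern.  */
#if 0
static rtx
example_force_constant (enum machine_mode mode, rtx value)
{
  rtx tmp = gen_reg_rtx (mode);

  emit_move_insn (tmp, value);
  return tmp;
}
#endif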
3558 /* If Y is representable exactly in a narrower mode, and the target can
3559 perform the extension directly from constant or memory, then emit the
3560 move as an extension. */
3562 static rtx
3563 compress_float_constant (rtx x, rtx y)
3565 enum machine_mode dstmode = GET_MODE (x);
3566 enum machine_mode orig_srcmode = GET_MODE (y);
3567 enum machine_mode srcmode;
3568 REAL_VALUE_TYPE r;
3569 int oldcost, newcost;
3570 bool speed = optimize_insn_for_speed_p ();
3572 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3574 if (targetm.legitimate_constant_p (dstmode, y))
3575 oldcost = set_src_cost (y, speed);
3576 else
3577 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3579 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3580 srcmode != orig_srcmode;
3581 srcmode = GET_MODE_WIDER_MODE (srcmode))
3583 enum insn_code ic;
3584 rtx trunc_y, last_insn;
3586 /* Skip if the target can't extend this way. */
3587 ic = can_extend_p (dstmode, srcmode, 0);
3588 if (ic == CODE_FOR_nothing)
3589 continue;
3591 /* Skip if the narrowed value isn't exact. */
3592 if (! exact_real_truncate (srcmode, &r))
3593 continue;
3595 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3597 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3599 /* Skip if the target needs extra instructions to perform
3600 the extension. */
3601 if (!insn_operand_matches (ic, 1, trunc_y))
3602 continue;
3603 /* This is valid, but may not be cheaper than the original. */
3604 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3605 speed);
3606 if (oldcost < newcost)
3607 continue;
3609 else if (float_extend_from_mem[dstmode][srcmode])
3611 trunc_y = force_const_mem (srcmode, trunc_y);
3612 /* This is valid, but may not be cheaper than the original. */
3613 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3614 speed);
3615 if (oldcost < newcost)
3616 continue;
3617 trunc_y = validize_mem (trunc_y);
3619 else
3620 continue;
3622 /* For CSE's benefit, force the compressed constant pool entry
3623 into a new pseudo. This constant may be used in different modes,
3624 and if not, combine will put things back together for us. */
3625 trunc_y = force_reg (srcmode, trunc_y);
3626 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3627 last_insn = get_last_insn ();
3629 if (REG_P (x))
3630 set_unique_reg_note (last_insn, REG_EQUAL, y);
3632 return last_insn;
3635 return NULL_RTX;
3638 /* Pushing data onto the stack. */
3640 /* Push a block of length SIZE (perhaps variable)
3641 and return an rtx to address the beginning of the block.
3642 The value may be virtual_outgoing_args_rtx.
3644 EXTRA is the number of bytes of padding to push in addition to SIZE.
3645 BELOW nonzero means this padding comes at low addresses;
3646 otherwise, the padding comes at high addresses. */
3649 push_block (rtx size, int extra, int below)
3651 rtx temp;
3653 size = convert_modes (Pmode, ptr_mode, size, 1);
3654 if (CONSTANT_P (size))
3655 anti_adjust_stack (plus_constant (Pmode, size, extra));
3656 else if (REG_P (size) && extra == 0)
3657 anti_adjust_stack (size);
3658 else
3660 temp = copy_to_mode_reg (Pmode, size);
3661 if (extra != 0)
3662 temp = expand_binop (Pmode, add_optab, temp,
3663 gen_int_mode (extra, Pmode),
3664 temp, 0, OPTAB_LIB_WIDEN);
3665 anti_adjust_stack (temp);
3668 #ifndef STACK_GROWS_DOWNWARD
3669 if (0)
3670 #else
3671 if (1)
3672 #endif
3674 temp = virtual_outgoing_args_rtx;
3675 if (extra != 0 && below)
3676 temp = plus_constant (Pmode, temp, extra);
3678 else
3680 if (CONST_INT_P (size))
3681 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3682 -INTVAL (size) - (below ? 0 : extra));
3683 else if (extra != 0 && !below)
3684 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 negate_rtx (Pmode, plus_constant (Pmode, size,
3686 extra)));
3687 else
3688 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3689 negate_rtx (Pmode, size));
3692 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
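/* Illustrative sketch, not part of the original source: a caller pushing a
   variable-sized block of outgoing argument space might combine push_block
   with emit_block_move as below.  The function and parameter names are
   hypothetical, and BLOCK_OP_CALL_PARM is assumed to be the appropriate
   block-op method for call arguments.  */
#if 0
static void
example_push_blk_arg (rtx arg_mem, rtx size)
{
  rtx dest_addr = push_block (size, 0, 0);
  rtx dest = gen_rtx_MEM (BLKmode, dest_addr);

  emit_block_move (dest, arg_mem, size, BLOCK_OP_CALL_PARM);
}
#endif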
3695 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3697 static rtx
3698 mem_autoinc_base (rtx mem)
3700 if (MEM_P (mem))
3702 rtx addr = XEXP (mem, 0);
3703 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3704 return XEXP (addr, 0);
3706 return NULL;
3709 /* A utility routine used here, in reload, and in try_split. The insns
3710 after PREV up to and including LAST are known to adjust the stack,
3711 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3712 placing notes as appropriate. PREV may be NULL, indicating the
3713 entire insn sequence prior to LAST should be scanned.
3715 The set of allowed stack pointer modifications is small:
3716 (1) One or more auto-inc style memory references (aka pushes),
3717 (2) One or more addition/subtraction with the SP as destination,
3718 (3) A single move insn with the SP as destination,
3719 (4) A call_pop insn,
3720 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3722 Insns in the sequence that do not modify the SP are ignored,
3723 except for noreturn calls.
3725 The return value is the amount of adjustment that can be trivially
3726 verified, via immediate operand or auto-inc. If the adjustment
3727 cannot be trivially extracted, the return value is INT_MIN. */
3729 HOST_WIDE_INT
3730 find_args_size_adjust (rtx insn)
3732 rtx dest, set, pat;
3733 int i;
3735 pat = PATTERN (insn);
3736 set = NULL;
3738 /* Look for a call_pop pattern. */
3739 if (CALL_P (insn))
3741 /* We have to allow non-call_pop patterns for the case
3742 of emit_single_push_insn of a TLS address. */
3743 if (GET_CODE (pat) != PARALLEL)
3744 return 0;
3746 /* All call_pop have a stack pointer adjust in the parallel.
3747 The call itself is always first, and the stack adjust is
3748 usually last, so search from the end. */
3749 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3751 set = XVECEXP (pat, 0, i);
3752 if (GET_CODE (set) != SET)
3753 continue;
3754 dest = SET_DEST (set);
3755 if (dest == stack_pointer_rtx)
3756 break;
3758 /* We'd better have found the stack pointer adjust. */
3759 if (i == 0)
3760 return 0;
3761 /* Fall through to process the extracted SET and DEST
3762 as if it were a standalone insn. */
3764 else if (GET_CODE (pat) == SET)
3765 set = pat;
3766 else if ((set = single_set (insn)) != NULL)
3768 else if (GET_CODE (pat) == PARALLEL)
3770 /* ??? Some older ports use a parallel with a stack adjust
3771 and a store for a PUSH_ROUNDING pattern, rather than a
3772 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3773 /* ??? See h8300 and m68k, pushqi1. */
3774 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3776 set = XVECEXP (pat, 0, i);
3777 if (GET_CODE (set) != SET)
3778 continue;
3779 dest = SET_DEST (set);
3780 if (dest == stack_pointer_rtx)
3781 break;
3783 /* We do not expect an auto-inc of the sp in the parallel. */
3784 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3785 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3786 != stack_pointer_rtx);
3788 if (i < 0)
3789 return 0;
3791 else
3792 return 0;
3794 dest = SET_DEST (set);
3796 /* Look for direct modifications of the stack pointer. */
3797 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3799 /* Look for a trivial adjustment, otherwise assume nothing. */
3800 /* Note that the SPU restore_stack_block pattern refers to
3801 the stack pointer in V4SImode. Consider that non-trivial. */
3802 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3803 && GET_CODE (SET_SRC (set)) == PLUS
3804 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3805 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3806 return INTVAL (XEXP (SET_SRC (set), 1));
3807 /* ??? Reload can generate no-op moves, which will be cleaned
3808 up later. Recognize it and continue searching. */
3809 else if (rtx_equal_p (dest, SET_SRC (set)))
3810 return 0;
3811 else
3812 return HOST_WIDE_INT_MIN;
3814 else
3816 rtx mem, addr;
3818 /* Otherwise only think about autoinc patterns. */
3819 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3821 mem = dest;
3822 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3823 != stack_pointer_rtx);
3825 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3826 mem = SET_SRC (set);
3827 else
3828 return 0;
3830 addr = XEXP (mem, 0);
3831 switch (GET_CODE (addr))
3833 case PRE_INC:
3834 case POST_INC:
3835 return GET_MODE_SIZE (GET_MODE (mem));
3836 case PRE_DEC:
3837 case POST_DEC:
3838 return -GET_MODE_SIZE (GET_MODE (mem));
3839 case PRE_MODIFY:
3840 case POST_MODIFY:
3841 addr = XEXP (addr, 1);
3842 gcc_assert (GET_CODE (addr) == PLUS);
3843 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3844 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3845 return INTVAL (XEXP (addr, 1));
3846 default:
3847 gcc_unreachable ();
3853 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3855 int args_size = end_args_size;
3856 bool saw_unknown = false;
3857 rtx insn;
3859 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3861 HOST_WIDE_INT this_delta;
3863 if (!NONDEBUG_INSN_P (insn))
3864 continue;
3866 this_delta = find_args_size_adjust (insn);
3867 if (this_delta == 0)
3869 if (!CALL_P (insn)
3870 || ACCUMULATE_OUTGOING_ARGS
3871 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3872 continue;
3875 gcc_assert (!saw_unknown);
3876 if (this_delta == HOST_WIDE_INT_MIN)
3877 saw_unknown = true;
3879 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3880 #ifdef STACK_GROWS_DOWNWARD
3881 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3882 #endif
3883 args_size -= this_delta;
3886 return saw_unknown ? INT_MIN : args_size;
3889 #ifdef PUSH_ROUNDING
3890 /* Emit single push insn. */
3892 static void
3893 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3895 rtx dest_addr;
3896 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3897 rtx dest;
3898 enum insn_code icode;
3900 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3901 /* If there is a push pattern, use it.  Otherwise fall back to the old way of
3902 handing a MEM that represents the push operation to the move expander. */
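  /* In that fallback the push ends up as an ordinary move such as
     (set (mem:SI (pre_dec:SI (reg sp))) (reg r0)) on a target whose
     STACK_PUSH_CODE is PRE_DEC (illustrative only).  */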
3903 icode = optab_handler (push_optab, mode);
3904 if (icode != CODE_FOR_nothing)
3906 struct expand_operand ops[1];
3908 create_input_operand (&ops[0], x, mode);
3909 if (maybe_expand_insn (icode, 1, ops))
3910 return;
3912 if (GET_MODE_SIZE (mode) == rounded_size)
3913 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3914 /* If we are to pad downward, adjust the stack pointer first and
3915 then store X into the stack location using an offset. This is
3916 because emit_move_insn does not know how to pad; it does not have
3917 access to type. */
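  /* Illustrative only: pushing a QImode value when PUSH_ROUNDING
     rounds 1 byte up to 4 on a downward-growing stack gives a
     padding_size of 3; the stack pointer is decremented by 4 and the
     byte is stored at sp + 3, leaving the padding at lower addresses.  */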
3918 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3920 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3921 HOST_WIDE_INT offset;
3923 emit_move_insn (stack_pointer_rtx,
3924 expand_binop (Pmode,
3925 #ifdef STACK_GROWS_DOWNWARD
3926 sub_optab,
3927 #else
3928 add_optab,
3929 #endif
3930 stack_pointer_rtx,
3931 gen_int_mode (rounded_size, Pmode),
3932 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3934 offset = (HOST_WIDE_INT) padding_size;
3935 #ifdef STACK_GROWS_DOWNWARD
3936 if (STACK_PUSH_CODE == POST_DEC)
3937 /* We have already decremented the stack pointer, so get the
3938 previous value. */
3939 offset += (HOST_WIDE_INT) rounded_size;
3940 #else
3941 if (STACK_PUSH_CODE == POST_INC)
3942 /* We have already incremented the stack pointer, so get the
3943 previous value. */
3944 offset -= (HOST_WIDE_INT) rounded_size;
3945 #endif
3946 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3947 gen_int_mode (offset, Pmode));
3949 else
3951 #ifdef STACK_GROWS_DOWNWARD
3952 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3953 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3954 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
3955 Pmode));
3956 #else
3957 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3958 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3959 gen_int_mode (rounded_size, Pmode));
3960 #endif
3961 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3964 dest = gen_rtx_MEM (mode, dest_addr);
3966 if (type != 0)
3968 set_mem_attributes (dest, type, 1);
3970 if (flag_optimize_sibling_calls)
3971 /* Function incoming arguments may overlap with sibling call
3972 outgoing arguments and we cannot allow reordering of reads
3973 from function arguments with stores to outgoing arguments
3974 of sibling calls. */
3975 set_mem_alias_set (dest, 0);
3977 emit_move_insn (dest, x);
3980 /* Emit and annotate a single push insn. */
3982 static void
3983 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3985 int delta, old_delta = stack_pointer_delta;
3986 rtx prev = get_last_insn ();
3987 rtx last;
3989 emit_single_push_insn_1 (mode, x, type);
3991 last = get_last_insn ();
3993 /* Notice the common case where we emitted exactly one insn. */
3994 if (PREV_INSN (last) == prev)
3996 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3997 return;
4000 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4001 gcc_assert (delta == INT_MIN || delta == old_delta);
4003 #endif
4005 /* Generate code to push X onto the stack, assuming it has mode MODE and
4006 type TYPE.
4007 MODE is redundant except when X is a CONST_INT (since they don't
4008 carry mode info).
4009 SIZE is an rtx for the size of data to be copied (in bytes),
4010 needed only if X is BLKmode.
4012 ALIGN (in bits) is maximum alignment we can assume.
4014 If PARTIAL and REG are both nonzero, then copy that many of the first
4015 bytes of X into registers starting with REG, and push the rest of X.
4016 The amount of space pushed is decreased by PARTIAL bytes.
4017 REG must be a hard register in this case.
4018 If REG is zero but PARTIAL is not, take all other actions for an
4019 argument partially in registers, but do not actually load any
4020 registers.
4022 EXTRA is the amount in bytes of extra space to leave next to this arg.
4023 This is ignored if an argument block has already been allocated.
4025 On a machine that lacks real push insns, ARGS_ADDR is the address of
4026 the bottom of the argument block for this call. We use indexing off there
4027 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4028 argument block has not been preallocated.
4030 ARGS_SO_FAR is the size of args previously pushed for this call.
4032 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4033 for arguments passed in registers. If nonzero, it will be the number
4034 of bytes required. */
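/* Illustrative only: for a 12-byte BLKmode argument with PARTIAL == 8
   and REG a suitable hard register, the first 8 bytes end up in
   registers, only the remaining bytes are pushed (or stored at
   ARGS_ADDR), and the register load itself happens at the end of this
   function, after any memory-to-memory copies.  */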
4036 void
4037 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4038 unsigned int align, int partial, rtx reg, int extra,
4039 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4040 rtx alignment_pad)
4042 rtx xinner;
4043 enum direction stack_direction
4044 #ifdef STACK_GROWS_DOWNWARD
4045 = downward;
4046 #else
4047 = upward;
4048 #endif
4050 /* Decide where to pad the argument: `downward' for below,
4051 `upward' for above, or `none' for don't pad it.
4052 Default is below for small data on big-endian machines; else above. */
4053 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4055 /* Invert direction if stack is post-decrement.
4056 FIXME: why? */
4057 if (STACK_PUSH_CODE == POST_DEC)
4058 if (where_pad != none)
4059 where_pad = (where_pad == downward ? upward : downward);
4061 xinner = x;
4063 if (mode == BLKmode
4064 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4066 /* Copy a block into the stack, entirely or partially. */
4068 rtx temp;
4069 int used;
4070 int offset;
4071 int skip;
4073 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4074 used = partial - offset;
4076 if (mode != BLKmode)
4078 /* A value is to be stored in an insufficiently aligned
4079 stack slot; copy via a suitably aligned slot if
4080 necessary. */
4081 size = GEN_INT (GET_MODE_SIZE (mode));
4082 if (!MEM_P (xinner))
4084 temp = assign_temp (type, 1, 1);
4085 emit_move_insn (temp, xinner);
4086 xinner = temp;
4090 gcc_assert (size);
4092 /* USED is now the # of bytes we need not copy to the stack
4093 because registers will take care of them. */
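  /* For example (illustrative), with a 32-bit PARM_BOUNDARY and
     PARTIAL == 6 we get OFFSET == 2 and USED == 4: four whole bytes
     live only in registers, while the two odd bytes are copied to the
     stack as well, together with the rest of the argument.  */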
4095 if (partial != 0)
4096 xinner = adjust_address (xinner, BLKmode, used);
4098 /* If the partial register-part of the arg counts in its stack size,
4099 skip the part of stack space corresponding to the registers.
4100 Otherwise, start copying to the beginning of the stack space,
4101 by setting SKIP to 0. */
4102 skip = (reg_parm_stack_space == 0) ? 0 : used;
4104 #ifdef PUSH_ROUNDING
4105 /* Do it with several push insns if that doesn't take lots of insns
4106 and if there is no difficulty with push insns that skip bytes
4107 on the stack for alignment purposes. */
4108 if (args_addr == 0
4109 && PUSH_ARGS
4110 && CONST_INT_P (size)
4111 && skip == 0
4112 && MEM_ALIGN (xinner) >= align
4113 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4114 /* Here we avoid the case of a structure whose weak alignment
4115 would force many pushes of small amounts of data,
4116 where such small pushes do rounding that causes trouble. */
4117 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4118 || align >= BIGGEST_ALIGNMENT
4119 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4120 == (align / BITS_PER_UNIT)))
4121 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4123 /* Push padding now if padding above and stack grows down,
4124 or if padding below and stack grows up.
4125 But if space already allocated, this has already been done. */
4126 if (extra && args_addr == 0
4127 && where_pad != none && where_pad != stack_direction)
4128 anti_adjust_stack (GEN_INT (extra));
4130 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4132 else
4133 #endif /* PUSH_ROUNDING */
4135 rtx target;
4137 /* Otherwise make space on the stack and copy the data
4138 to the address of that space. */
4140 /* Deduct words put into registers from the size we must copy. */
4141 if (partial != 0)
4143 if (CONST_INT_P (size))
4144 size = GEN_INT (INTVAL (size) - used);
4145 else
4146 size = expand_binop (GET_MODE (size), sub_optab, size,
4147 gen_int_mode (used, GET_MODE (size)),
4148 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4151 /* Get the address of the stack space.
4152 In this case, we do not deal with EXTRA separately.
4153 A single stack adjust will do. */
4154 if (! args_addr)
4156 temp = push_block (size, extra, where_pad == downward);
4157 extra = 0;
4159 else if (CONST_INT_P (args_so_far))
4160 temp = memory_address (BLKmode,
4161 plus_constant (Pmode, args_addr,
4162 skip + INTVAL (args_so_far)));
4163 else
4164 temp = memory_address (BLKmode,
4165 plus_constant (Pmode,
4166 gen_rtx_PLUS (Pmode,
4167 args_addr,
4168 args_so_far),
4169 skip));
4171 if (!ACCUMULATE_OUTGOING_ARGS)
4173 /* If the source is referenced relative to the stack pointer,
4174 copy it to another register to stabilize it. We do not need
4175 to do this if we know that we won't be changing sp. */
4177 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4178 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4179 temp = copy_to_reg (temp);
4182 target = gen_rtx_MEM (BLKmode, temp);
4184 /* We do *not* set_mem_attributes here, because incoming arguments
4185 may overlap with sibling call outgoing arguments and we cannot
4186 allow reordering of reads from function arguments with stores
4187 to outgoing arguments of sibling calls. We do, however, want
4188 to record the alignment of the stack slot. */
4189 /* ALIGN may well be better aligned than TYPE, e.g. due to
4190 PARM_BOUNDARY. Assume the caller isn't lying. */
4191 set_mem_align (target, align);
4193 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4196 else if (partial > 0)
4198 /* Scalar partly in registers. */
4200 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4201 int i;
4202 int not_stack;
4203 /* # bytes of start of argument
4204 that we must make space for but need not store. */
4205 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4206 int args_offset = INTVAL (args_so_far);
4207 int skip;
4209 /* Push padding now if padding above and stack grows down,
4210 or if padding below and stack grows up.
4211 But if space already allocated, this has already been done. */
4212 if (extra && args_addr == 0
4213 && where_pad != none && where_pad != stack_direction)
4214 anti_adjust_stack (GEN_INT (extra));
4216 /* If we make space by pushing it, we might as well push
4217 the real data. Otherwise, we can leave OFFSET nonzero
4218 and leave the space uninitialized. */
4219 if (args_addr == 0)
4220 offset = 0;
4222 /* Now NOT_STACK gets the number of words that we don't need to
4223 allocate on the stack. Convert OFFSET to words too. */
4224 not_stack = (partial - offset) / UNITS_PER_WORD;
4225 offset /= UNITS_PER_WORD;
4227 /* If the partial register-part of the arg counts in its stack size,
4228 skip the part of stack space corresponding to the registers.
4229 Otherwise, start copying to the beginning of the stack space,
4230 by setting SKIP to 0. */
4231 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4233 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4234 x = validize_mem (force_const_mem (mode, x));
4236 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4237 SUBREGs of such registers are not allowed. */
4238 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4239 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4240 x = copy_to_reg (x);
4242 /* Loop over all the words allocated on the stack for this arg. */
4243 /* We can do it by words, because any scalar bigger than a word
4244 has a size a multiple of a word. */
4245 #ifndef PUSH_ARGS_REVERSED
4246 for (i = not_stack; i < size; i++)
4247 #else
4248 for (i = size - 1; i >= not_stack; i--)
4249 #endif
4250 if (i >= not_stack + offset)
4251 emit_push_insn (operand_subword_force (x, i, mode),
4252 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4253 0, args_addr,
4254 GEN_INT (args_offset + ((i - not_stack + skip)
4255 * UNITS_PER_WORD)),
4256 reg_parm_stack_space, alignment_pad);
4258 else
4260 rtx addr;
4261 rtx dest;
4263 /* Push padding now if padding above and stack grows down,
4264 or if padding below and stack grows up.
4265 But if space already allocated, this has already been done. */
4266 if (extra && args_addr == 0
4267 && where_pad != none && where_pad != stack_direction)
4268 anti_adjust_stack (GEN_INT (extra));
4270 #ifdef PUSH_ROUNDING
4271 if (args_addr == 0 && PUSH_ARGS)
4272 emit_single_push_insn (mode, x, type);
4273 else
4274 #endif
4276 if (CONST_INT_P (args_so_far))
4277 addr
4278 = memory_address (mode,
4279 plus_constant (Pmode, args_addr,
4280 INTVAL (args_so_far)));
4281 else
4282 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4283 args_so_far));
4284 dest = gen_rtx_MEM (mode, addr);
4286 /* We do *not* set_mem_attributes here, because incoming arguments
4287 may overlap with sibling call outgoing arguments and we cannot
4288 allow reordering of reads from function arguments with stores
4289 to outgoing arguments of sibling calls. We do, however, want
4290 to record the alignment of the stack slot. */
4291 /* ALIGN may well be better aligned than TYPE, e.g. due to
4292 PARM_BOUNDARY. Assume the caller isn't lying. */
4293 set_mem_align (dest, align);
4295 emit_move_insn (dest, x);
4299 /* If part should go in registers, copy that part
4300 into the appropriate registers. Do this now, at the end,
4301 since mem-to-mem copies above may do function calls. */
4302 if (partial > 0 && reg != 0)
4304 /* Handle calls that pass values in multiple non-contiguous locations.
4305 The Irix 6 ABI has examples of this. */
4306 if (GET_CODE (reg) == PARALLEL)
4307 emit_group_load (reg, x, type, -1);
4308 else
4310 gcc_assert (partial % UNITS_PER_WORD == 0);
4311 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4315 if (extra && args_addr == 0 && where_pad == stack_direction)
4316 anti_adjust_stack (GEN_INT (extra));
4318 if (alignment_pad && args_addr == 0)
4319 anti_adjust_stack (alignment_pad);
4322 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4323 operations. */
4325 static rtx
4326 get_subtarget (rtx x)
4328 return (optimize
4329 || x == 0
4330 /* Only registers can be subtargets. */
4331 || !REG_P (x)
4332 /* Don't use hard regs to avoid extending their life. */
4333 || REGNO (x) < FIRST_PSEUDO_REGISTER
4334 ? 0 : x);
4337 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4338 FIELD is a bitfield. Returns true if the optimization was successful,
4339 and there's nothing else to do. */
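/* Illustrative only: given

     struct s { unsigned a : 4; unsigned b : 28; } x;
     x.b += 1;

   B is the topmost bitfield of its word, so the addition can be done
   by shifting the constant into place and using a single word-mode
   add with no masking, which is the kind of sequence this routine
   tries to emit.  */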
4341 static bool
4342 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4343 unsigned HOST_WIDE_INT bitpos,
4344 unsigned HOST_WIDE_INT bitregion_start,
4345 unsigned HOST_WIDE_INT bitregion_end,
4346 enum machine_mode mode1, rtx str_rtx,
4347 tree to, tree src)
4349 enum machine_mode str_mode = GET_MODE (str_rtx);
4350 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4351 tree op0, op1;
4352 rtx value, result;
4353 optab binop;
4354 gimple srcstmt;
4355 enum tree_code code;
4357 if (mode1 != VOIDmode
4358 || bitsize >= BITS_PER_WORD
4359 || str_bitsize > BITS_PER_WORD
4360 || TREE_SIDE_EFFECTS (to)
4361 || TREE_THIS_VOLATILE (to))
4362 return false;
4364 STRIP_NOPS (src);
4365 if (TREE_CODE (src) != SSA_NAME)
4366 return false;
4367 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4368 return false;
4370 srcstmt = get_gimple_for_ssa_name (src);
4371 if (!srcstmt
4372 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4373 return false;
4375 code = gimple_assign_rhs_code (srcstmt);
4377 op0 = gimple_assign_rhs1 (srcstmt);
4379 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4380 to find its initialization. Hopefully the initialization will
4381 be from a bitfield load. */
4382 if (TREE_CODE (op0) == SSA_NAME)
4384 gimple op0stmt = get_gimple_for_ssa_name (op0);
4386 /* We want to eventually have OP0 be the same as TO, which
4387 should be a bitfield. */
4388 if (!op0stmt
4389 || !is_gimple_assign (op0stmt)
4390 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4391 return false;
4392 op0 = gimple_assign_rhs1 (op0stmt);
4395 op1 = gimple_assign_rhs2 (srcstmt);
4397 if (!operand_equal_p (to, op0, 0))
4398 return false;
4400 if (MEM_P (str_rtx))
4402 unsigned HOST_WIDE_INT offset1;
4404 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4405 str_mode = word_mode;
4406 str_mode = get_best_mode (bitsize, bitpos,
4407 bitregion_start, bitregion_end,
4408 MEM_ALIGN (str_rtx), str_mode, 0);
4409 if (str_mode == VOIDmode)
4410 return false;
4411 str_bitsize = GET_MODE_BITSIZE (str_mode);
4413 offset1 = bitpos;
4414 bitpos %= str_bitsize;
4415 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4416 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4418 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4419 return false;
4421 /* If the bit field covers the whole REG/MEM, store_field
4422 will likely generate better code. */
4423 if (bitsize >= str_bitsize)
4424 return false;
4426 /* We can't handle fields split across multiple entities. */
4427 if (bitpos + bitsize > str_bitsize)
4428 return false;
4430 if (BYTES_BIG_ENDIAN)
4431 bitpos = str_bitsize - bitpos - bitsize;
4433 switch (code)
4435 case PLUS_EXPR:
4436 case MINUS_EXPR:
4437 /* For now, just optimize the case of the topmost bitfield
4438 where we don't need to do any masking and also
4439 1-bit bitfields where xor can be used.
4440 We might win by one instruction for the other bitfields
4441 too if insv/extv instructions aren't used, so that
4442 can be added later. */
4443 if (bitpos + bitsize != str_bitsize
4444 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4445 break;
4447 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4448 value = convert_modes (str_mode,
4449 TYPE_MODE (TREE_TYPE (op1)), value,
4450 TYPE_UNSIGNED (TREE_TYPE (op1)));
4452 /* We may be accessing data outside the field, which means
4453 we can alias adjacent data. */
4454 if (MEM_P (str_rtx))
4456 str_rtx = shallow_copy_rtx (str_rtx);
4457 set_mem_alias_set (str_rtx, 0);
4458 set_mem_expr (str_rtx, 0);
4461 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4462 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4464 value = expand_and (str_mode, value, const1_rtx, NULL);
4465 binop = xor_optab;
4467 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4468 result = expand_binop (str_mode, binop, str_rtx,
4469 value, str_rtx, 1, OPTAB_WIDEN);
4470 if (result != str_rtx)
4471 emit_move_insn (str_rtx, result);
4472 return true;
4474 case BIT_IOR_EXPR:
4475 case BIT_XOR_EXPR:
4476 if (TREE_CODE (op1) != INTEGER_CST)
4477 break;
4478 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4479 value = convert_modes (str_mode,
4480 TYPE_MODE (TREE_TYPE (op1)), value,
4481 TYPE_UNSIGNED (TREE_TYPE (op1)));
4483 /* We may be accessing data outside the field, which means
4484 we can alias adjacent data. */
4485 if (MEM_P (str_rtx))
4487 str_rtx = shallow_copy_rtx (str_rtx);
4488 set_mem_alias_set (str_rtx, 0);
4489 set_mem_expr (str_rtx, 0);
4492 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4493 if (bitpos + bitsize != str_bitsize)
4495 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4496 str_mode);
4497 value = expand_and (str_mode, value, mask, NULL_RTX);
4499 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4500 result = expand_binop (str_mode, binop, str_rtx,
4501 value, str_rtx, 1, OPTAB_WIDEN);
4502 if (result != str_rtx)
4503 emit_move_insn (str_rtx, result);
4504 return true;
4506 default:
4507 break;
4510 return false;
4513 /* In the C++ memory model, consecutive bit fields in a structure are
4514 considered one memory location.
4516 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4517 returns the bit range of consecutive bits in which this COMPONENT_REF
4518 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4519 and *OFFSET may be adjusted in the process.
4521 If the access does not need to be restricted, 0 is returned in both
4522 *BITSTART and *BITEND. */
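/* Illustrative only: given

     struct s { char c; int f1 : 3; int f2 : 10; char d; } x;

   a store to x.f1 must not touch C or D under the C++11 memory model,
   so *BITSTART and *BITEND are set to cover only the bit region shared
   by F1 and F2 (their common representative field).  */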
4524 static void
4525 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4526 unsigned HOST_WIDE_INT *bitend,
4527 tree exp,
4528 HOST_WIDE_INT *bitpos,
4529 tree *offset)
4531 HOST_WIDE_INT bitoffset;
4532 tree field, repr;
4534 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4536 field = TREE_OPERAND (exp, 1);
4537 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4538 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4539 need to limit the range we can access. */
4540 if (!repr)
4542 *bitstart = *bitend = 0;
4543 return;
4546 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4547 part of a larger bit field, then the representative does not serve any
4548 useful purpose. This can occur in Ada. */
4549 if (handled_component_p (TREE_OPERAND (exp, 0)))
4551 enum machine_mode rmode;
4552 HOST_WIDE_INT rbitsize, rbitpos;
4553 tree roffset;
4554 int unsignedp;
4555 int volatilep = 0;
4556 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4557 &roffset, &rmode, &unsignedp, &volatilep, false);
4558 if ((rbitpos % BITS_PER_UNIT) != 0)
4560 *bitstart = *bitend = 0;
4561 return;
4565 /* Compute the adjustment to bitpos from the offset of the field
4566 relative to the representative. DECL_FIELD_OFFSET of field and
4567 repr are the same by construction if they are not constants,
4568 see finish_bitfield_layout. */
4569 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4570 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4571 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4572 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4573 else
4574 bitoffset = 0;
4575 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4576 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4578 /* If the adjustment is larger than bitpos, we would have a negative bit
4579 position for the lower bound and this may wreak havoc later. Adjust
4580 offset and bitpos to make the lower bound non-negative in that case. */
4581 if (bitoffset > *bitpos)
4583 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4584 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4586 *bitpos += adjust;
4587 if (*offset == NULL_TREE)
4588 *offset = size_int (-adjust / BITS_PER_UNIT);
4589 else
4590 *offset
4591 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4592 *bitstart = 0;
4594 else
4595 *bitstart = *bitpos - bitoffset;
4597 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4600 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4601 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4602 DECL_RTL was not set yet, return NORTL. */
4604 static inline bool
4605 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4607 if (TREE_CODE (addr) != ADDR_EXPR)
4608 return false;
4610 tree base = TREE_OPERAND (addr, 0);
4612 if (!DECL_P (base)
4613 || TREE_ADDRESSABLE (base)
4614 || DECL_MODE (base) == BLKmode)
4615 return false;
4617 if (!DECL_RTL_SET_P (base))
4618 return nortl;
4620 return (!MEM_P (DECL_RTL (base)));
4623 /* Returns true if the MEM_REF REF refers to an object that does not
4624 reside in memory and has non-BLKmode. */
4626 static inline bool
4627 mem_ref_refers_to_non_mem_p (tree ref)
4629 tree base = TREE_OPERAND (ref, 0);
4630 return addr_expr_of_non_mem_decl_p_1 (base, false);
4633 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4634 addressable. This is very much like mem_ref_refers_to_non_mem_p,
4635 but instead of the MEM_REF, it takes its base, and it doesn't
4636 assume a DECL is in memory just because its RTL is not set yet. */
4638 bool
4639 addr_expr_of_non_mem_decl_p (tree op)
4641 return addr_expr_of_non_mem_decl_p_1 (op, true);
4644 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4645 is true, try generating a nontemporal store. */
4647 void
4648 expand_assignment (tree to, tree from, bool nontemporal)
4650 rtx to_rtx = 0;
4651 rtx result;
4652 enum machine_mode mode;
4653 unsigned int align;
4654 enum insn_code icode;
4656 /* Don't crash if the lhs of the assignment was erroneous. */
4657 if (TREE_CODE (to) == ERROR_MARK)
4659 expand_normal (from);
4660 return;
4663 /* Optimize away no-op moves without side-effects. */
4664 if (operand_equal_p (to, from, 0))
4665 return;
4667 /* Handle misaligned stores. */
4668 mode = TYPE_MODE (TREE_TYPE (to));
4669 if ((TREE_CODE (to) == MEM_REF
4670 || TREE_CODE (to) == TARGET_MEM_REF)
4671 && mode != BLKmode
4672 && !mem_ref_refers_to_non_mem_p (to)
4673 && ((align = get_object_alignment (to))
4674 < GET_MODE_ALIGNMENT (mode))
4675 && (((icode = optab_handler (movmisalign_optab, mode))
4676 != CODE_FOR_nothing)
4677 || SLOW_UNALIGNED_ACCESS (mode, align)))
4679 rtx reg, mem;
4681 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4682 reg = force_not_mem (reg);
4683 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4685 if (icode != CODE_FOR_nothing)
4687 struct expand_operand ops[2];
4689 create_fixed_operand (&ops[0], mem);
4690 create_input_operand (&ops[1], reg, mode);
4691 /* The movmisalign<mode> pattern cannot fail, else the assignment
4692 would silently be omitted. */
4693 expand_insn (icode, 2, ops);
4695 else
4696 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4697 0, 0, 0, mode, reg);
4698 return;
4701 /* Assignment of a structure component needs special treatment
4702 if the structure component's rtx is not simply a MEM.
4703 Assignment of an array element at a constant index, and assignment of
4704 an array element in an unaligned packed structure field, has the same
4705 problem. Same for (partially) storing into a non-memory object. */
4706 if (handled_component_p (to)
4707 || (TREE_CODE (to) == MEM_REF
4708 && mem_ref_refers_to_non_mem_p (to))
4709 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4711 enum machine_mode mode1;
4712 HOST_WIDE_INT bitsize, bitpos;
4713 unsigned HOST_WIDE_INT bitregion_start = 0;
4714 unsigned HOST_WIDE_INT bitregion_end = 0;
4715 tree offset;
4716 int unsignedp;
4717 int volatilep = 0;
4718 tree tem;
4720 push_temp_slots ();
4721 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4722 &unsignedp, &volatilep, true);
4724 /* Make sure bitpos is not negative, it can wreak havoc later. */
4725 if (bitpos < 0)
4727 gcc_assert (offset == NULL_TREE);
4728 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4729 ? 3 : exact_log2 (BITS_PER_UNIT)));
4730 bitpos &= BITS_PER_UNIT - 1;
4733 if (TREE_CODE (to) == COMPONENT_REF
4734 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4735 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4737 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4739 /* If the bitfield is volatile, we want to access it in the
4740 field's mode, not the computed mode.
4741 If a MEM has VOIDmode (external with incomplete type),
4742 use BLKmode for it instead. */
4743 if (MEM_P (to_rtx))
4745 if (volatilep && flag_strict_volatile_bitfields > 0)
4746 to_rtx = adjust_address (to_rtx, mode1, 0);
4747 else if (GET_MODE (to_rtx) == VOIDmode)
4748 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4751 if (offset != 0)
4753 enum machine_mode address_mode;
4754 rtx offset_rtx;
4756 if (!MEM_P (to_rtx))
4758 /* We can get constant negative offsets into arrays with broken
4759 user code. Translate this to a trap instead of ICEing. */
4760 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4761 expand_builtin_trap ();
4762 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4765 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4766 address_mode = get_address_mode (to_rtx);
4767 if (GET_MODE (offset_rtx) != address_mode)
4768 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4770 /* A constant address in TO_RTX can have VOIDmode, we must not try
4771 to call force_reg for that case. Avoid that case. */
4772 if (MEM_P (to_rtx)
4773 && GET_MODE (to_rtx) == BLKmode
4774 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4775 && bitsize > 0
4776 && (bitpos % bitsize) == 0
4777 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4778 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4780 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4781 bitpos = 0;
4784 to_rtx = offset_address (to_rtx, offset_rtx,
4785 highest_pow2_factor_for_target (to,
4786 offset));
4789 /* No action is needed if the target is not a memory and the field
4790 lies completely outside that target. This can occur if the source
4791 code contains an out-of-bounds access to a small array. */
4792 if (!MEM_P (to_rtx)
4793 && GET_MODE (to_rtx) != BLKmode
4794 && (unsigned HOST_WIDE_INT) bitpos
4795 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4797 expand_normal (from);
4798 result = NULL;
4800 /* Handle expand_expr of a complex value returning a CONCAT. */
4801 else if (GET_CODE (to_rtx) == CONCAT)
4803 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4804 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4805 && bitpos == 0
4806 && bitsize == mode_bitsize)
4807 result = store_expr (from, to_rtx, false, nontemporal);
4808 else if (bitsize == mode_bitsize / 2
4809 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4810 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4811 nontemporal);
4812 else if (bitpos + bitsize <= mode_bitsize / 2)
4813 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4814 bitregion_start, bitregion_end,
4815 mode1, from,
4816 get_alias_set (to), nontemporal);
4817 else if (bitpos >= mode_bitsize / 2)
4818 result = store_field (XEXP (to_rtx, 1), bitsize,
4819 bitpos - mode_bitsize / 2,
4820 bitregion_start, bitregion_end,
4821 mode1, from,
4822 get_alias_set (to), nontemporal);
4823 else if (bitpos == 0 && bitsize == mode_bitsize)
4825 rtx from_rtx;
4826 result = expand_normal (from);
4827 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4828 TYPE_MODE (TREE_TYPE (from)), 0);
4829 emit_move_insn (XEXP (to_rtx, 0),
4830 read_complex_part (from_rtx, false));
4831 emit_move_insn (XEXP (to_rtx, 1),
4832 read_complex_part (from_rtx, true));
4834 else
4836 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4837 GET_MODE_SIZE (GET_MODE (to_rtx)));
4838 write_complex_part (temp, XEXP (to_rtx, 0), false);
4839 write_complex_part (temp, XEXP (to_rtx, 1), true);
4840 result = store_field (temp, bitsize, bitpos,
4841 bitregion_start, bitregion_end,
4842 mode1, from,
4843 get_alias_set (to), nontemporal);
4844 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4845 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4848 else
4850 if (MEM_P (to_rtx))
4852 /* If the field is at offset zero, we could have been given the
4853 DECL_RTX of the parent struct. Don't munge it. */
4854 to_rtx = shallow_copy_rtx (to_rtx);
4855 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4856 if (volatilep)
4857 MEM_VOLATILE_P (to_rtx) = 1;
4860 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4861 bitregion_start, bitregion_end,
4862 mode1,
4863 to_rtx, to, from))
4864 result = NULL;
4865 else
4866 result = store_field (to_rtx, bitsize, bitpos,
4867 bitregion_start, bitregion_end,
4868 mode1, from,
4869 get_alias_set (to), nontemporal);
4872 if (result)
4873 preserve_temp_slots (result);
4874 pop_temp_slots ();
4875 return;
4878 /* If the rhs is a function call and its value is not an aggregate,
4879 call the function before we start to compute the lhs.
4880 This is needed for correct code for cases such as
4881 val = setjmp (buf) on machines where reference to val
4882 requires loading up part of an address in a separate insn.
4884 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4885 since it might be a promoted variable where the zero- or sign- extension
4886 needs to be done. Handling this in the normal way is safe because no
4887 computation is done before the call. The same is true for SSA names. */
4888 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4889 && COMPLETE_TYPE_P (TREE_TYPE (from))
4890 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4891 && ! (((TREE_CODE (to) == VAR_DECL
4892 || TREE_CODE (to) == PARM_DECL
4893 || TREE_CODE (to) == RESULT_DECL)
4894 && REG_P (DECL_RTL (to)))
4895 || TREE_CODE (to) == SSA_NAME))
4897 rtx value;
4899 push_temp_slots ();
4900 value = expand_normal (from);
4901 if (to_rtx == 0)
4902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4904 /* Handle calls that return values in multiple non-contiguous locations.
4905 The Irix 6 ABI has examples of this. */
4906 if (GET_CODE (to_rtx) == PARALLEL)
4908 if (GET_CODE (value) == PARALLEL)
4909 emit_group_move (to_rtx, value);
4910 else
4911 emit_group_load (to_rtx, value, TREE_TYPE (from),
4912 int_size_in_bytes (TREE_TYPE (from)));
4914 else if (GET_CODE (value) == PARALLEL)
4915 emit_group_store (to_rtx, value, TREE_TYPE (from),
4916 int_size_in_bytes (TREE_TYPE (from)));
4917 else if (GET_MODE (to_rtx) == BLKmode)
4919 /* Handle calls that return BLKmode values in registers. */
4920 if (REG_P (value))
4921 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4922 else
4923 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4925 else
4927 if (POINTER_TYPE_P (TREE_TYPE (to)))
4928 value = convert_memory_address_addr_space
4929 (GET_MODE (to_rtx), value,
4930 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4932 emit_move_insn (to_rtx, value);
4934 preserve_temp_slots (to_rtx);
4935 pop_temp_slots ();
4936 return;
4939 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4940 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4942 /* Don't move directly into a return register. */
4943 if (TREE_CODE (to) == RESULT_DECL
4944 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4946 rtx temp;
4948 push_temp_slots ();
4950 /* If the source is itself a return value, it still is in a pseudo at
4951 this point so we can move it back to the return register directly. */
4952 if (REG_P (to_rtx)
4953 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4954 && TREE_CODE (from) != CALL_EXPR)
4955 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4956 else
4957 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4959 /* Handle calls that return values in multiple non-contiguous locations.
4960 The Irix 6 ABI has examples of this. */
4961 if (GET_CODE (to_rtx) == PARALLEL)
4963 if (GET_CODE (temp) == PARALLEL)
4964 emit_group_move (to_rtx, temp);
4965 else
4966 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4967 int_size_in_bytes (TREE_TYPE (from)));
4969 else if (temp)
4970 emit_move_insn (to_rtx, temp);
4972 preserve_temp_slots (to_rtx);
4973 pop_temp_slots ();
4974 return;
4977 /* In case we are returning the contents of an object which overlaps
4978 the place the value is being stored, use a safe function when copying
4979 a value through a pointer into a structure value return block. */
4980 if (TREE_CODE (to) == RESULT_DECL
4981 && TREE_CODE (from) == INDIRECT_REF
4982 && ADDR_SPACE_GENERIC_P
4983 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4984 && refs_may_alias_p (to, from)
4985 && cfun->returns_struct
4986 && !cfun->returns_pcc_struct)
4988 rtx from_rtx, size;
4990 push_temp_slots ();
4991 size = expr_size (from);
4992 from_rtx = expand_normal (from);
4994 emit_library_call (memmove_libfunc, LCT_NORMAL,
4995 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4996 XEXP (from_rtx, 0), Pmode,
4997 convert_to_mode (TYPE_MODE (sizetype),
4998 size, TYPE_UNSIGNED (sizetype)),
4999 TYPE_MODE (sizetype));
5001 preserve_temp_slots (to_rtx);
5002 pop_temp_slots ();
5003 return;
5006 /* Compute FROM and store the value in the rtx we got. */
5008 push_temp_slots ();
5009 result = store_expr (from, to_rtx, 0, nontemporal);
5010 preserve_temp_slots (result);
5011 pop_temp_slots ();
5012 return;
5015 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
5016 succeeded, false otherwise. */
5018 bool
5019 emit_storent_insn (rtx to, rtx from)
5021 struct expand_operand ops[2];
5022 enum machine_mode mode = GET_MODE (to);
5023 enum insn_code code = optab_handler (storent_optab, mode);
5025 if (code == CODE_FOR_nothing)
5026 return false;
5028 create_fixed_operand (&ops[0], to);
5029 create_input_operand (&ops[1], from, mode);
5030 return maybe_expand_insn (code, 2, ops);
5033 /* Generate code for computing expression EXP,
5034 and storing the value into TARGET.
5036 If the mode is BLKmode then we may return TARGET itself.
5037 It turns out that in BLKmode it doesn't cause a problem,
5038 because C has no operators that could combine two different
5039 assignments into the same BLKmode object with different values
5040 with no sequence point. Will other languages need this to
5041 be more thorough?
5043 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5044 stack, and block moves may need to be treated specially.
5046 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5049 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5051 rtx temp;
5052 rtx alt_rtl = NULL_RTX;
5053 location_t loc = curr_insn_location ();
5055 if (VOID_TYPE_P (TREE_TYPE (exp)))
5057 /* C++ can generate ?: expressions with a throw expression in one
5058 branch and an rvalue in the other. Here, we resolve attempts to
5059 store the throw expression's nonexistent result. */
5060 gcc_assert (!call_param_p);
5061 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5062 return NULL_RTX;
5064 if (TREE_CODE (exp) == COMPOUND_EXPR)
5066 /* Perform first part of compound expression, then assign from second
5067 part. */
5068 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5069 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5070 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5071 nontemporal);
5073 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5075 /* For conditional expression, get safe form of the target. Then
5076 test the condition, doing the appropriate assignment on either
5077 side. This avoids the creation of unnecessary temporaries.
5078 For non-BLKmode, it is more efficient not to do this. */
5080 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5082 do_pending_stack_adjust ();
5083 NO_DEFER_POP;
5084 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5085 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5086 nontemporal);
5087 emit_jump_insn (gen_jump (lab2));
5088 emit_barrier ();
5089 emit_label (lab1);
5090 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5091 nontemporal);
5092 emit_label (lab2);
5093 OK_DEFER_POP;
5095 return NULL_RTX;
5097 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5098 /* If this is a scalar in a register that is stored in a wider mode
5099 than the declared mode, compute the result into its declared mode
5100 and then convert to the wider mode. Our value is the computed
5101 expression. */
5103 rtx inner_target = 0;
5105 /* We can do the conversion inside EXP, which will often result
5106 in some optimizations. Do the conversion in two steps: first
5107 change the signedness, if needed, then the extend. But don't
5108 do this if the type of EXP is a subtype of something else
5109 since then the conversion might involve more than just
5110 converting modes. */
5111 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5112 && TREE_TYPE (TREE_TYPE (exp)) == 0
5113 && GET_MODE_PRECISION (GET_MODE (target))
5114 == TYPE_PRECISION (TREE_TYPE (exp)))
5116 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5117 != SUBREG_PROMOTED_UNSIGNED_P (target))
5119 /* Some types, e.g. Fortran's logical*4, won't have a signed
5120 version, so use the mode instead. */
5121 tree ntype
5122 = (signed_or_unsigned_type_for
5123 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5124 if (ntype == NULL)
5125 ntype = lang_hooks.types.type_for_mode
5126 (TYPE_MODE (TREE_TYPE (exp)),
5127 SUBREG_PROMOTED_UNSIGNED_P (target));
5129 exp = fold_convert_loc (loc, ntype, exp);
5132 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5133 (GET_MODE (SUBREG_REG (target)),
5134 SUBREG_PROMOTED_UNSIGNED_P (target)),
5135 exp);
5137 inner_target = SUBREG_REG (target);
5140 temp = expand_expr (exp, inner_target, VOIDmode,
5141 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5143 /* If TEMP is a VOIDmode constant, use convert_modes to make
5144 sure that we properly convert it. */
5145 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5147 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5148 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5149 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5150 GET_MODE (target), temp,
5151 SUBREG_PROMOTED_UNSIGNED_P (target));
5154 convert_move (SUBREG_REG (target), temp,
5155 SUBREG_PROMOTED_UNSIGNED_P (target));
5157 return NULL_RTX;
5159 else if ((TREE_CODE (exp) == STRING_CST
5160 || (TREE_CODE (exp) == MEM_REF
5161 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5162 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5163 == STRING_CST
5164 && integer_zerop (TREE_OPERAND (exp, 1))))
5165 && !nontemporal && !call_param_p
5166 && MEM_P (target))
5168 /* Optimize initialization of an array with a STRING_CST. */
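      /* Illustrative only: for

           char buf[16] = "abc";

         the leading bytes of BUF (the string, NUL included, possibly
         rounded up for store_by_pieces) are written here and whatever
         remains of BUF is cleared with clear_storage below.  */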
5169 HOST_WIDE_INT exp_len, str_copy_len;
5170 rtx dest_mem;
5171 tree str = TREE_CODE (exp) == STRING_CST
5172 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5174 exp_len = int_expr_size (exp);
5175 if (exp_len <= 0)
5176 goto normal_expr;
5178 if (TREE_STRING_LENGTH (str) <= 0)
5179 goto normal_expr;
5181 str_copy_len = strlen (TREE_STRING_POINTER (str));
5182 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5183 goto normal_expr;
5185 str_copy_len = TREE_STRING_LENGTH (str);
5186 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5187 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5189 str_copy_len += STORE_MAX_PIECES - 1;
5190 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5192 str_copy_len = MIN (str_copy_len, exp_len);
5193 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5194 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5195 MEM_ALIGN (target), false))
5196 goto normal_expr;
5198 dest_mem = target;
5200 dest_mem = store_by_pieces (dest_mem,
5201 str_copy_len, builtin_strncpy_read_str,
5202 CONST_CAST (char *,
5203 TREE_STRING_POINTER (str)),
5204 MEM_ALIGN (target), false,
5205 exp_len > str_copy_len ? 1 : 0);
5206 if (exp_len > str_copy_len)
5207 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5208 GEN_INT (exp_len - str_copy_len),
5209 BLOCK_OP_NORMAL);
5210 return NULL_RTX;
5212 else
5214 rtx tmp_target;
5216 normal_expr:
5217 /* If we want to use a nontemporal store, force the value to
5218 register first. */
5219 tmp_target = nontemporal ? NULL_RTX : target;
5220 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5221 (call_param_p
5222 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5223 &alt_rtl);
5226 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5227 the same as that of TARGET, adjust the constant. This is needed, for
5228 example, in case it is a CONST_DOUBLE and we want only a word-sized
5229 value. */
5230 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5231 && TREE_CODE (exp) != ERROR_MARK
5232 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5233 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5234 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5236 /* If value was not generated in the target, store it there.
5237 Convert the value to TARGET's type first if necessary and emit the
5238 pending incrementations that have been queued when expanding EXP.
5239 Note that we cannot emit the whole queue blindly because this will
5240 effectively disable the POST_INC optimization later.
5242 If TEMP and TARGET compare equal according to rtx_equal_p, but
5243 one or both of them are volatile memory refs, we have to distinguish
5244 two cases:
5245 - expand_expr has used TARGET. In this case, we must not generate
5246 another copy. This can be detected by TARGET being equal according
5247 to == .
5248 - expand_expr has not used TARGET - that means that the source just
5249 happens to have the same RTX form. Since temp will have been created
5250 by expand_expr, it will compare unequal according to == .
5251 We must generate a copy in this case, to reach the correct number
5252 of volatile memory references. */
5254 if ((! rtx_equal_p (temp, target)
5255 || (temp != target && (side_effects_p (temp)
5256 || side_effects_p (target))))
5257 && TREE_CODE (exp) != ERROR_MARK
5258 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5259 but TARGET is not valid memory reference, TEMP will differ
5260 from TARGET although it is really the same location. */
5261 && !(alt_rtl
5262 && rtx_equal_p (alt_rtl, target)
5263 && !side_effects_p (alt_rtl)
5264 && !side_effects_p (target))
5265 /* If there's nothing to copy, don't bother. Don't call
5266 expr_size unless necessary, because the expr_size hook of some
5267 front ends (C++) must not be given objects that are not
5268 supposed to be bit-copied or bit-initialized. */
5269 && expr_size (exp) != const0_rtx)
5271 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5273 if (GET_MODE (target) == BLKmode)
5275 /* Handle calls that return BLKmode values in registers. */
5276 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5277 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5278 else
5279 store_bit_field (target,
5280 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5281 0, 0, 0, GET_MODE (temp), temp);
5283 else
5284 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5287 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5289 /* Handle copying a string constant into an array. The string
5290 constant may be shorter than the array. So copy just the string's
5291 actual length, and clear the rest. First get the size of the data
5292 type of the string, which is actually the size of the target. */
5293 rtx size = expr_size (exp);
5295 if (CONST_INT_P (size)
5296 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5297 emit_block_move (target, temp, size,
5298 (call_param_p
5299 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5300 else
5302 enum machine_mode pointer_mode
5303 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5304 enum machine_mode address_mode = get_address_mode (target);
5306 /* Compute the size of the data to copy from the string. */
5307 tree copy_size
5308 = size_binop_loc (loc, MIN_EXPR,
5309 make_tree (sizetype, size),
5310 size_int (TREE_STRING_LENGTH (exp)));
5311 rtx copy_size_rtx
5312 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5313 (call_param_p
5314 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5315 rtx label = 0;
5317 /* Copy that much. */
5318 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5319 TYPE_UNSIGNED (sizetype));
5320 emit_block_move (target, temp, copy_size_rtx,
5321 (call_param_p
5322 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5324 /* Figure out how much is left in TARGET that we have to clear.
5325 Do all calculations in pointer_mode. */
5326 if (CONST_INT_P (copy_size_rtx))
5328 size = plus_constant (address_mode, size,
5329 -INTVAL (copy_size_rtx));
5330 target = adjust_address (target, BLKmode,
5331 INTVAL (copy_size_rtx));
5333 else
5335 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5336 copy_size_rtx, NULL_RTX, 0,
5337 OPTAB_LIB_WIDEN);
5339 if (GET_MODE (copy_size_rtx) != address_mode)
5340 copy_size_rtx = convert_to_mode (address_mode,
5341 copy_size_rtx,
5342 TYPE_UNSIGNED (sizetype));
5344 target = offset_address (target, copy_size_rtx,
5345 highest_pow2_factor (copy_size));
5346 label = gen_label_rtx ();
5347 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5348 GET_MODE (size), 0, label);
5351 if (size != const0_rtx)
5352 clear_storage (target, size, BLOCK_OP_NORMAL);
5354 if (label)
5355 emit_label (label);
5358 /* Handle calls that return values in multiple non-contiguous locations.
5359 The Irix 6 ABI has examples of this. */
5360 else if (GET_CODE (target) == PARALLEL)
5362 if (GET_CODE (temp) == PARALLEL)
5363 emit_group_move (target, temp);
5364 else
5365 emit_group_load (target, temp, TREE_TYPE (exp),
5366 int_size_in_bytes (TREE_TYPE (exp)));
5368 else if (GET_CODE (temp) == PARALLEL)
5369 emit_group_store (target, temp, TREE_TYPE (exp),
5370 int_size_in_bytes (TREE_TYPE (exp)));
5371 else if (GET_MODE (temp) == BLKmode)
5372 emit_block_move (target, temp, expr_size (exp),
5373 (call_param_p
5374 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5375 /* If we emit a nontemporal store, there is nothing else to do. */
5376 else if (nontemporal && emit_storent_insn (target, temp))
5378 else
5380 temp = force_operand (temp, target);
5381 if (temp != target)
5382 emit_move_insn (target, temp);
5386 return NULL_RTX;
5389 /* Return true if field F of structure TYPE is a flexible array. */
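/* E.g. (illustrative) the trailing member DATA in

     struct s { int len; char data[]; };

   is such a field: it is last, its array type has a zero lower bound
   and no upper bound, and the enclosing struct still has a known
   size.  */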
5391 static bool
5392 flexible_array_member_p (const_tree f, const_tree type)
5394 const_tree tf;
5396 tf = TREE_TYPE (f);
5397 return (DECL_CHAIN (f) == NULL
5398 && TREE_CODE (tf) == ARRAY_TYPE
5399 && TYPE_DOMAIN (tf)
5400 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5401 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5402 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5403 && int_size_in_bytes (type) >= 0);
5406 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5407 must have in order for it to completely initialize a value of type TYPE.
5408 Return -1 if the number isn't known.
5410 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
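/* Illustrative only: for

     struct s { int a; int b[3]; };

   this returns 2 when FOR_CTOR_P (a constructor needs two top-level
   elements to initialize it completely) and 4 otherwise (an estimate
   of the total number of scalars).  */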
5412 static HOST_WIDE_INT
5413 count_type_elements (const_tree type, bool for_ctor_p)
5415 switch (TREE_CODE (type))
5417 case ARRAY_TYPE:
5419 tree nelts;
5421 nelts = array_type_nelts (type);
5422 if (nelts && host_integerp (nelts, 1))
5424 unsigned HOST_WIDE_INT n;
5426 n = tree_low_cst (nelts, 1) + 1;
5427 if (n == 0 || for_ctor_p)
5428 return n;
5429 else
5430 return n * count_type_elements (TREE_TYPE (type), false);
5432 return for_ctor_p ? -1 : 1;
5435 case RECORD_TYPE:
5437 unsigned HOST_WIDE_INT n;
5438 tree f;
5440 n = 0;
5441 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5442 if (TREE_CODE (f) == FIELD_DECL)
5444 if (!for_ctor_p)
5445 n += count_type_elements (TREE_TYPE (f), false);
5446 else if (!flexible_array_member_p (f, type))
5447 /* Don't count flexible arrays, which are not supposed
5448 to be initialized. */
5449 n += 1;
5452 return n;
5455 case UNION_TYPE:
5456 case QUAL_UNION_TYPE:
5458 tree f;
5459 HOST_WIDE_INT n, m;
5461 gcc_assert (!for_ctor_p);
5462 /* Estimate the number of scalars in each field and pick the
5463 maximum. Other estimates would do instead; the idea is simply
5464 to make sure that the estimate is not sensitive to the ordering
5465 of the fields. */
5466 n = 1;
5467 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5468 if (TREE_CODE (f) == FIELD_DECL)
5470 m = count_type_elements (TREE_TYPE (f), false);
5471 /* If the field doesn't span the whole union, add an extra
5472 scalar for the rest. */
5473 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5474 TYPE_SIZE (type)) != 1)
5475 m++;
5476 if (n < m)
5477 n = m;
5479 return n;
5482 case COMPLEX_TYPE:
5483 return 2;
5485 case VECTOR_TYPE:
5486 return TYPE_VECTOR_SUBPARTS (type);
5488 case INTEGER_TYPE:
5489 case REAL_TYPE:
5490 case FIXED_POINT_TYPE:
5491 case ENUMERAL_TYPE:
5492 case BOOLEAN_TYPE:
5493 case POINTER_TYPE:
5494 case OFFSET_TYPE:
5495 case REFERENCE_TYPE:
5496 case NULLPTR_TYPE:
5497 return 1;
5499 case ERROR_MARK:
5500 return 0;
5502 case VOID_TYPE:
5503 case METHOD_TYPE:
5504 case FUNCTION_TYPE:
5505 case LANG_TYPE:
5506 default:
5507 gcc_unreachable ();
5511 /* Helper for categorize_ctor_elements. Identical interface. */
5513 static bool
5514 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5515 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5517 unsigned HOST_WIDE_INT idx;
5518 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5519 tree value, purpose, elt_type;
5521 /* Whether CTOR is a valid constant initializer, in accordance with what
5522 initializer_constant_valid_p does. If inferred from the constructor
5523 elements, true until proven otherwise. */
5524 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5525 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5527 nz_elts = 0;
5528 init_elts = 0;
5529 num_fields = 0;
5530 elt_type = NULL_TREE;
5532 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5534 HOST_WIDE_INT mult = 1;
5536 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5538 tree lo_index = TREE_OPERAND (purpose, 0);
5539 tree hi_index = TREE_OPERAND (purpose, 1);
5541 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5542 mult = (tree_low_cst (hi_index, 1)
5543 - tree_low_cst (lo_index, 1) + 1);
5545 num_fields += mult;
5546 elt_type = TREE_TYPE (value);
5548 switch (TREE_CODE (value))
5550 case CONSTRUCTOR:
5552 HOST_WIDE_INT nz = 0, ic = 0;
5554 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5555 p_complete);
5557 nz_elts += mult * nz;
5558 init_elts += mult * ic;
5560 if (const_from_elts_p && const_p)
5561 const_p = const_elt_p;
5563 break;
5565 case INTEGER_CST:
5566 case REAL_CST:
5567 case FIXED_CST:
5568 if (!initializer_zerop (value))
5569 nz_elts += mult;
5570 init_elts += mult;
5571 break;
5573 case STRING_CST:
5574 nz_elts += mult * TREE_STRING_LENGTH (value);
5575 init_elts += mult * TREE_STRING_LENGTH (value);
5576 break;
5578 case COMPLEX_CST:
5579 if (!initializer_zerop (TREE_REALPART (value)))
5580 nz_elts += mult;
5581 if (!initializer_zerop (TREE_IMAGPART (value)))
5582 nz_elts += mult;
5583 init_elts += mult;
5584 break;
5586 case VECTOR_CST:
5588 unsigned i;
5589 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5591 tree v = VECTOR_CST_ELT (value, i);
5592 if (!initializer_zerop (v))
5593 nz_elts += mult;
5594 init_elts += mult;
5597 break;
5599 default:
5601 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5602 nz_elts += mult * tc;
5603 init_elts += mult * tc;
5605 if (const_from_elts_p && const_p)
5606 const_p = initializer_constant_valid_p (value, elt_type)
5607 != NULL_TREE;
5609 break;
5613 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5614 num_fields, elt_type))
5615 *p_complete = false;
5617 *p_nz_elts += nz_elts;
5618 *p_init_elts += init_elts;
5620 return const_p;
5623 /* Examine CTOR to discover:
5624 * how many scalar fields are set to nonzero values,
5625 and place it in *P_NZ_ELTS;
5626 * how many scalar fields in total are in CTOR,
5627 and place it in *P_INIT_ELTS.
5628 * whether the constructor is complete -- in the sense that every
5629 meaningful byte is explicitly given a value --
5630 and place it in *P_COMPLETE.
5632 Return whether or not CTOR is a valid static constant initializer, the same
5633 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5635 bool
5636 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5637 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5639 *p_nz_elts = 0;
5640 *p_init_elts = 0;
5641 *p_complete = true;
5643 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
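/* Editorial illustration (not from the GCC sources): for a CONSTRUCTOR such
   as the one a C front end might build for the hypothetical initializer

     int v[4] = { 1, 0, 2 };

   categorize_ctor_elements would report *P_NZ_ELTS == 2 (the two nonzero
   INTEGER_CSTs), *P_INIT_ELTS == 3 (elements explicitly present), and
   *P_COMPLETE == false, because only 3 of the 4 array elements are given a
   value at this level. */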
5646 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5647 of which had type LAST_TYPE. Each element was itself a complete
5648 initializer, in the sense that every meaningful byte was explicitly
5649 given a value. Return true if the same is true for the constructor
5650 as a whole. */
5652 bool
5653 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5654 const_tree last_type)
5656 if (TREE_CODE (type) == UNION_TYPE
5657 || TREE_CODE (type) == QUAL_UNION_TYPE)
5659 if (num_elts == 0)
5660 return false;
5662 gcc_assert (num_elts == 1 && last_type);
5664 /* ??? We could look at each element of the union, and find the
5665 largest element. Which would avoid comparing the size of the
5666 initialized element against any tail padding in the union.
5667 Doesn't seem worth the effort... */
5668 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5671 return count_type_elements (type, true) == num_elts;
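/* Editorial illustration (not from the GCC sources), assuming 1-byte char
   and 4-byte int: for

     union u { char c; int i; };

   an initializer that sets only C covers a single element whose size (8
   bits) differs from TYPE_SIZE of the union (32 bits), so
   complete_ctor_at_level_p returns false; initializing I instead would make
   it return true. */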
5674 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5676 static int
5677 mostly_zeros_p (const_tree exp)
5679 if (TREE_CODE (exp) == CONSTRUCTOR)
5681 HOST_WIDE_INT nz_elts, init_elts;
5682 bool complete_p;
5684 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5685 return !complete_p || nz_elts < init_elts / 4;
5688 return initializer_zerop (exp);
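/* Editorial illustration (not from the GCC sources): with the 3/4 rule
   above, a fully initialized constructor for an int[8] with exactly one
   nonzero element gives nz_elts == 1 and init_elts == 8, and since
   1 < 8 / 4 the whole object is considered mostly zero. Any constructor
   that is not complete is treated as mostly zero as well, since the
   implicit remainder is zero-filled. */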
5691 /* Return 1 if EXP contains all zeros. */
5693 static int
5694 all_zeros_p (const_tree exp)
5696 if (TREE_CODE (exp) == CONSTRUCTOR)
5698 HOST_WIDE_INT nz_elts, init_elts;
5699 bool complete_p;
5701 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5702 return nz_elts == 0;
5705 return initializer_zerop (exp);
5708 /* Helper function for store_constructor.
5709 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5710 CLEARED is as for store_constructor.
5711 ALIAS_SET is the alias set to use for any stores.
5713 This provides a recursive shortcut back to store_constructor when it isn't
5714 necessary to go through store_field. This is so that we can pass through
5715 the cleared field to let store_constructor know that we may not have to
5716 clear a substructure if the outer structure has already been cleared. */
5718 static void
5719 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5720 HOST_WIDE_INT bitpos, enum machine_mode mode,
5721 tree exp, int cleared, alias_set_type alias_set)
5723 if (TREE_CODE (exp) == CONSTRUCTOR
5724 /* We can only call store_constructor recursively if the size and
5725 bit position are on a byte boundary. */
5726 && bitpos % BITS_PER_UNIT == 0
5727 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5728 /* If we have a nonzero bitpos for a register target, then we just
5729 let store_field do the bitfield handling. This is unlikely to
5730 generate unnecessary clear instructions anyway. */
5731 && (bitpos == 0 || MEM_P (target)))
5733 if (MEM_P (target))
5734 target
5735 = adjust_address (target,
5736 GET_MODE (target) == BLKmode
5737 || 0 != (bitpos
5738 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5739 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5742 /* Update the alias set, if required. */
5743 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5744 && MEM_ALIAS_SET (target) != 0)
5746 target = copy_rtx (target);
5747 set_mem_alias_set (target, alias_set);
5750 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5752 else
5753 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5756 /* Store the value of constructor EXP into the rtx TARGET.
5757 TARGET is either a REG or a MEM; we know it cannot conflict, since
5758 safe_from_p has been called.
5759 CLEARED is true if TARGET is known to have been zeroed.
5760 SIZE is the number of bytes of TARGET we are allowed to modify: this
5761 may not be the same as the size of EXP if we are assigning to a field
5762 which has been packed to exclude padding bits. */
5764 static void
5765 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5767 tree type = TREE_TYPE (exp);
5768 #ifdef WORD_REGISTER_OPERATIONS
5769 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5770 #endif
5772 switch (TREE_CODE (type))
5774 case RECORD_TYPE:
5775 case UNION_TYPE:
5776 case QUAL_UNION_TYPE:
5778 unsigned HOST_WIDE_INT idx;
5779 tree field, value;
5781 /* If size is zero or the target is already cleared, do nothing. */
5782 if (size == 0 || cleared)
5783 cleared = 1;
5784 /* We either clear the aggregate or indicate the value is dead. */
5785 else if ((TREE_CODE (type) == UNION_TYPE
5786 || TREE_CODE (type) == QUAL_UNION_TYPE)
5787 && ! CONSTRUCTOR_ELTS (exp))
5788 /* If the constructor is empty, clear the union. */
5790 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5791 cleared = 1;
5794 /* If we are building a static constructor into a register,
5795 set the initial value as zero so we can fold the value into
5796 a constant. But if more than one register is involved,
5797 this probably loses. */
5798 else if (REG_P (target) && TREE_STATIC (exp)
5799 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5801 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5802 cleared = 1;
5805 /* If the constructor has fewer fields than the structure or
5806 if we are initializing the structure to mostly zeros, clear
5807 the whole structure first. Don't do this if TARGET is a
5808 register whose mode size isn't equal to SIZE since
5809 clear_storage can't handle this case. */
5810 else if (size > 0
5811 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5812 != fields_length (type))
5813 || mostly_zeros_p (exp))
5814 && (!REG_P (target)
5815 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5816 == size)))
5818 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5819 cleared = 1;
5822 if (REG_P (target) && !cleared)
5823 emit_clobber (target);
5825 /* Store each element of the constructor into the
5826 corresponding field of TARGET. */
5827 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5829 enum machine_mode mode;
5830 HOST_WIDE_INT bitsize;
5831 HOST_WIDE_INT bitpos = 0;
5832 tree offset;
5833 rtx to_rtx = target;
5835 /* Just ignore missing fields. We cleared the whole
5836 structure, above, if any fields are missing. */
5837 if (field == 0)
5838 continue;
5840 if (cleared && initializer_zerop (value))
5841 continue;
5843 if (host_integerp (DECL_SIZE (field), 1))
5844 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5845 else
5846 bitsize = -1;
5848 mode = DECL_MODE (field);
5849 if (DECL_BIT_FIELD (field))
5850 mode = VOIDmode;
5852 offset = DECL_FIELD_OFFSET (field);
5853 if (host_integerp (offset, 0)
5854 && host_integerp (bit_position (field), 0))
5856 bitpos = int_bit_position (field);
5857 offset = 0;
5859 else
5860 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5862 if (offset)
5864 enum machine_mode address_mode;
5865 rtx offset_rtx;
5867 offset
5868 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5869 make_tree (TREE_TYPE (exp),
5870 target));
5872 offset_rtx = expand_normal (offset);
5873 gcc_assert (MEM_P (to_rtx));
5875 address_mode = get_address_mode (to_rtx);
5876 if (GET_MODE (offset_rtx) != address_mode)
5877 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5879 to_rtx = offset_address (to_rtx, offset_rtx,
5880 highest_pow2_factor (offset));
5883 #ifdef WORD_REGISTER_OPERATIONS
5884 /* If this initializes a field that is smaller than a
5885 word, at the start of a word, try to widen it to a full
5886 word. This special case allows us to output C++ member
5887 function initializations in a form that the optimizers
5888 can understand. */
5889 if (REG_P (target)
5890 && bitsize < BITS_PER_WORD
5891 && bitpos % BITS_PER_WORD == 0
5892 && GET_MODE_CLASS (mode) == MODE_INT
5893 && TREE_CODE (value) == INTEGER_CST
5894 && exp_size >= 0
5895 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5897 tree type = TREE_TYPE (value);
5899 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5901 type = lang_hooks.types.type_for_mode
5902 (word_mode, TYPE_UNSIGNED (type));
5903 value = fold_convert (type, value);
5906 if (BYTES_BIG_ENDIAN)
5907 value
5908 = fold_build2 (LSHIFT_EXPR, type, value,
5909 build_int_cst (type,
5910 BITS_PER_WORD - bitsize));
5911 bitsize = BITS_PER_WORD;
5912 mode = word_mode;
5914 #endif
5916 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5917 && DECL_NONADDRESSABLE_P (field))
5919 to_rtx = copy_rtx (to_rtx);
5920 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5923 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5924 value, cleared,
5925 get_alias_set (TREE_TYPE (field)));
5927 break;
5929 case ARRAY_TYPE:
5931 tree value, index;
5932 unsigned HOST_WIDE_INT i;
5933 int need_to_clear;
5934 tree domain;
5935 tree elttype = TREE_TYPE (type);
5936 int const_bounds_p;
5937 HOST_WIDE_INT minelt = 0;
5938 HOST_WIDE_INT maxelt = 0;
5940 domain = TYPE_DOMAIN (type);
5941 const_bounds_p = (TYPE_MIN_VALUE (domain)
5942 && TYPE_MAX_VALUE (domain)
5943 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5944 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5946 /* If we have constant bounds for the range of the type, get them. */
5947 if (const_bounds_p)
5949 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5950 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5953 /* If the constructor has fewer elements than the array, clear
5954 the whole array first. Similarly if this is a static
5955 constructor of a non-BLKmode object. */
5956 if (cleared)
5957 need_to_clear = 0;
5958 else if (REG_P (target) && TREE_STATIC (exp))
5959 need_to_clear = 1;
5960 else
5962 unsigned HOST_WIDE_INT idx;
5963 tree index, value;
5964 HOST_WIDE_INT count = 0, zero_count = 0;
5965 need_to_clear = ! const_bounds_p;
5967 /* This loop is a more accurate version of the loop in
5968 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5969 is also needed to check for missing elements. */
5970 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5972 HOST_WIDE_INT this_node_count;
5974 if (need_to_clear)
5975 break;
5977 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5979 tree lo_index = TREE_OPERAND (index, 0);
5980 tree hi_index = TREE_OPERAND (index, 1);
5982 if (! host_integerp (lo_index, 1)
5983 || ! host_integerp (hi_index, 1))
5985 need_to_clear = 1;
5986 break;
5989 this_node_count = (tree_low_cst (hi_index, 1)
5990 - tree_low_cst (lo_index, 1) + 1);
5992 else
5993 this_node_count = 1;
5995 count += this_node_count;
5996 if (mostly_zeros_p (value))
5997 zero_count += this_node_count;
6000 /* Clear the entire array first if there are any missing
6001 elements, or if the incidence of zero elements is >=
6002 75%. */
6003 if (! need_to_clear
6004 && (count < maxelt - minelt + 1
6005 || 4 * zero_count >= 3 * count))
6006 need_to_clear = 1;
6009 if (need_to_clear && size > 0)
6011 if (REG_P (target))
6012 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6013 else
6014 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6015 cleared = 1;
6018 if (!cleared && REG_P (target))
6019 /* Inform later passes that the old value is dead. */
6020 emit_clobber (target);
6022 /* Store each element of the constructor into the
6023 corresponding element of TARGET, determined by counting the
6024 elements. */
6025 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6027 enum machine_mode mode;
6028 HOST_WIDE_INT bitsize;
6029 HOST_WIDE_INT bitpos;
6030 rtx xtarget = target;
6032 if (cleared && initializer_zerop (value))
6033 continue;
6035 mode = TYPE_MODE (elttype);
6036 if (mode == BLKmode)
6037 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6038 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6039 : -1);
6040 else
6041 bitsize = GET_MODE_BITSIZE (mode);
6043 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6045 tree lo_index = TREE_OPERAND (index, 0);
6046 tree hi_index = TREE_OPERAND (index, 1);
6047 rtx index_r, pos_rtx;
6048 HOST_WIDE_INT lo, hi, count;
6049 tree position;
6051 /* If the range is constant and "small", unroll the loop. */
6052 if (const_bounds_p
6053 && host_integerp (lo_index, 0)
6054 && host_integerp (hi_index, 0)
6055 && (lo = tree_low_cst (lo_index, 0),
6056 hi = tree_low_cst (hi_index, 0),
6057 count = hi - lo + 1,
6058 (!MEM_P (target)
6059 || count <= 2
6060 || (host_integerp (TYPE_SIZE (elttype), 1)
6061 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6062 <= 40 * 8)))))
6064 lo -= minelt; hi -= minelt;
6065 for (; lo <= hi; lo++)
6067 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6069 if (MEM_P (target)
6070 && !MEM_KEEP_ALIAS_SET_P (target)
6071 && TREE_CODE (type) == ARRAY_TYPE
6072 && TYPE_NONALIASED_COMPONENT (type))
6074 target = copy_rtx (target);
6075 MEM_KEEP_ALIAS_SET_P (target) = 1;
6078 store_constructor_field
6079 (target, bitsize, bitpos, mode, value, cleared,
6080 get_alias_set (elttype));
6083 else
6085 rtx loop_start = gen_label_rtx ();
6086 rtx loop_end = gen_label_rtx ();
6087 tree exit_cond;
6089 expand_normal (hi_index);
6091 index = build_decl (EXPR_LOCATION (exp),
6092 VAR_DECL, NULL_TREE, domain);
6093 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6094 SET_DECL_RTL (index, index_r);
6095 store_expr (lo_index, index_r, 0, false);
6097 /* Build the head of the loop. */
6098 do_pending_stack_adjust ();
6099 emit_label (loop_start);
6101 /* Assign value to element index. */
6102 position =
6103 fold_convert (ssizetype,
6104 fold_build2 (MINUS_EXPR,
6105 TREE_TYPE (index),
6106 index,
6107 TYPE_MIN_VALUE (domain)));
6109 position =
6110 size_binop (MULT_EXPR, position,
6111 fold_convert (ssizetype,
6112 TYPE_SIZE_UNIT (elttype)));
6114 pos_rtx = expand_normal (position);
6115 xtarget = offset_address (target, pos_rtx,
6116 highest_pow2_factor (position));
6117 xtarget = adjust_address (xtarget, mode, 0);
6118 if (TREE_CODE (value) == CONSTRUCTOR)
6119 store_constructor (value, xtarget, cleared,
6120 bitsize / BITS_PER_UNIT);
6121 else
6122 store_expr (value, xtarget, 0, false);
6124 /* Generate a conditional jump to exit the loop. */
6125 exit_cond = build2 (LT_EXPR, integer_type_node,
6126 index, hi_index);
6127 jumpif (exit_cond, loop_end, -1);
6129 /* Update the loop counter, and jump to the head of
6130 the loop. */
6131 expand_assignment (index,
6132 build2 (PLUS_EXPR, TREE_TYPE (index),
6133 index, integer_one_node),
6134 false);
6136 emit_jump (loop_start);
6138 /* Build the end of the loop. */
6139 emit_label (loop_end);
6142 else if ((index != 0 && ! host_integerp (index, 0))
6143 || ! host_integerp (TYPE_SIZE (elttype), 1))
6145 tree position;
6147 if (index == 0)
6148 index = ssize_int (1);
6150 if (minelt)
6151 index = fold_convert (ssizetype,
6152 fold_build2 (MINUS_EXPR,
6153 TREE_TYPE (index),
6154 index,
6155 TYPE_MIN_VALUE (domain)));
6157 position =
6158 size_binop (MULT_EXPR, index,
6159 fold_convert (ssizetype,
6160 TYPE_SIZE_UNIT (elttype)));
6161 xtarget = offset_address (target,
6162 expand_normal (position),
6163 highest_pow2_factor (position));
6164 xtarget = adjust_address (xtarget, mode, 0);
6165 store_expr (value, xtarget, 0, false);
6167 else
6169 if (index != 0)
6170 bitpos = ((tree_low_cst (index, 0) - minelt)
6171 * tree_low_cst (TYPE_SIZE (elttype), 1));
6172 else
6173 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6175 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6176 && TREE_CODE (type) == ARRAY_TYPE
6177 && TYPE_NONALIASED_COMPONENT (type))
6179 target = copy_rtx (target);
6180 MEM_KEEP_ALIAS_SET_P (target) = 1;
6182 store_constructor_field (target, bitsize, bitpos, mode, value,
6183 cleared, get_alias_set (elttype));
6186 break;
6189 case VECTOR_TYPE:
6191 unsigned HOST_WIDE_INT idx;
6192 constructor_elt *ce;
6193 int i;
6194 int need_to_clear;
6195 int icode = CODE_FOR_nothing;
6196 tree elttype = TREE_TYPE (type);
6197 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6198 enum machine_mode eltmode = TYPE_MODE (elttype);
6199 HOST_WIDE_INT bitsize;
6200 HOST_WIDE_INT bitpos;
6201 rtvec vector = NULL;
6202 unsigned n_elts;
6203 alias_set_type alias;
6205 gcc_assert (eltmode != BLKmode);
6207 n_elts = TYPE_VECTOR_SUBPARTS (type);
6208 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6210 enum machine_mode mode = GET_MODE (target);
6212 icode = (int) optab_handler (vec_init_optab, mode);
6213 if (icode != CODE_FOR_nothing)
6215 unsigned int i;
6217 vector = rtvec_alloc (n_elts);
6218 for (i = 0; i < n_elts; i++)
6219 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6223 /* If the constructor has fewer elements than the vector,
6224 clear the whole vector first. Similarly if this is a static
6225 constructor of a non-BLKmode object. */
6226 if (cleared)
6227 need_to_clear = 0;
6228 else if (REG_P (target) && TREE_STATIC (exp))
6229 need_to_clear = 1;
6230 else
6232 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6233 tree value;
6235 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6237 int n_elts_here = tree_low_cst
6238 (int_const_binop (TRUNC_DIV_EXPR,
6239 TYPE_SIZE (TREE_TYPE (value)),
6240 TYPE_SIZE (elttype)), 1);
6242 count += n_elts_here;
6243 if (mostly_zeros_p (value))
6244 zero_count += n_elts_here;
6247 /* Clear the entire vector first if there are any missing elements,
6248 or if the incidence of zero elements is >= 75%. */
6249 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6252 if (need_to_clear && size > 0 && !vector)
6254 if (REG_P (target))
6255 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6256 else
6257 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6258 cleared = 1;
6261 /* Inform later passes that the old value is dead. */
6262 if (!cleared && !vector && REG_P (target))
6263 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6265 if (MEM_P (target))
6266 alias = MEM_ALIAS_SET (target);
6267 else
6268 alias = get_alias_set (elttype);
6270 /* Store each element of the constructor into the corresponding
6271 element of TARGET, determined by counting the elements. */
6272 for (idx = 0, i = 0;
6273 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6274 idx++, i += bitsize / elt_size)
6276 HOST_WIDE_INT eltpos;
6277 tree value = ce->value;
6279 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6280 if (cleared && initializer_zerop (value))
6281 continue;
6283 if (ce->index)
6284 eltpos = tree_low_cst (ce->index, 1);
6285 else
6286 eltpos = i;
6288 if (vector)
6290 /* Vector CONSTRUCTORs should only be built from smaller
6291 vectors in the case of BLKmode vectors. */
6292 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6293 RTVEC_ELT (vector, eltpos)
6294 = expand_normal (value);
6296 else
6298 enum machine_mode value_mode =
6299 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6300 ? TYPE_MODE (TREE_TYPE (value))
6301 : eltmode;
6302 bitpos = eltpos * elt_size;
6303 store_constructor_field (target, bitsize, bitpos, value_mode,
6304 value, cleared, alias);
6308 if (vector)
6309 emit_insn (GEN_FCN (icode)
6310 (target,
6311 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6312 break;
6315 default:
6316 gcc_unreachable ();
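/* Editorial illustration (not from the GCC sources): the ARRAY_TYPE case
   above has to cope with RANGE_EXPR indices, such as the GNU C designated
   range initializer

     int a[100] = { [3 ... 90] = 7 };

   When the bounds are constant and the covered storage is small, the stores
   are simply unrolled; otherwise an index register is set up and a small
   runtime loop is emitted (the loop_start/loop_end labels above). The array
   A and its bounds are only an example. */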
6320 /* Store the value of EXP (an expression tree)
6321 into a subfield of TARGET which has mode MODE and occupies
6322 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6323 If MODE is VOIDmode, it means that we are storing into a bit-field.
6325 BITREGION_START is the bit position of the first bitfield in this region.
6326 BITREGION_END is the bit position of the ending bitfield in this region.
6327 These two values are 0 if the C++ memory model does not apply,
6328 or we are not interested in keeping track of bitfield regions.
6330 Always return const0_rtx unless we have something particular to
6331 return.
6333 ALIAS_SET is the alias set for the destination. This value will
6334 (in general) be different from that for TARGET, since TARGET is a
6335 reference to the containing structure.
6337 If NONTEMPORAL is true, try generating a nontemporal store. */
6339 static rtx
6340 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6341 unsigned HOST_WIDE_INT bitregion_start,
6342 unsigned HOST_WIDE_INT bitregion_end,
6343 enum machine_mode mode, tree exp,
6344 alias_set_type alias_set, bool nontemporal)
6346 if (TREE_CODE (exp) == ERROR_MARK)
6347 return const0_rtx;
6349 /* If we have nothing to store, do nothing unless the expression has
6350 side-effects. */
6351 if (bitsize == 0)
6352 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6354 if (GET_CODE (target) == CONCAT)
6356 /* We're storing into a struct containing a single __complex. */
6358 gcc_assert (!bitpos);
6359 return store_expr (exp, target, 0, nontemporal);
6362 /* If the structure is in a register or if the component
6363 is a bit field, we cannot use addressing to access it.
6364 Use bit-field techniques or SUBREG to store in it. */
6366 if (mode == VOIDmode
6367 || (mode != BLKmode && ! direct_store[(int) mode]
6368 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6369 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6370 || REG_P (target)
6371 || GET_CODE (target) == SUBREG
6372 /* If the field isn't aligned enough to store as an ordinary memref,
6373 store it as a bit field. */
6374 || (mode != BLKmode
6375 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6376 || bitpos % GET_MODE_ALIGNMENT (mode))
6377 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6378 || (bitpos % BITS_PER_UNIT != 0)))
6379 || (bitsize >= 0 && mode != BLKmode
6380 && GET_MODE_BITSIZE (mode) > bitsize)
6381 /* If the RHS and field are a constant size and the size of the
6382 RHS isn't the same size as the bitfield, we must use bitfield
6383 operations. */
6384 || (bitsize >= 0
6385 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6386 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6387 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6388 decl we must use bitfield operations. */
6389 || (bitsize >= 0
6390 && TREE_CODE (exp) == MEM_REF
6391 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6392 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6393 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6394 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6396 rtx temp;
6397 gimple nop_def;
6399 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6400 implies a mask operation. If the precision is the same size as
6401 the field we're storing into, that mask is redundant. This is
6402 particularly common with bit field assignments generated by the
6403 C front end. */
6404 nop_def = get_def_for_expr (exp, NOP_EXPR);
6405 if (nop_def)
6407 tree type = TREE_TYPE (exp);
6408 if (INTEGRAL_TYPE_P (type)
6409 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6410 && bitsize == TYPE_PRECISION (type))
6412 tree op = gimple_assign_rhs1 (nop_def);
6413 type = TREE_TYPE (op);
6414 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6415 exp = op;
6419 temp = expand_normal (exp);
6421 /* If BITSIZE is narrower than the size of the type of EXP
6422 we will be narrowing TEMP. Normally, what's wanted are the
6423 low-order bits. However, if EXP's type is a record and this is
6424 a big-endian machine, we want the upper BITSIZE bits. */
6425 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6426 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6427 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6428 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6429 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6430 NULL_RTX, 1);
6432 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6433 if (mode != VOIDmode && mode != BLKmode
6434 && mode != TYPE_MODE (TREE_TYPE (exp)))
6435 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6437 /* If the modes of TEMP and TARGET are both BLKmode, both
6438 must be in memory and BITPOS must be aligned on a byte
6439 boundary. If so, we simply do a block copy. Likewise
6440 for a BLKmode-like TARGET. */
6441 if (GET_MODE (temp) == BLKmode
6442 && (GET_MODE (target) == BLKmode
6443 || (MEM_P (target)
6444 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6445 && (bitpos % BITS_PER_UNIT) == 0
6446 && (bitsize % BITS_PER_UNIT) == 0)))
6448 gcc_assert (MEM_P (target) && MEM_P (temp)
6449 && (bitpos % BITS_PER_UNIT) == 0);
6451 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6452 emit_block_move (target, temp,
6453 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6454 / BITS_PER_UNIT),
6455 BLOCK_OP_NORMAL);
6457 return const0_rtx;
6460 /* Handle calls that return values in multiple non-contiguous locations.
6461 The Irix 6 ABI has examples of this. */
6462 if (GET_CODE (temp) == PARALLEL)
6464 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6465 rtx temp_target;
6466 if (mode == BLKmode)
6467 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6468 temp_target = gen_reg_rtx (mode);
6469 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6470 temp = temp_target;
6472 else if (mode == BLKmode)
6474 /* Handle calls that return BLKmode values in registers. */
6475 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6477 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6478 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6479 temp = temp_target;
6481 else
6483 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6484 rtx temp_target;
6485 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6486 temp_target = gen_reg_rtx (mode);
6487 temp_target
6488 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6489 temp_target, mode, mode);
6490 temp = temp_target;
6494 /* Store the value in the bitfield. */
6495 store_bit_field (target, bitsize, bitpos,
6496 bitregion_start, bitregion_end,
6497 mode, temp);
6499 return const0_rtx;
6501 else
6503 /* Now build a reference to just the desired component. */
6504 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6506 if (to_rtx == target)
6507 to_rtx = copy_rtx (to_rtx);
6509 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6510 set_mem_alias_set (to_rtx, alias_set);
6512 return store_expr (exp, to_rtx, 0, nontemporal);
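/* Editorial illustration (not from the GCC sources): a store that reaches
   the bit-field path of store_field above. For the hypothetical code

     struct flags { unsigned a : 3; unsigned b : 5; } f;
     void set_b (unsigned x) { f.b = x; }

   MODE is VOIDmode because B is a bit field, so the value of X is expanded
   to a register and store_bit_field inserts it at the right bit position;
   a store to an ordinary aligned field would instead take the final branch
   and go through adjust_address and store_expr. */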
6516 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6517 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6518 codes and find the ultimate containing object, which we return.
6520 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6521 bit position, and *PUNSIGNEDP to the signedness of the field.
6522 If the position of the field is variable, we store a tree
6523 giving the variable offset (in units) in *POFFSET.
6524 This offset is in addition to the bit position.
6525 If the position is not variable, we store 0 in *POFFSET.
6527 If any of the extraction expressions is volatile,
6528 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6530 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6531 Otherwise, it is a mode that can be used to access the field.
6533 If the field describes a variable-sized object, *PMODE is set to
6534 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6535 this case, but the address of the object can be found.
6537 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6538 look through nodes that serve as markers of a greater alignment than
6539 the one that can be deduced from the expression. These nodes make it
6540 possible for front-ends to prevent temporaries from being created by
6541 the middle-end on alignment considerations. For that purpose, the
6542 normal operating mode at high-level is to always pass FALSE so that
6543 the ultimate containing object is really returned; moreover, the
6544 associated predicate handled_component_p will always return TRUE
6545 on these nodes, thus indicating that they are essentially handled
6546 by get_inner_reference. TRUE should only be passed when the caller
6547 is scanning the expression in order to build another representation
6548 and specifically knows how to handle these nodes; as such, this is
6549 the normal operating mode in the RTL expanders. */
6551 tree
6552 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6553 HOST_WIDE_INT *pbitpos, tree *poffset,
6554 enum machine_mode *pmode, int *punsignedp,
6555 int *pvolatilep, bool keep_aligning)
6557 tree size_tree = 0;
6558 enum machine_mode mode = VOIDmode;
6559 bool blkmode_bitfield = false;
6560 tree offset = size_zero_node;
6561 double_int bit_offset = double_int_zero;
6563 /* First get the mode, signedness, and size. We do this from just the
6564 outermost expression. */
6565 *pbitsize = -1;
6566 if (TREE_CODE (exp) == COMPONENT_REF)
6568 tree field = TREE_OPERAND (exp, 1);
6569 size_tree = DECL_SIZE (field);
6570 if (flag_strict_volatile_bitfields > 0
6571 && TREE_THIS_VOLATILE (exp)
6572 && DECL_BIT_FIELD_TYPE (field)
6573 && DECL_MODE (field) != BLKmode)
6574 /* Volatile bitfields should be accessed in the mode of the
6575 field's type, not the mode computed based on the bit
6576 size. */
6577 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6578 else if (!DECL_BIT_FIELD (field))
6579 mode = DECL_MODE (field);
6580 else if (DECL_MODE (field) == BLKmode)
6581 blkmode_bitfield = true;
6583 *punsignedp = DECL_UNSIGNED (field);
6585 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6587 size_tree = TREE_OPERAND (exp, 1);
6588 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6589 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6591 /* For vector types, with the correct size of access, use the mode of
6592 the inner type. */
6593 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6594 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6595 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6596 mode = TYPE_MODE (TREE_TYPE (exp));
6598 else
6600 mode = TYPE_MODE (TREE_TYPE (exp));
6601 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6603 if (mode == BLKmode)
6604 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6605 else
6606 *pbitsize = GET_MODE_BITSIZE (mode);
6609 if (size_tree != 0)
6611 if (! host_integerp (size_tree, 1))
6612 mode = BLKmode, *pbitsize = -1;
6613 else
6614 *pbitsize = tree_low_cst (size_tree, 1);
6617 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6618 and find the ultimate containing object. */
6619 while (1)
6621 switch (TREE_CODE (exp))
6623 case BIT_FIELD_REF:
6624 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6625 break;
6627 case COMPONENT_REF:
6629 tree field = TREE_OPERAND (exp, 1);
6630 tree this_offset = component_ref_field_offset (exp);
6632 /* If this field hasn't been filled in yet, don't go past it.
6633 This should only happen when folding expressions made during
6634 type construction. */
6635 if (this_offset == 0)
6636 break;
6638 offset = size_binop (PLUS_EXPR, offset, this_offset);
6639 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6641 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6643 break;
6645 case ARRAY_REF:
6646 case ARRAY_RANGE_REF:
6648 tree index = TREE_OPERAND (exp, 1);
6649 tree low_bound = array_ref_low_bound (exp);
6650 tree unit_size = array_ref_element_size (exp);
6652 /* We assume all arrays have sizes that are a multiple of a byte.
6653 First subtract the lower bound, if any, in the type of the
6654 index, then convert to sizetype and multiply by the size of
6655 the array element. */
6656 if (! integer_zerop (low_bound))
6657 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6658 index, low_bound);
6660 offset = size_binop (PLUS_EXPR, offset,
6661 size_binop (MULT_EXPR,
6662 fold_convert (sizetype, index),
6663 unit_size));
6665 break;
6667 case REALPART_EXPR:
6668 break;
6670 case IMAGPART_EXPR:
6671 bit_offset += double_int::from_uhwi (*pbitsize);
6672 break;
6674 case VIEW_CONVERT_EXPR:
6675 if (keep_aligning && STRICT_ALIGNMENT
6676 && (TYPE_ALIGN (TREE_TYPE (exp))
6677 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6678 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6679 < BIGGEST_ALIGNMENT)
6680 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6681 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6682 goto done;
6683 break;
6685 case MEM_REF:
6686 /* Hand back the decl for MEM[&decl, off]. */
6687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6689 tree off = TREE_OPERAND (exp, 1);
6690 if (!integer_zerop (off))
6692 double_int boff, coff = mem_ref_offset (exp);
6693 boff = coff.lshift (BITS_PER_UNIT == 8
6694 ? 3 : exact_log2 (BITS_PER_UNIT));
6695 bit_offset += boff;
6697 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6699 goto done;
6701 default:
6702 goto done;
6705 /* If any reference in the chain is volatile, the effect is volatile. */
6706 if (TREE_THIS_VOLATILE (exp))
6707 *pvolatilep = 1;
6709 exp = TREE_OPERAND (exp, 0);
6711 done:
6713 /* If OFFSET is constant, see if we can return the whole thing as a
6714 constant bit position. Make sure to handle overflow during
6715 this conversion. */
6716 if (TREE_CODE (offset) == INTEGER_CST)
6718 double_int tem = tree_to_double_int (offset);
6719 tem = tem.sext (TYPE_PRECISION (sizetype));
6720 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6721 tem += bit_offset;
6722 if (tem.fits_shwi ())
6724 *pbitpos = tem.to_shwi ();
6725 *poffset = offset = NULL_TREE;
6729 /* Otherwise, split it up. */
6730 if (offset)
6732 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6733 if (bit_offset.is_negative ())
6735 double_int mask
6736 = double_int::mask (BITS_PER_UNIT == 8
6737 ? 3 : exact_log2 (BITS_PER_UNIT));
6738 double_int tem = bit_offset.and_not (mask);
6739 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6740 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6741 bit_offset -= tem;
6742 tem = tem.arshift (BITS_PER_UNIT == 8
6743 ? 3 : exact_log2 (BITS_PER_UNIT),
6744 HOST_BITS_PER_DOUBLE_INT);
6745 offset = size_binop (PLUS_EXPR, offset,
6746 double_int_to_tree (sizetype, tem));
6749 *pbitpos = bit_offset.to_shwi ();
6750 *poffset = offset;
6753 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6754 if (mode == VOIDmode
6755 && blkmode_bitfield
6756 && (*pbitpos % BITS_PER_UNIT) == 0
6757 && (*pbitsize % BITS_PER_UNIT) == 0)
6758 *pmode = BLKmode;
6759 else
6760 *pmode = mode;
6762 return exp;
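/* Editorial illustration (not from the GCC sources), assuming 32-bit int on
   a typical target: for a reference to P.Y with

     struct point { int x; int y; } p;

   get_inner_reference peels the COMPONENT_REF and returns the decl P with
   *PBITSIZE == 32, *PBITPOS == 32, *POFFSET == NULL_TREE and *PMODE ==
   SImode. A reference with a variable array index, e.g. q.a[i], would
   instead come back with the index contribution as a tree in *POFFSET. */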
6765 /* Return a tree of sizetype representing the size, in bytes, of the element
6766 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6768 tree
6769 array_ref_element_size (tree exp)
6771 tree aligned_size = TREE_OPERAND (exp, 3);
6772 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6773 location_t loc = EXPR_LOCATION (exp);
6775 /* If a size was specified in the ARRAY_REF, it's the size measured
6776 in alignment units of the element type. So multiply by that value. */
6777 if (aligned_size)
6779 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6780 sizetype from another type of the same width and signedness. */
6781 if (TREE_TYPE (aligned_size) != sizetype)
6782 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6783 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6784 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6787 /* Otherwise, take the size from that of the element type. Substitute
6788 any PLACEHOLDER_EXPR that we have. */
6789 else
6790 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6793 /* Return a tree representing the lower bound of the array mentioned in
6794 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6796 tree
6797 array_ref_low_bound (tree exp)
6799 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6801 /* If a lower bound is specified in EXP, use it. */
6802 if (TREE_OPERAND (exp, 2))
6803 return TREE_OPERAND (exp, 2);
6805 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6806 substituting for a PLACEHOLDER_EXPR as needed. */
6807 if (domain_type && TYPE_MIN_VALUE (domain_type))
6808 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6810 /* Otherwise, return a zero of the appropriate type. */
6811 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6814 /* Returns true if REF is an array reference to an array at the end of
6815 a structure. If this is the case, the array may be allocated larger
6816 than its upper bound implies. */
6818 bool
6819 array_at_struct_end_p (tree ref)
6821 if (TREE_CODE (ref) != ARRAY_REF
6822 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6823 return false;
6825 while (handled_component_p (ref))
6827 /* If the reference chain contains a component reference to a
6828 non-union type and another field follows it, the reference
6829 is not at the end of a structure. */
6830 if (TREE_CODE (ref) == COMPONENT_REF
6831 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6833 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6834 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6835 nextf = DECL_CHAIN (nextf);
6836 if (nextf)
6837 return false;
6840 ref = TREE_OPERAND (ref, 0);
6843 /* If the reference is based on a declared entity, the size of the array
6844 is constrained by its given domain. */
6845 if (DECL_P (ref))
6846 return false;
6848 return true;
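/* Editorial illustration (not from the GCC sources): the pre-C99 idiom

     struct str { int len; char buf[1]; };

   Given a reference P->BUF[I] through a pointer P, the array is the last
   field and the underlying object is not a declared entity, so
   array_at_struct_end_p returns true and the access may legitimately run
   past the declared bound. For a declared object "struct str s;" the same
   reference s.buf[i] is based on a DECL and the function returns false. */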
6851 /* Return a tree representing the upper bound of the array mentioned in
6852 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6854 tree
6855 array_ref_up_bound (tree exp)
6857 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6859 /* If there is a domain type and it has an upper bound, use it, substituting
6860 for a PLACEHOLDER_EXPR as needed. */
6861 if (domain_type && TYPE_MAX_VALUE (domain_type))
6862 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6864 /* Otherwise fail. */
6865 return NULL_TREE;
6868 /* Return a tree representing the offset, in bytes, of the field referenced
6869 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6871 tree
6872 component_ref_field_offset (tree exp)
6874 tree aligned_offset = TREE_OPERAND (exp, 2);
6875 tree field = TREE_OPERAND (exp, 1);
6876 location_t loc = EXPR_LOCATION (exp);
6878 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6879 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6880 value. */
6881 if (aligned_offset)
6883 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6884 sizetype from another type of the same width and signedness. */
6885 if (TREE_TYPE (aligned_offset) != sizetype)
6886 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6887 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6888 size_int (DECL_OFFSET_ALIGN (field)
6889 / BITS_PER_UNIT));
6892 /* Otherwise, take the offset from that of the field. Substitute
6893 any PLACEHOLDER_EXPR that we have. */
6894 else
6895 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6898 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6900 static unsigned HOST_WIDE_INT
6901 target_align (const_tree target)
6903 /* We might have a chain of nested references with intermediate misaligning
6904 bitfield components, so we need to recurse to find out. */
6906 unsigned HOST_WIDE_INT this_align, outer_align;
6908 switch (TREE_CODE (target))
6910 case BIT_FIELD_REF:
6911 return 1;
6913 case COMPONENT_REF:
6914 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6915 outer_align = target_align (TREE_OPERAND (target, 0));
6916 return MIN (this_align, outer_align);
6918 case ARRAY_REF:
6919 case ARRAY_RANGE_REF:
6920 this_align = TYPE_ALIGN (TREE_TYPE (target));
6921 outer_align = target_align (TREE_OPERAND (target, 0));
6922 return MIN (this_align, outer_align);
6924 CASE_CONVERT:
6925 case NON_LVALUE_EXPR:
6926 case VIEW_CONVERT_EXPR:
6927 this_align = TYPE_ALIGN (TREE_TYPE (target));
6928 outer_align = target_align (TREE_OPERAND (target, 0));
6929 return MAX (this_align, outer_align);
6931 default:
6932 return TYPE_ALIGN (TREE_TYPE (target));
6937 /* Given an rtx VALUE that may contain additions and multiplications, return
6938 an equivalent value that just refers to a register, memory, or constant.
6939 This is done by generating instructions to perform the arithmetic and
6940 returning a pseudo-register containing the value.
6942 The returned value may be a REG, SUBREG, MEM or constant. */
6944 rtx
6945 force_operand (rtx value, rtx target)
6947 rtx op1, op2;
6948 /* Use subtarget as the target for operand 0 of a binary operation. */
6949 rtx subtarget = get_subtarget (target);
6950 enum rtx_code code = GET_CODE (value);
6952 /* Check for subreg applied to an expression produced by loop optimizer. */
6953 if (code == SUBREG
6954 && !REG_P (SUBREG_REG (value))
6955 && !MEM_P (SUBREG_REG (value)))
6957 value
6958 = simplify_gen_subreg (GET_MODE (value),
6959 force_reg (GET_MODE (SUBREG_REG (value)),
6960 force_operand (SUBREG_REG (value),
6961 NULL_RTX)),
6962 GET_MODE (SUBREG_REG (value)),
6963 SUBREG_BYTE (value));
6964 code = GET_CODE (value);
6967 /* Check for a PIC address load. */
6968 if ((code == PLUS || code == MINUS)
6969 && XEXP (value, 0) == pic_offset_table_rtx
6970 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6971 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6972 || GET_CODE (XEXP (value, 1)) == CONST))
6974 if (!subtarget)
6975 subtarget = gen_reg_rtx (GET_MODE (value));
6976 emit_move_insn (subtarget, value);
6977 return subtarget;
6980 if (ARITHMETIC_P (value))
6982 op2 = XEXP (value, 1);
6983 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6984 subtarget = 0;
6985 if (code == MINUS && CONST_INT_P (op2))
6987 code = PLUS;
6988 op2 = negate_rtx (GET_MODE (value), op2);
6991 /* Check for an addition with OP2 a constant integer and our first
6992 operand a PLUS of a virtual register and something else. In that
6993 case, we want to emit the sum of the virtual register and the
6994 constant first and then add the other value. This allows virtual
6995 register instantiation to simply modify the constant rather than
6996 creating another one around this addition. */
6997 if (code == PLUS && CONST_INT_P (op2)
6998 && GET_CODE (XEXP (value, 0)) == PLUS
6999 && REG_P (XEXP (XEXP (value, 0), 0))
7000 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7001 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7003 rtx temp = expand_simple_binop (GET_MODE (value), code,
7004 XEXP (XEXP (value, 0), 0), op2,
7005 subtarget, 0, OPTAB_LIB_WIDEN);
7006 return expand_simple_binop (GET_MODE (value), code, temp,
7007 force_operand (XEXP (XEXP (value,
7008 0), 1), 0),
7009 target, 0, OPTAB_LIB_WIDEN);
7012 op1 = force_operand (XEXP (value, 0), subtarget);
7013 op2 = force_operand (op2, NULL_RTX);
7014 switch (code)
7016 case MULT:
7017 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7018 case DIV:
7019 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7020 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7021 target, 1, OPTAB_LIB_WIDEN);
7022 else
7023 return expand_divmod (0,
7024 FLOAT_MODE_P (GET_MODE (value))
7025 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7026 GET_MODE (value), op1, op2, target, 0);
7027 case MOD:
7028 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7029 target, 0);
7030 case UDIV:
7031 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7032 target, 1);
7033 case UMOD:
7034 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7035 target, 1);
7036 case ASHIFTRT:
7037 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7038 target, 0, OPTAB_LIB_WIDEN);
7039 default:
7040 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7041 target, 1, OPTAB_LIB_WIDEN);
7044 if (UNARY_P (value))
7046 if (!target)
7047 target = gen_reg_rtx (GET_MODE (value));
7048 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7049 switch (code)
7051 case ZERO_EXTEND:
7052 case SIGN_EXTEND:
7053 case TRUNCATE:
7054 case FLOAT_EXTEND:
7055 case FLOAT_TRUNCATE:
7056 convert_move (target, op1, code == ZERO_EXTEND);
7057 return target;
7059 case FIX:
7060 case UNSIGNED_FIX:
7061 expand_fix (target, op1, code == UNSIGNED_FIX);
7062 return target;
7064 case FLOAT:
7065 case UNSIGNED_FLOAT:
7066 expand_float (target, op1, code == UNSIGNED_FLOAT);
7067 return target;
7069 default:
7070 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7074 #ifdef INSN_SCHEDULING
7075 /* On machines that have insn scheduling, we want all memory references to be
7076 explicit, so we need to deal with such paradoxical SUBREGs. */
7077 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7078 value
7079 = simplify_gen_subreg (GET_MODE (value),
7080 force_reg (GET_MODE (SUBREG_REG (value)),
7081 force_operand (SUBREG_REG (value),
7082 NULL_RTX)),
7083 GET_MODE (SUBREG_REG (value)),
7084 SUBREG_BYTE (value));
7085 #endif
7087 return value;
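/* Editorial illustration (not from the GCC sources): given an address-like
   value such as

     (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (reg:SI 101))

   force_operand recursively forces both operands, emits the multiply and
   the addition through expand_mult and expand_simple_binop, and returns a
   pseudo register holding the sum, so callers can use the value wherever
   only a register, memory, or constant operand is allowed. The pseudo
   register numbers are made up. */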
7090 /* Subroutine of expand_expr: return nonzero iff there is no way that
7091 EXP can reference X, which is being modified. TOP_P is nonzero if this
7092 call is going to be used to determine whether we need a temporary
7093 for EXP, as opposed to a recursive call to this function.
7095 It is always safe for this routine to return zero since it merely
7096 searches for optimization opportunities. */
7098 static int
7099 safe_from_p (const_rtx x, tree exp, int top_p)
7101 rtx exp_rtl = 0;
7102 int i, nops;
7104 if (x == 0
7105 /* If EXP has varying size, we MUST use a target since we currently
7106 have no way of allocating temporaries of variable size
7107 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7108 So we assume here that something at a higher level has prevented a
7109 clash. This is somewhat bogus, but the best we can do. Only
7110 do this when X is BLKmode and when we are at the top level. */
7111 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7112 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7113 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7114 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7115 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7116 != INTEGER_CST)
7117 && GET_MODE (x) == BLKmode)
7118 /* If X is in the outgoing argument area, it is always safe. */
7119 || (MEM_P (x)
7120 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7121 || (GET_CODE (XEXP (x, 0)) == PLUS
7122 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7123 return 1;
7125 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7126 find the underlying pseudo. */
7127 if (GET_CODE (x) == SUBREG)
7129 x = SUBREG_REG (x);
7130 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7131 return 0;
7134 /* Now look at our tree code and possibly recurse. */
7135 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7137 case tcc_declaration:
7138 exp_rtl = DECL_RTL_IF_SET (exp);
7139 break;
7141 case tcc_constant:
7142 return 1;
7144 case tcc_exceptional:
7145 if (TREE_CODE (exp) == TREE_LIST)
7147 while (1)
7149 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7150 return 0;
7151 exp = TREE_CHAIN (exp);
7152 if (!exp)
7153 return 1;
7154 if (TREE_CODE (exp) != TREE_LIST)
7155 return safe_from_p (x, exp, 0);
7158 else if (TREE_CODE (exp) == CONSTRUCTOR)
7160 constructor_elt *ce;
7161 unsigned HOST_WIDE_INT idx;
7163 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7164 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7165 || !safe_from_p (x, ce->value, 0))
7166 return 0;
7167 return 1;
7169 else if (TREE_CODE (exp) == ERROR_MARK)
7170 return 1; /* An already-visited SAVE_EXPR? */
7171 else
7172 return 0;
7174 case tcc_statement:
7175 /* The only case we look at here is the DECL_INITIAL inside a
7176 DECL_EXPR. */
7177 return (TREE_CODE (exp) != DECL_EXPR
7178 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7179 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7180 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7182 case tcc_binary:
7183 case tcc_comparison:
7184 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7185 return 0;
7186 /* Fall through. */
7188 case tcc_unary:
7189 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7191 case tcc_expression:
7192 case tcc_reference:
7193 case tcc_vl_exp:
7194 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7195 the expression. If it is set, we conflict iff we are that rtx or
7196 both are in memory. Otherwise, we check all operands of the
7197 expression recursively. */
7199 switch (TREE_CODE (exp))
7201 case ADDR_EXPR:
7202 /* If the operand is static or we are static, we can't conflict.
7203 Likewise if we don't conflict with the operand at all. */
7204 if (staticp (TREE_OPERAND (exp, 0))
7205 || TREE_STATIC (exp)
7206 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7207 return 1;
7209 /* Otherwise, the only way this can conflict is if we are taking
7210 the address of a DECL whose address is part of X, which is
7211 very rare. */
7212 exp = TREE_OPERAND (exp, 0);
7213 if (DECL_P (exp))
7215 if (!DECL_RTL_SET_P (exp)
7216 || !MEM_P (DECL_RTL (exp)))
7217 return 0;
7218 else
7219 exp_rtl = XEXP (DECL_RTL (exp), 0);
7221 break;
7223 case MEM_REF:
7224 if (MEM_P (x)
7225 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7226 get_alias_set (exp)))
7227 return 0;
7228 break;
7230 case CALL_EXPR:
7231 /* Assume that the call will clobber all hard registers and
7232 all of memory. */
7233 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7234 || MEM_P (x))
7235 return 0;
7236 break;
7238 case WITH_CLEANUP_EXPR:
7239 case CLEANUP_POINT_EXPR:
7240 /* Lowered by gimplify.c. */
7241 gcc_unreachable ();
7243 case SAVE_EXPR:
7244 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7246 default:
7247 break;
7250 /* If we have an rtx, we do not need to scan our operands. */
7251 if (exp_rtl)
7252 break;
7254 nops = TREE_OPERAND_LENGTH (exp);
7255 for (i = 0; i < nops; i++)
7256 if (TREE_OPERAND (exp, i) != 0
7257 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7258 return 0;
7260 break;
7262 case tcc_type:
7263 /* Should never get a type here. */
7264 gcc_unreachable ();
7267 /* If we have an rtl, find any enclosed object. Then see if we conflict
7268 with it. */
7269 if (exp_rtl)
7271 if (GET_CODE (exp_rtl) == SUBREG)
7273 exp_rtl = SUBREG_REG (exp_rtl);
7274 if (REG_P (exp_rtl)
7275 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7276 return 0;
7279 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7280 are memory and they conflict. */
7281 return ! (rtx_equal_p (x, exp_rtl)
7282 || (MEM_P (x) && MEM_P (exp_rtl)
7283 && true_dependence (exp_rtl, VOIDmode, x)));
7286 /* If we reach here, it is safe. */
7287 return 1;
7291 /* Return the highest power of two that EXP is known to be a multiple of.
7292 This is used in updating alignment of MEMs in array references. */
7294 unsigned HOST_WIDE_INT
7295 highest_pow2_factor (const_tree exp)
7297 unsigned HOST_WIDE_INT ret;
7298 int trailing_zeros = tree_ctz (exp);
7299 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7300 return BIGGEST_ALIGNMENT;
7301 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7302 if (ret > BIGGEST_ALIGNMENT)
7303 return BIGGEST_ALIGNMENT;
7304 return ret;
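/* Editorial illustration (not from the GCC sources): for an offset
   expression like I * 12, tree_ctz can prove at least two trailing zero
   bits because the constant factor 12 == 4 * 3 contributes two, so
   highest_pow2_factor returns 4; for a constant such as 64 it returns 64
   (unless that exceeds BIGGEST_ALIGNMENT, at which the result is always
   capped). */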
7307 /* Similar, except that the alignment requirements of TARGET are
7308 taken into account. Assume it is at least as aligned as its
7309 type, unless it is a COMPONENT_REF in which case the layout of
7310 the structure gives the alignment. */
7312 static unsigned HOST_WIDE_INT
7313 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7315 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7316 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7318 return MAX (factor, talign);
7321 #ifdef HAVE_conditional_move
7322 /* Convert the tree comparison code TCODE to the rtl one where the
7323 signedness is UNSIGNEDP. */
7325 static enum rtx_code
7326 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7328 enum rtx_code code;
7329 switch (tcode)
7331 case EQ_EXPR:
7332 code = EQ;
7333 break;
7334 case NE_EXPR:
7335 code = NE;
7336 break;
7337 case LT_EXPR:
7338 code = unsignedp ? LTU : LT;
7339 break;
7340 case LE_EXPR:
7341 code = unsignedp ? LEU : LE;
7342 break;
7343 case GT_EXPR:
7344 code = unsignedp ? GTU : GT;
7345 break;
7346 case GE_EXPR:
7347 code = unsignedp ? GEU : GE;
7348 break;
7349 case UNORDERED_EXPR:
7350 code = UNORDERED;
7351 break;
7352 case ORDERED_EXPR:
7353 code = ORDERED;
7354 break;
7355 case UNLT_EXPR:
7356 code = UNLT;
7357 break;
7358 case UNLE_EXPR:
7359 code = UNLE;
7360 break;
7361 case UNGT_EXPR:
7362 code = UNGT;
7363 break;
7364 case UNGE_EXPR:
7365 code = UNGE;
7366 break;
7367 case UNEQ_EXPR:
7368 code = UNEQ;
7369 break;
7370 case LTGT_EXPR:
7371 code = LTGT;
7372 break;
7374 default:
7375 gcc_unreachable ();
7377 return code;
7379 #endif
7381 /* Subroutine of expand_expr. Expand the two operands of a binary
7382 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7383 The value may be stored in TARGET if TARGET is nonzero. The
7384 MODIFIER argument is as documented by expand_expr. */
7386 static void
7387 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7388 enum expand_modifier modifier)
7390 if (! safe_from_p (target, exp1, 1))
7391 target = 0;
7392 if (operand_equal_p (exp0, exp1, 0))
7394 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7395 *op1 = copy_rtx (*op0);
7397 else
7399 /* If we need to preserve evaluation order, copy exp0 into its own
7400 temporary variable so that it can't be clobbered by exp1. */
7401 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7402 exp0 = save_expr (exp0);
7403 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7404 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7409 /* Return a MEM that contains constant EXP. DEFER is as for
7410 output_constant_def and MODIFIER is as for expand_expr. */
7412 static rtx
7413 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7415 rtx mem;
7417 mem = output_constant_def (exp, defer);
7418 if (modifier != EXPAND_INITIALIZER)
7419 mem = use_anchored_address (mem);
7420 return mem;
7423 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7424 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7426 static rtx
7427 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7428 enum expand_modifier modifier, addr_space_t as)
7430 rtx result, subtarget;
7431 tree inner, offset;
7432 HOST_WIDE_INT bitsize, bitpos;
7433 int volatilep, unsignedp;
7434 enum machine_mode mode1;
7436 /* If we are taking the address of a constant and are at the top level,
7437 we have to use output_constant_def since we can't call force_const_mem
7438 at top level. */
7439 /* ??? This should be considered a front-end bug. We should not be
7440 generating ADDR_EXPR of something that isn't an LVALUE. The only
7441 exception here is STRING_CST. */
7442 if (CONSTANT_CLASS_P (exp))
7444 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7445 if (modifier < EXPAND_SUM)
7446 result = force_operand (result, target);
7447 return result;
7450 /* Everything must be something allowed by is_gimple_addressable. */
7451 switch (TREE_CODE (exp))
7453 case INDIRECT_REF:
7454 /* This case will happen via recursion for &a->b. */
7455 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7457 case MEM_REF:
7459 tree tem = TREE_OPERAND (exp, 0);
7460 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7461 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7462 return expand_expr (tem, target, tmode, modifier);
7465 case CONST_DECL:
7466 /* Expand the initializer like constants above. */
7467 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7468 0, modifier), 0);
7469 if (modifier < EXPAND_SUM)
7470 result = force_operand (result, target);
7471 return result;
7473 case REALPART_EXPR:
7474 /* The real part of the complex number is always first, therefore
7475 the address is the same as the address of the parent object. */
7476 offset = 0;
7477 bitpos = 0;
7478 inner = TREE_OPERAND (exp, 0);
7479 break;
7481 case IMAGPART_EXPR:
7482 /* The imaginary part of the complex number is always second.
7483 The expression is therefore always offset by the size of the
7484 scalar type. */
7485 offset = 0;
7486 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7487 inner = TREE_OPERAND (exp, 0);
7488 break;
7490 case COMPOUND_LITERAL_EXPR:
7491 /* Allow COMPOUND_LITERAL_EXPR in initializers, if e.g.
7492 rtl_for_decl_init is called on DECL_INITIAL with
7493 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7494 if (modifier == EXPAND_INITIALIZER
7495 && COMPOUND_LITERAL_EXPR_DECL (exp))
7496 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7497 target, tmode, modifier, as);
7498 /* FALLTHRU */
7499 default:
7500 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7501 expand_expr, as that can have various side effects; LABEL_DECLs for
7502 example, may not have their DECL_RTL set yet. Expand the rtl of
7503 CONSTRUCTORs too, which should yield a memory reference for the
7504 constructor's contents. Assume language specific tree nodes can
7505 be expanded in some interesting way. */
7506 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7507 if (DECL_P (exp)
7508 || TREE_CODE (exp) == CONSTRUCTOR
7509 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7511 result = expand_expr (exp, target, tmode,
7512 modifier == EXPAND_INITIALIZER
7513 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7515 /* If the DECL isn't in memory, then the DECL wasn't properly
7516 marked TREE_ADDRESSABLE, which will be either a front-end
7517 or a tree optimizer bug. */
7519 if (TREE_ADDRESSABLE (exp)
7520 && ! MEM_P (result)
7521 && ! targetm.calls.allocate_stack_slots_for_args ())
7523 error ("local frame unavailable (naked function?)");
7524 return result;
7526 else
7527 gcc_assert (MEM_P (result));
7528 result = XEXP (result, 0);
7530 /* ??? Is this needed anymore? */
7531 if (DECL_P (exp))
7532 TREE_USED (exp) = 1;
7534 if (modifier != EXPAND_INITIALIZER
7535 && modifier != EXPAND_CONST_ADDRESS
7536 && modifier != EXPAND_SUM)
7537 result = force_operand (result, target);
7538 return result;
7541 /* Pass FALSE as the last argument to get_inner_reference although
7542 we are expanding to RTL. The rationale is that we know how to
7543 handle "aligning nodes" here: we can just bypass them because
7544 they won't change the final object whose address will be returned
7545 (they actually exist only for that purpose). */
7546 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7547 &mode1, &unsignedp, &volatilep, false);
7548 break;
7551 /* We must have made progress. */
7552 gcc_assert (inner != exp);
7554 subtarget = offset || bitpos ? NULL_RTX : target;
7555 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7556 inner alignment, force the inner to be sufficiently aligned. */
7557 if (CONSTANT_CLASS_P (inner)
7558 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7560 inner = copy_node (inner);
7561 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7562 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7563 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7565 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7567 if (offset)
7569 rtx tmp;
7571 if (modifier != EXPAND_NORMAL)
7572 result = force_operand (result, NULL);
7573 tmp = expand_expr (offset, NULL_RTX, tmode,
7574 modifier == EXPAND_INITIALIZER
7575 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7577 result = convert_memory_address_addr_space (tmode, result, as);
7578 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7580 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7581 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7582 else
7584 subtarget = bitpos ? NULL_RTX : target;
7585 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7586 1, OPTAB_LIB_WIDEN);
7590 if (bitpos)
7592 /* Someone beforehand should have rejected taking the address
7593 of such an object. */
7594 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7596 result = convert_memory_address_addr_space (tmode, result, as);
7597 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7598 if (modifier < EXPAND_SUM)
7599 result = force_operand (result, target);
7602 return result;
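/* Editor's note: a non-compiled sketch, not part of the original file; the
   helper name and the use of build3/build_fold_addr_expr here are
   assumptions for illustration.  For "&s.f" with F at byte offset 4,
   get_inner_reference peels the COMPONENT_REF down to S with bitpos == 32
   and no variable offset, the recursive call yields the address of S, and
   the constant part comes back via plus_constant, i.e. roughly
   (plus (address-of-S) (const_int 4)).  */
#if 0
static rtx
example_addr_of_field (tree s_decl, tree field_decl)
{
  tree ref = build3 (COMPONENT_REF, TREE_TYPE (field_decl),
		     s_decl, field_decl, NULL_TREE);
  tree addr = build_fold_addr_expr (ref);
  return expand_expr (addr, NULL_RTX, Pmode, EXPAND_NORMAL);
}
#endif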
7605 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7606 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7608 static rtx
7609 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7610 enum expand_modifier modifier)
7612 addr_space_t as = ADDR_SPACE_GENERIC;
7613 enum machine_mode address_mode = Pmode;
7614 enum machine_mode pointer_mode = ptr_mode;
7615 enum machine_mode rmode;
7616 rtx result;
7618 /* Target mode of VOIDmode says "whatever's natural". */
7619 if (tmode == VOIDmode)
7620 tmode = TYPE_MODE (TREE_TYPE (exp));
7622 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7624 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7625 address_mode = targetm.addr_space.address_mode (as);
7626 pointer_mode = targetm.addr_space.pointer_mode (as);
7629 /* We can get called with some Weird Things if the user does silliness
7630 like "(short) &a". In that case, convert_memory_address won't do
7631 the right thing, so ignore the given target mode. */
7632 if (tmode != address_mode && tmode != pointer_mode)
7633 tmode = address_mode;
7635 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7636 tmode, modifier, as);
7638 /* Despite expand_expr's claims about ignoring TMODE when not
7639 strictly convenient, things break if we don't honor it. Note
7640 that combined with the above, we only do this for pointer modes. */
7641 rmode = GET_MODE (result);
7642 if (rmode == VOIDmode)
7643 rmode = tmode;
7644 if (rmode != tmode)
7645 result = convert_memory_address_addr_space (tmode, result, as);
7647 return result;
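/* Editor's note: illustrative only; the wrapper name is invented.  The
   point shown is that the result mode is chosen from the pointer's address
   space (targetm.addr_space.address_mode), so a mismatched TMODE request
   such as the "(short) &a" case above is simply overridden.  */
#if 0
static rtx
example_addr_expr_mode (tree addr_expr)
{
  return expand_expr_addr_expr (addr_expr, NULL_RTX, VOIDmode,
				EXPAND_NORMAL);
}
#endif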
7650 /* Generate code for computing CONSTRUCTOR EXP.
7651 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7652 is TRUE, instead of creating a temporary variable in memory
7653 NULL is returned and the caller needs to handle it differently. */
7655 static rtx
7656 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7657 bool avoid_temp_mem)
7659 tree type = TREE_TYPE (exp);
7660 enum machine_mode mode = TYPE_MODE (type);
7662 /* Try to avoid creating a temporary at all. This is possible
7663 if all of the initializer is zero.
7664 FIXME: try to handle all [0..255] initializers we can handle
7665 with memset. */
7666 if (TREE_STATIC (exp)
7667 && !TREE_ADDRESSABLE (exp)
7668 && target != 0 && mode == BLKmode
7669 && all_zeros_p (exp))
7671 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7672 return target;
7675 /* All elts simple constants => refer to a constant in memory. But
7676 if this is a non-BLKmode mode, let it store a field at a time
7677 since that should make a CONST_INT or CONST_DOUBLE when we
7678 fold. Likewise, if we have a target we can use, it is best to
7679 store directly into the target unless the type is large enough
7680 that memcpy will be used. If we are making an initializer and
7681 all operands are constant, put it in memory as well.
7683 FIXME: Avoid trying to fill vector constructors piece-meal.
7684 Output them with output_constant_def below unless we're sure
7685 they're zeros. This should go away when vector initializers
7686 are treated like VECTOR_CST instead of arrays. */
7687 if ((TREE_STATIC (exp)
7688 && ((mode == BLKmode
7689 && ! (target != 0 && safe_from_p (target, exp, 1)))
7690 || TREE_ADDRESSABLE (exp)
7691 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7692 && (! MOVE_BY_PIECES_P
7693 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7694 TYPE_ALIGN (type)))
7695 && ! mostly_zeros_p (exp))))
7696 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7697 && TREE_CONSTANT (exp)))
7699 rtx constructor;
7701 if (avoid_temp_mem)
7702 return NULL_RTX;
7704 constructor = expand_expr_constant (exp, 1, modifier);
7706 if (modifier != EXPAND_CONST_ADDRESS
7707 && modifier != EXPAND_INITIALIZER
7708 && modifier != EXPAND_SUM)
7709 constructor = validize_mem (constructor);
7711 return constructor;
7714 /* Handle calls that pass values in multiple non-contiguous
7715 locations. The Irix 6 ABI has examples of this. */
7716 if (target == 0 || ! safe_from_p (target, exp, 1)
7717 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7719 if (avoid_temp_mem)
7720 return NULL_RTX;
7722 target
7723 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7724 | (TREE_READONLY (exp)
7725 * TYPE_QUAL_CONST))),
7726 TREE_ADDRESSABLE (exp), 1);
7729 store_constructor (exp, target, 0, int_expr_size (exp));
7730 return target;
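/* Editor's note: a hedged sketch, not part of the original file; the helper
   name is hypothetical.  It restates the fast path above: a TREE_STATIC,
   non-addressable, BLKmode constructor that all_zeros_p accepts is expanded
   as a single block clear instead of a temporary plus element-by-element
   stores.  */
#if 0
static void
example_zero_constructor (tree ctor, rtx target)
{
  if (all_zeros_p (ctor))
    clear_storage (target, expr_size (ctor), BLOCK_OP_NORMAL);
}
#endif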
7734 /* expand_expr: generate code for computing expression EXP.
7735 An rtx for the computed value is returned. The value is never null.
7736 In the case of a void EXP, const0_rtx is returned.
7738 The value may be stored in TARGET if TARGET is nonzero.
7739 TARGET is just a suggestion; callers must assume that
7740 the rtx returned may not be the same as TARGET.
7742 If TARGET is CONST0_RTX, it means that the value will be ignored.
7744 If TMODE is not VOIDmode, it suggests generating the
7745 result in mode TMODE. But this is done only when convenient.
7746 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7747 TMODE is just a suggestion; callers must assume that
7748 the rtx returned may not have mode TMODE.
7750 Note that TARGET may have neither TMODE nor MODE. In that case, it
7751 probably will not be used.
7753 If MODIFIER is EXPAND_SUM then when EXP is an addition
7754 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7755 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7756 products as above, or REG or MEM, or constant.
7757 Ordinarily in such cases we would output mul or add instructions
7758 and then return a pseudo reg containing the sum.
7760 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7761 it also marks a label as absolutely required (it can't be dead).
7762 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7763 This is used for outputting expressions used in initializers.
7765 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7766 with a constant address even if that address is not normally legitimate.
7767 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7769 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7770 a call parameter. Such targets require special care as we haven't yet
7771 marked TARGET so that it's safe from being trashed by libcalls. We
7772 don't want to use TARGET for anything but the final result;
7773 Intermediate values must go elsewhere. Additionally, calls to
7774 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7776 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7777 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7778 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7779 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7780 recursively. */
7782 rtx
7783 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7784 enum expand_modifier modifier, rtx *alt_rtl)
7786 rtx ret;
7788 /* Handle ERROR_MARK before anybody tries to access its type. */
7789 if (TREE_CODE (exp) == ERROR_MARK
7790 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7792 ret = CONST0_RTX (tmode);
7793 return ret ? ret : const0_rtx;
7796 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7797 return ret;
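/* Editor's note: illustrative sketch only; the function name is made up.
   It contrasts the modifiers documented above: EXPAND_NORMAL yields a
   finished operand, EXPAND_SUM may legitimately return an unsimplified
   (plus (reg) (const_int N)) meant to be folded into an address, and
   expand_normal is the usual shorthand for
   expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL).  */
#if 0
static void
example_expand_modifiers (tree exp)
{
  rtx finished = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  rtx addr_ish = expand_expr (exp, NULL_RTX, Pmode, EXPAND_SUM);
  rtx shorthand = expand_normal (exp);
  (void) finished;
  (void) addr_ish;
  (void) shorthand;
}
#endif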
7800 /* Try to expand the conditional expression which is represented by
7801 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7802 return the rtl reg which represents the result. Otherwise return
7803 NULL_RTX. */
7805 static rtx
7806 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7807 tree treeop1 ATTRIBUTE_UNUSED,
7808 tree treeop2 ATTRIBUTE_UNUSED)
7810 #ifdef HAVE_conditional_move
7811 rtx insn;
7812 rtx op00, op01, op1, op2;
7813 enum rtx_code comparison_code;
7814 enum machine_mode comparison_mode;
7815 gimple srcstmt;
7816 rtx temp;
7817 tree type = TREE_TYPE (treeop1);
7818 int unsignedp = TYPE_UNSIGNED (type);
7819 enum machine_mode mode = TYPE_MODE (type);
7820 enum machine_mode orig_mode = mode;
7822 /* If we cannot do a conditional move on the mode, try doing it
7823 with the promoted mode. */
7824 if (!can_conditionally_move_p (mode))
7826 mode = promote_mode (type, mode, &unsignedp);
7827 if (!can_conditionally_move_p (mode))
7828 return NULL_RTX;
7829 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7831 else
7832 temp = assign_temp (type, 0, 1);
7834 start_sequence ();
7835 expand_operands (treeop1, treeop2,
7836 temp, &op1, &op2, EXPAND_NORMAL);
7838 if (TREE_CODE (treeop0) == SSA_NAME
7839 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7841 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7842 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7843 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7844 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7845 comparison_mode = TYPE_MODE (type);
7846 unsignedp = TYPE_UNSIGNED (type);
7847 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7849 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7851 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7852 enum tree_code cmpcode = TREE_CODE (treeop0);
7853 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7854 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7855 unsignedp = TYPE_UNSIGNED (type);
7856 comparison_mode = TYPE_MODE (type);
7857 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7859 else
7861 op00 = expand_normal (treeop0);
7862 op01 = const0_rtx;
7863 comparison_code = NE;
7864 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7867 if (GET_MODE (op1) != mode)
7868 op1 = gen_lowpart (mode, op1);
7870 if (GET_MODE (op2) != mode)
7871 op2 = gen_lowpart (mode, op2);
7873 /* Try to emit the conditional move. */
7874 insn = emit_conditional_move (temp, comparison_code,
7875 op00, op01, comparison_mode,
7876 op1, op2, mode,
7877 unsignedp);
7879 /* If we could do the conditional move, emit the sequence,
7880 and return. */
7881 if (insn)
7883 rtx seq = get_insns ();
7884 end_sequence ();
7885 emit_insn (seq);
7886 return convert_modes (orig_mode, mode, temp, 0);
7889 /* Otherwise discard the sequence and fall back to code with
7890 branches. */
7891 end_sequence ();
7892 #endif
7893 return NULL_RTX;
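/* Editor's note: a hedged sketch, not part of the original file.  For
   "t = a < b ? c : d" on unsigned operands the code above collapses the
   comparison to LTU and asks emit_conditional_move for a single cmove-style
   insn; a NULL_RTX return means the caller (the COND_EXPR case in
   expand_expr_real_2 below) falls back to compare-and-jump.  */
#if 0
static rtx
example_cond_expr_cmove (tree cond, tree then_val, tree else_val)
{
  return expand_cond_expr_using_cmove (cond, then_val, else_val);
}
#endif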
7896 rtx
7897 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7898 enum expand_modifier modifier)
7900 rtx op0, op1, op2, temp;
7901 tree type;
7902 int unsignedp;
7903 enum machine_mode mode;
7904 enum tree_code code = ops->code;
7905 optab this_optab;
7906 rtx subtarget, original_target;
7907 int ignore;
7908 bool reduce_bit_field;
7909 location_t loc = ops->location;
7910 tree treeop0, treeop1, treeop2;
7911 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7912 ? reduce_to_bit_field_precision ((expr), \
7913 target, \
7914 type) \
7915 : (expr))
7917 type = ops->type;
7918 mode = TYPE_MODE (type);
7919 unsignedp = TYPE_UNSIGNED (type);
7921 treeop0 = ops->op0;
7922 treeop1 = ops->op1;
7923 treeop2 = ops->op2;
7925 /* We should be called only on simple (binary or unary) expressions,
7926 exactly those that are valid in gimple expressions that aren't
7927 GIMPLE_SINGLE_RHS (or invalid). */
7928 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7929 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7930 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7932 ignore = (target == const0_rtx
7933 || ((CONVERT_EXPR_CODE_P (code)
7934 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7935 && TREE_CODE (type) == VOID_TYPE));
7937 /* We should be called only if we need the result. */
7938 gcc_assert (!ignore);
7940 /* An operation in what may be a bit-field type needs the
7941 result to be reduced to the precision of the bit-field type,
7942 which is narrower than that of the type's mode. */
7943 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7944 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7946 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7947 target = 0;
7949 /* Use subtarget as the target for operand 0 of a binary operation. */
7950 subtarget = get_subtarget (target);
7951 original_target = target;
7953 switch (code)
7955 case NON_LVALUE_EXPR:
7956 case PAREN_EXPR:
7957 CASE_CONVERT:
7958 if (treeop0 == error_mark_node)
7959 return const0_rtx;
7961 if (TREE_CODE (type) == UNION_TYPE)
7963 tree valtype = TREE_TYPE (treeop0);
7965 /* If both input and output are BLKmode, this conversion isn't doing
7966 anything except possibly changing memory attribute. */
7967 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7969 rtx result = expand_expr (treeop0, target, tmode,
7970 modifier);
7972 result = copy_rtx (result);
7973 set_mem_attributes (result, type, 0);
7974 return result;
7977 if (target == 0)
7979 if (TYPE_MODE (type) != BLKmode)
7980 target = gen_reg_rtx (TYPE_MODE (type));
7981 else
7982 target = assign_temp (type, 1, 1);
7985 if (MEM_P (target))
7986 /* Store data into beginning of memory target. */
7987 store_expr (treeop0,
7988 adjust_address (target, TYPE_MODE (valtype), 0),
7989 modifier == EXPAND_STACK_PARM,
7990 false);
7992 else
7994 gcc_assert (REG_P (target));
7996 /* Store this field into a union of the proper type. */
7997 store_field (target,
7998 MIN ((int_size_in_bytes (TREE_TYPE
7999 (treeop0))
8000 * BITS_PER_UNIT),
8001 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8002 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8005 /* Return the entire union. */
8006 return target;
8009 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8011 op0 = expand_expr (treeop0, target, VOIDmode,
8012 modifier);
8014 /* If the signedness of the conversion differs and OP0 is
8015 a promoted SUBREG, clear that indication since we now
8016 have to do the proper extension. */
8017 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8018 && GET_CODE (op0) == SUBREG)
8019 SUBREG_PROMOTED_VAR_P (op0) = 0;
8021 return REDUCE_BIT_FIELD (op0);
8024 op0 = expand_expr (treeop0, NULL_RTX, mode,
8025 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8026 if (GET_MODE (op0) == mode)
8027 ;
8029 /* If OP0 is a constant, just convert it into the proper mode. */
8030 else if (CONSTANT_P (op0))
8032 tree inner_type = TREE_TYPE (treeop0);
8033 enum machine_mode inner_mode = GET_MODE (op0);
8035 if (inner_mode == VOIDmode)
8036 inner_mode = TYPE_MODE (inner_type);
8038 if (modifier == EXPAND_INITIALIZER)
8039 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8040 subreg_lowpart_offset (mode,
8041 inner_mode));
8042 else
8043 op0 = convert_modes (mode, inner_mode, op0,
8044 TYPE_UNSIGNED (inner_type));
8047 else if (modifier == EXPAND_INITIALIZER)
8048 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8050 else if (target == 0)
8051 op0 = convert_to_mode (mode, op0,
8052 TYPE_UNSIGNED (TREE_TYPE
8053 (treeop0)));
8054 else
8056 convert_move (target, op0,
8057 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8058 op0 = target;
8061 return REDUCE_BIT_FIELD (op0);
8063 case ADDR_SPACE_CONVERT_EXPR:
8065 tree treeop0_type = TREE_TYPE (treeop0);
8066 addr_space_t as_to;
8067 addr_space_t as_from;
8069 gcc_assert (POINTER_TYPE_P (type));
8070 gcc_assert (POINTER_TYPE_P (treeop0_type));
8072 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8073 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8075 /* Conversions between pointers to the same address space should
8076 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8077 gcc_assert (as_to != as_from);
8079 /* Ask target code to handle conversion between pointers
8080 to overlapping address spaces. */
8081 if (targetm.addr_space.subset_p (as_to, as_from)
8082 || targetm.addr_space.subset_p (as_from, as_to))
8084 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8085 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8086 gcc_assert (op0);
8087 return op0;
8090 /* For disjoint address spaces, converting anything but
8091 a null pointer invokes undefined behaviour. We simply
8092 always return a null pointer here. */
8093 return CONST0_RTX (mode);
8096 case POINTER_PLUS_EXPR:
8097 /* Even though the sizetype mode and the pointer's mode can be different,
8098 expand is able to handle this correctly and get the correct result out
8099 of the PLUS_EXPR code. */
8100 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8101 if sizetype precision is smaller than pointer precision. */
8102 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8103 treeop1 = fold_convert_loc (loc, type,
8104 fold_convert_loc (loc, ssizetype,
8105 treeop1));
8106 /* If sizetype precision is larger than pointer precision, truncate the
8107 offset to have matching modes. */
8108 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8109 treeop1 = fold_convert_loc (loc, type, treeop1);
8111 case PLUS_EXPR:
8112 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8113 something else, make sure we add the register to the constant and
8114 then to the other thing. This case can occur during strength
8115 reduction and doing it this way will produce better code if the
8116 frame pointer or argument pointer is eliminated.
8118 fold-const.c will ensure that the constant is always in the inner
8119 PLUS_EXPR, so the only case we need to do anything about is if
8120 sp, ap, or fp is our second argument, in which case we must swap
8121 the innermost first argument and our second argument. */
8123 if (TREE_CODE (treeop0) == PLUS_EXPR
8124 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8125 && TREE_CODE (treeop1) == VAR_DECL
8126 && (DECL_RTL (treeop1) == frame_pointer_rtx
8127 || DECL_RTL (treeop1) == stack_pointer_rtx
8128 || DECL_RTL (treeop1) == arg_pointer_rtx))
8130 gcc_unreachable ();
8133 /* If the result is to be ptr_mode and we are adding an integer to
8134 something, we might be forming a constant. So try to use
8135 plus_constant. If it produces a sum and we can't accept it,
8136 use force_operand. This allows P = &ARR[const] to generate
8137 efficient code on machines where a SYMBOL_REF is not a valid
8138 address.
8140 If this is an EXPAND_SUM call, always return the sum. */
8141 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8142 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8144 if (modifier == EXPAND_STACK_PARM)
8145 target = 0;
8146 if (TREE_CODE (treeop0) == INTEGER_CST
8147 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8148 && TREE_CONSTANT (treeop1))
8150 rtx constant_part;
8152 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8153 EXPAND_SUM);
8154 /* Use immed_double_const to ensure that the constant is
8155 truncated according to the mode of OP1, then sign extended
8156 to a HOST_WIDE_INT. Using the constant directly can result
8157 in non-canonical RTL in a 64x32 cross compile. */
8158 constant_part
8159 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8160 (HOST_WIDE_INT) 0,
8161 TYPE_MODE (TREE_TYPE (treeop1)));
8162 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8163 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8164 op1 = force_operand (op1, target);
8165 return REDUCE_BIT_FIELD (op1);
8168 else if (TREE_CODE (treeop1) == INTEGER_CST
8169 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8170 && TREE_CONSTANT (treeop0))
8172 rtx constant_part;
8174 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8175 (modifier == EXPAND_INITIALIZER
8176 ? EXPAND_INITIALIZER : EXPAND_SUM));
8177 if (! CONSTANT_P (op0))
8179 op1 = expand_expr (treeop1, NULL_RTX,
8180 VOIDmode, modifier);
8181 /* Return a PLUS if modifier says it's OK. */
8182 if (modifier == EXPAND_SUM
8183 || modifier == EXPAND_INITIALIZER)
8184 return simplify_gen_binary (PLUS, mode, op0, op1);
8185 goto binop2;
8187 /* Use immed_double_const to ensure that the constant is
8188 truncated according to the mode of OP1, then sign extended
8189 to a HOST_WIDE_INT. Using the constant directly can result
8190 in non-canonical RTL in a 64x32 cross compile. */
8191 constant_part
8192 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8193 (HOST_WIDE_INT) 0,
8194 TYPE_MODE (TREE_TYPE (treeop0)));
8195 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8196 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8197 op0 = force_operand (op0, target);
8198 return REDUCE_BIT_FIELD (op0);
8202 /* Use TER to expand pointer addition of a negated value
8203 as pointer subtraction. */
8204 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8205 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8206 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8207 && TREE_CODE (treeop1) == SSA_NAME
8208 && TYPE_MODE (TREE_TYPE (treeop0))
8209 == TYPE_MODE (TREE_TYPE (treeop1)))
8211 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8212 if (def)
8214 treeop1 = gimple_assign_rhs1 (def);
8215 code = MINUS_EXPR;
8216 goto do_minus;
8220 /* No sense saving up arithmetic to be done
8221 if it's all in the wrong mode to form part of an address.
8222 And force_operand won't know whether to sign-extend or
8223 zero-extend. */
8224 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8225 || mode != ptr_mode)
8227 expand_operands (treeop0, treeop1,
8228 subtarget, &op0, &op1, EXPAND_NORMAL);
8229 if (op0 == const0_rtx)
8230 return op1;
8231 if (op1 == const0_rtx)
8232 return op0;
8233 goto binop2;
8236 expand_operands (treeop0, treeop1,
8237 subtarget, &op0, &op1, modifier);
8238 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8240 case MINUS_EXPR:
8241 do_minus:
8242 /* For initializers, we are allowed to return a MINUS of two
8243 symbolic constants. Here we handle all cases when both operands
8244 are constant. */
8245 /* Handle difference of two symbolic constants,
8246 for the sake of an initializer. */
8247 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8248 && really_constant_p (treeop0)
8249 && really_constant_p (treeop1))
8251 expand_operands (treeop0, treeop1,
8252 NULL_RTX, &op0, &op1, modifier);
8254 /* If the last operand is a CONST_INT, use plus_constant of
8255 the negated constant. Else make the MINUS. */
8256 if (CONST_INT_P (op1))
8257 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8258 -INTVAL (op1)));
8259 else
8260 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8263 /* No sense saving up arithmetic to be done
8264 if it's all in the wrong mode to form part of an address.
8265 And force_operand won't know whether to sign-extend or
8266 zero-extend. */
8267 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8268 || mode != ptr_mode)
8269 goto binop;
8271 expand_operands (treeop0, treeop1,
8272 subtarget, &op0, &op1, modifier);
8274 /* Convert A - const to A + (-const). */
8275 if (CONST_INT_P (op1))
8277 op1 = negate_rtx (mode, op1);
8278 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8281 goto binop2;
8283 case WIDEN_MULT_PLUS_EXPR:
8284 case WIDEN_MULT_MINUS_EXPR:
8285 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8286 op2 = expand_normal (treeop2);
8287 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8288 target, unsignedp);
8289 return target;
8291 case WIDEN_MULT_EXPR:
8292 /* If first operand is constant, swap them.
8293 Thus the following special case checks need only
8294 check the second operand. */
8295 if (TREE_CODE (treeop0) == INTEGER_CST)
8297 tree t1 = treeop0;
8298 treeop0 = treeop1;
8299 treeop1 = t1;
8302 /* First, check if we have a multiplication of one signed and one
8303 unsigned operand. */
8304 if (TREE_CODE (treeop1) != INTEGER_CST
8305 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8306 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8308 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8309 this_optab = usmul_widen_optab;
8310 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8311 != CODE_FOR_nothing)
8313 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8314 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8315 EXPAND_NORMAL);
8316 else
8317 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8318 EXPAND_NORMAL);
8319 /* op0 and op1 might still be constant, despite the above
8320 != INTEGER_CST check. Handle it. */
8321 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8323 op0 = convert_modes (innermode, mode, op0, true);
8324 op1 = convert_modes (innermode, mode, op1, false);
8325 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8326 target, unsignedp));
8328 goto binop3;
8331 /* Check for a multiplication with matching signedness. */
8332 else if ((TREE_CODE (treeop1) == INTEGER_CST
8333 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8334 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8335 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8337 tree op0type = TREE_TYPE (treeop0);
8338 enum machine_mode innermode = TYPE_MODE (op0type);
8339 bool zextend_p = TYPE_UNSIGNED (op0type);
8340 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8341 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8343 if (TREE_CODE (treeop0) != INTEGER_CST)
8345 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8346 != CODE_FOR_nothing)
8348 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8349 EXPAND_NORMAL);
8350 /* op0 and op1 might still be constant, despite the above
8351 != INTEGER_CST check. Handle it. */
8352 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8354 widen_mult_const:
8355 op0 = convert_modes (innermode, mode, op0, zextend_p);
8356 op1
8357 = convert_modes (innermode, mode, op1,
8358 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8359 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8360 target,
8361 unsignedp));
8363 temp = expand_widening_mult (mode, op0, op1, target,
8364 unsignedp, this_optab);
8365 return REDUCE_BIT_FIELD (temp);
8367 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8368 != CODE_FOR_nothing
8369 && innermode == word_mode)
8371 rtx htem, hipart;
8372 op0 = expand_normal (treeop0);
8373 if (TREE_CODE (treeop1) == INTEGER_CST)
8374 op1 = convert_modes (innermode, mode,
8375 expand_normal (treeop1),
8376 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8377 else
8378 op1 = expand_normal (treeop1);
8379 /* op0 and op1 might still be constant, despite the above
8380 != INTEGER_CST check. Handle it. */
8381 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8382 goto widen_mult_const;
8383 temp = expand_binop (mode, other_optab, op0, op1, target,
8384 unsignedp, OPTAB_LIB_WIDEN);
8385 hipart = gen_highpart (innermode, temp);
8386 htem = expand_mult_highpart_adjust (innermode, hipart,
8387 op0, op1, hipart,
8388 zextend_p);
8389 if (htem != hipart)
8390 emit_move_insn (hipart, htem);
8391 return REDUCE_BIT_FIELD (temp);
8395 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8396 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8397 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8398 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8400 case FMA_EXPR:
8402 optab opt = fma_optab;
8403 gimple def0, def2;
8405 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8406 call. */
8407 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8409 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8410 tree call_expr;
8412 gcc_assert (fn != NULL_TREE);
8413 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8414 return expand_builtin (call_expr, target, subtarget, mode, false);
8417 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8418 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8420 op0 = op2 = NULL;
8422 if (def0 && def2
8423 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8425 opt = fnms_optab;
8426 op0 = expand_normal (gimple_assign_rhs1 (def0));
8427 op2 = expand_normal (gimple_assign_rhs1 (def2));
8429 else if (def0
8430 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8432 opt = fnma_optab;
8433 op0 = expand_normal (gimple_assign_rhs1 (def0));
8435 else if (def2
8436 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8438 opt = fms_optab;
8439 op2 = expand_normal (gimple_assign_rhs1 (def2));
8442 if (op0 == NULL)
8443 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8444 if (op2 == NULL)
8445 op2 = expand_normal (treeop2);
8446 op1 = expand_normal (treeop1);
8448 return expand_ternary_op (TYPE_MODE (type), opt,
8449 op0, op1, op2, target, 0);
8452 case MULT_EXPR:
8453 /* If this is a fixed-point operation, then we cannot use the code
8454 below because "expand_mult" doesn't support sat/no-sat fixed-point
8455 multiplications. */
8456 if (ALL_FIXED_POINT_MODE_P (mode))
8457 goto binop;
8459 /* If first operand is constant, swap them.
8460 Thus the following special case checks need only
8461 check the second operand. */
8462 if (TREE_CODE (treeop0) == INTEGER_CST)
8464 tree t1 = treeop0;
8465 treeop0 = treeop1;
8466 treeop1 = t1;
8469 /* Attempt to return something suitable for generating an
8470 indexed address, for machines that support that. */
8472 if (modifier == EXPAND_SUM && mode == ptr_mode
8473 && host_integerp (treeop1, 0))
8475 tree exp1 = treeop1;
8477 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8478 EXPAND_SUM);
8480 if (!REG_P (op0))
8481 op0 = force_operand (op0, NULL_RTX);
8482 if (!REG_P (op0))
8483 op0 = copy_to_mode_reg (mode, op0);
8485 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8486 gen_int_mode (tree_low_cst (exp1, 0),
8487 TYPE_MODE (TREE_TYPE (exp1)))));
8490 if (modifier == EXPAND_STACK_PARM)
8491 target = 0;
8493 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8494 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8496 case TRUNC_DIV_EXPR:
8497 case FLOOR_DIV_EXPR:
8498 case CEIL_DIV_EXPR:
8499 case ROUND_DIV_EXPR:
8500 case EXACT_DIV_EXPR:
8501 /* If this is a fixed-point operation, then we cannot use the code
8502 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8503 divisions. */
8504 if (ALL_FIXED_POINT_MODE_P (mode))
8505 goto binop;
8507 if (modifier == EXPAND_STACK_PARM)
8508 target = 0;
8509 /* Possible optimization: compute the dividend with EXPAND_SUM
8510 then if the divisor is constant can optimize the case
8511 where some terms of the dividend have coeffs divisible by it. */
8512 expand_operands (treeop0, treeop1,
8513 subtarget, &op0, &op1, EXPAND_NORMAL);
8514 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8516 case RDIV_EXPR:
8517 goto binop;
8519 case MULT_HIGHPART_EXPR:
8520 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8521 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8522 gcc_assert (temp);
8523 return temp;
8525 case TRUNC_MOD_EXPR:
8526 case FLOOR_MOD_EXPR:
8527 case CEIL_MOD_EXPR:
8528 case ROUND_MOD_EXPR:
8529 if (modifier == EXPAND_STACK_PARM)
8530 target = 0;
8531 expand_operands (treeop0, treeop1,
8532 subtarget, &op0, &op1, EXPAND_NORMAL);
8533 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8535 case FIXED_CONVERT_EXPR:
8536 op0 = expand_normal (treeop0);
8537 if (target == 0 || modifier == EXPAND_STACK_PARM)
8538 target = gen_reg_rtx (mode);
8540 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8541 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8542 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8543 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8544 else
8545 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8546 return target;
8548 case FIX_TRUNC_EXPR:
8549 op0 = expand_normal (treeop0);
8550 if (target == 0 || modifier == EXPAND_STACK_PARM)
8551 target = gen_reg_rtx (mode);
8552 expand_fix (target, op0, unsignedp);
8553 return target;
8555 case FLOAT_EXPR:
8556 op0 = expand_normal (treeop0);
8557 if (target == 0 || modifier == EXPAND_STACK_PARM)
8558 target = gen_reg_rtx (mode);
8559 /* expand_float can't figure out what to do if FROM has VOIDmode.
8560 So give it the correct mode. With -O, cse will optimize this. */
8561 if (GET_MODE (op0) == VOIDmode)
8562 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8563 op0);
8564 expand_float (target, op0,
8565 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8566 return target;
8568 case NEGATE_EXPR:
8569 op0 = expand_expr (treeop0, subtarget,
8570 VOIDmode, EXPAND_NORMAL);
8571 if (modifier == EXPAND_STACK_PARM)
8572 target = 0;
8573 temp = expand_unop (mode,
8574 optab_for_tree_code (NEGATE_EXPR, type,
8575 optab_default),
8576 op0, target, 0);
8577 gcc_assert (temp);
8578 return REDUCE_BIT_FIELD (temp);
8580 case ABS_EXPR:
8581 op0 = expand_expr (treeop0, subtarget,
8582 VOIDmode, EXPAND_NORMAL);
8583 if (modifier == EXPAND_STACK_PARM)
8584 target = 0;
8586 /* ABS_EXPR is not valid for complex arguments. */
8587 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8588 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8590 /* Unsigned abs is simply the operand. Testing here means we don't
8591 risk generating incorrect code below. */
8592 if (TYPE_UNSIGNED (type))
8593 return op0;
8595 return expand_abs (mode, op0, target, unsignedp,
8596 safe_from_p (target, treeop0, 1));
8598 case MAX_EXPR:
8599 case MIN_EXPR:
8600 target = original_target;
8601 if (target == 0
8602 || modifier == EXPAND_STACK_PARM
8603 || (MEM_P (target) && MEM_VOLATILE_P (target))
8604 || GET_MODE (target) != mode
8605 || (REG_P (target)
8606 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8607 target = gen_reg_rtx (mode);
8608 expand_operands (treeop0, treeop1,
8609 target, &op0, &op1, EXPAND_NORMAL);
8611 /* First try to do it with a special MIN or MAX instruction.
8612 If that does not win, use a conditional jump to select the proper
8613 value. */
8614 this_optab = optab_for_tree_code (code, type, optab_default);
8615 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8616 OPTAB_WIDEN);
8617 if (temp != 0)
8618 return temp;
8620 /* At this point, a MEM target is no longer useful; we will get better
8621 code without it. */
8623 if (! REG_P (target))
8624 target = gen_reg_rtx (mode);
8626 /* If op1 was placed in target, swap op0 and op1. */
8627 if (target != op0 && target == op1)
8629 temp = op0;
8630 op0 = op1;
8631 op1 = temp;
8634 /* We generate better code and avoid problems with op1 mentioning
8635 target by forcing op1 into a pseudo if it isn't a constant. */
8636 if (! CONSTANT_P (op1))
8637 op1 = force_reg (mode, op1);
8640 enum rtx_code comparison_code;
8641 rtx cmpop1 = op1;
8643 if (code == MAX_EXPR)
8644 comparison_code = unsignedp ? GEU : GE;
8645 else
8646 comparison_code = unsignedp ? LEU : LE;
8648 /* Canonicalize to comparisons against 0. */
8649 if (op1 == const1_rtx)
8651 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8652 or (a != 0 ? a : 1) for unsigned.
8653 For MIN we are safe converting (a <= 1 ? a : 1)
8654 into (a <= 0 ? a : 1) */
8655 cmpop1 = const0_rtx;
8656 if (code == MAX_EXPR)
8657 comparison_code = unsignedp ? NE : GT;
8659 if (op1 == constm1_rtx && !unsignedp)
8661 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8662 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8663 cmpop1 = const0_rtx;
8664 if (code == MIN_EXPR)
8665 comparison_code = LT;
8667 #ifdef HAVE_conditional_move
8668 /* Use a conditional move if possible. */
8669 if (can_conditionally_move_p (mode))
8671 rtx insn;
8673 /* ??? Same problem as in expmed.c: emit_conditional_move
8674 forces a stack adjustment via compare_from_rtx, and we
8675 lose the stack adjustment if the sequence we are about
8676 to create is discarded. */
8677 do_pending_stack_adjust ();
8679 start_sequence ();
8681 /* Try to emit the conditional move. */
8682 insn = emit_conditional_move (target, comparison_code,
8683 op0, cmpop1, mode,
8684 op0, op1, mode,
8685 unsignedp);
8687 /* If we could do the conditional move, emit the sequence,
8688 and return. */
8689 if (insn)
8691 rtx seq = get_insns ();
8692 end_sequence ();
8693 emit_insn (seq);
8694 return target;
8697 /* Otherwise discard the sequence and fall back to code with
8698 branches. */
8699 end_sequence ();
8701 #endif
8702 if (target != op0)
8703 emit_move_insn (target, op0);
8705 temp = gen_label_rtx ();
8706 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8707 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8708 -1);
8710 emit_move_insn (target, op1);
8711 emit_label (temp);
8712 return target;
8714 case BIT_NOT_EXPR:
8715 op0 = expand_expr (treeop0, subtarget,
8716 VOIDmode, EXPAND_NORMAL);
8717 if (modifier == EXPAND_STACK_PARM)
8718 target = 0;
8719 /* In case we have to reduce the result to bitfield precision
8720 for unsigned bitfield expand this as XOR with a proper constant
8721 instead. */
8722 if (reduce_bit_field && TYPE_UNSIGNED (type))
8723 temp = expand_binop (mode, xor_optab, op0,
8724 immed_double_int_const
8725 (double_int::mask (TYPE_PRECISION (type)), mode),
8726 target, 1, OPTAB_LIB_WIDEN);
8727 else
8728 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8729 gcc_assert (temp);
8730 return temp;
8732 /* ??? Can optimize bitwise operations with one arg constant.
8733 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8734 and (a bitwise1 b) bitwise2 b (etc)
8735 but that is probably not worth while. */
8737 case BIT_AND_EXPR:
8738 case BIT_IOR_EXPR:
8739 case BIT_XOR_EXPR:
8740 goto binop;
8742 case LROTATE_EXPR:
8743 case RROTATE_EXPR:
8744 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8745 || (GET_MODE_PRECISION (TYPE_MODE (type))
8746 == TYPE_PRECISION (type)));
8747 /* fall through */
8749 case LSHIFT_EXPR:
8750 case RSHIFT_EXPR:
8751 /* If this is a fixed-point operation, then we cannot use the code
8752 below because "expand_shift" doesn't support sat/no-sat fixed-point
8753 shifts. */
8754 if (ALL_FIXED_POINT_MODE_P (mode))
8755 goto binop;
8757 if (! safe_from_p (subtarget, treeop1, 1))
8758 subtarget = 0;
8759 if (modifier == EXPAND_STACK_PARM)
8760 target = 0;
8761 op0 = expand_expr (treeop0, subtarget,
8762 VOIDmode, EXPAND_NORMAL);
8763 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8764 unsignedp);
8765 if (code == LSHIFT_EXPR)
8766 temp = REDUCE_BIT_FIELD (temp);
8767 return temp;
8769 /* Could determine the answer when only additive constants differ. Also,
8770 the addition of one can be handled by changing the condition. */
8771 case LT_EXPR:
8772 case LE_EXPR:
8773 case GT_EXPR:
8774 case GE_EXPR:
8775 case EQ_EXPR:
8776 case NE_EXPR:
8777 case UNORDERED_EXPR:
8778 case ORDERED_EXPR:
8779 case UNLT_EXPR:
8780 case UNLE_EXPR:
8781 case UNGT_EXPR:
8782 case UNGE_EXPR:
8783 case UNEQ_EXPR:
8784 case LTGT_EXPR:
8785 temp = do_store_flag (ops,
8786 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8787 tmode != VOIDmode ? tmode : mode);
8788 if (temp)
8789 return temp;
8791 /* Use a compare and a jump for BLKmode comparisons, or for function
8792 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8794 if ((target == 0
8795 || modifier == EXPAND_STACK_PARM
8796 || ! safe_from_p (target, treeop0, 1)
8797 || ! safe_from_p (target, treeop1, 1)
8798 /* Make sure we don't have a hard reg (such as function's return
8799 value) live across basic blocks, if not optimizing. */
8800 || (!optimize && REG_P (target)
8801 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8802 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8804 emit_move_insn (target, const0_rtx);
8806 op1 = gen_label_rtx ();
8807 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8809 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8810 emit_move_insn (target, constm1_rtx);
8811 else
8812 emit_move_insn (target, const1_rtx);
8814 emit_label (op1);
8815 return target;
8817 case COMPLEX_EXPR:
8818 /* Get the rtx code of the operands. */
8819 op0 = expand_normal (treeop0);
8820 op1 = expand_normal (treeop1);
8822 if (!target)
8823 target = gen_reg_rtx (TYPE_MODE (type));
8824 else
8825 /* If target overlaps with op1, then either we need to force
8826 op1 into a pseudo (if target also overlaps with op0),
8827 or write the complex parts in reverse order. */
8828 switch (GET_CODE (target))
8830 case CONCAT:
8831 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8833 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8835 complex_expr_force_op1:
8836 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8837 emit_move_insn (temp, op1);
8838 op1 = temp;
8839 break;
8841 complex_expr_swap_order:
8842 /* Move the imaginary (op1) and real (op0) parts to their
8843 location. */
8844 write_complex_part (target, op1, true);
8845 write_complex_part (target, op0, false);
8847 return target;
8849 break;
8850 case MEM:
8851 temp = adjust_address_nv (target,
8852 GET_MODE_INNER (GET_MODE (target)), 0);
8853 if (reg_overlap_mentioned_p (temp, op1))
8855 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8856 temp = adjust_address_nv (target, imode,
8857 GET_MODE_SIZE (imode));
8858 if (reg_overlap_mentioned_p (temp, op0))
8859 goto complex_expr_force_op1;
8860 goto complex_expr_swap_order;
8862 break;
8863 default:
8864 if (reg_overlap_mentioned_p (target, op1))
8866 if (reg_overlap_mentioned_p (target, op0))
8867 goto complex_expr_force_op1;
8868 goto complex_expr_swap_order;
8870 break;
8873 /* Move the real (op0) and imaginary (op1) parts to their location. */
8874 write_complex_part (target, op0, false);
8875 write_complex_part (target, op1, true);
8877 return target;
8879 case WIDEN_SUM_EXPR:
8881 tree oprnd0 = treeop0;
8882 tree oprnd1 = treeop1;
8884 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8885 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8886 target, unsignedp);
8887 return target;
8890 case REDUC_MAX_EXPR:
8891 case REDUC_MIN_EXPR:
8892 case REDUC_PLUS_EXPR:
8894 op0 = expand_normal (treeop0);
8895 this_optab = optab_for_tree_code (code, type, optab_default);
8896 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8897 gcc_assert (temp);
8898 return temp;
8901 case VEC_LSHIFT_EXPR:
8902 case VEC_RSHIFT_EXPR:
8904 target = expand_vec_shift_expr (ops, target);
8905 return target;
8908 case VEC_UNPACK_HI_EXPR:
8909 case VEC_UNPACK_LO_EXPR:
8911 op0 = expand_normal (treeop0);
8912 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8913 target, unsignedp);
8914 gcc_assert (temp);
8915 return temp;
8918 case VEC_UNPACK_FLOAT_HI_EXPR:
8919 case VEC_UNPACK_FLOAT_LO_EXPR:
8921 op0 = expand_normal (treeop0);
8922 /* The signedness is determined from input operand. */
8923 temp = expand_widen_pattern_expr
8924 (ops, op0, NULL_RTX, NULL_RTX,
8925 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8927 gcc_assert (temp);
8928 return temp;
8931 case VEC_WIDEN_MULT_HI_EXPR:
8932 case VEC_WIDEN_MULT_LO_EXPR:
8933 case VEC_WIDEN_MULT_EVEN_EXPR:
8934 case VEC_WIDEN_MULT_ODD_EXPR:
8935 case VEC_WIDEN_LSHIFT_HI_EXPR:
8936 case VEC_WIDEN_LSHIFT_LO_EXPR:
8937 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8938 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8939 target, unsignedp);
8940 gcc_assert (target);
8941 return target;
8943 case VEC_PACK_TRUNC_EXPR:
8944 case VEC_PACK_SAT_EXPR:
8945 case VEC_PACK_FIX_TRUNC_EXPR:
8946 mode = TYPE_MODE (TREE_TYPE (treeop0));
8947 goto binop;
8949 case VEC_PERM_EXPR:
8950 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8951 op2 = expand_normal (treeop2);
8953 /* Careful here: if the target doesn't support integral vector modes,
8954 a constant selection vector could wind up smooshed into a normal
8955 integral constant. */
8956 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8958 tree sel_type = TREE_TYPE (treeop2);
8959 enum machine_mode vmode
8960 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8961 TYPE_VECTOR_SUBPARTS (sel_type));
8962 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8963 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8964 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8966 else
8967 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8969 temp = expand_vec_perm (mode, op0, op1, op2, target);
8970 gcc_assert (temp);
8971 return temp;
8973 case DOT_PROD_EXPR:
8975 tree oprnd0 = treeop0;
8976 tree oprnd1 = treeop1;
8977 tree oprnd2 = treeop2;
8978 rtx op2;
8980 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8981 op2 = expand_normal (oprnd2);
8982 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8983 target, unsignedp);
8984 return target;
8987 case REALIGN_LOAD_EXPR:
8989 tree oprnd0 = treeop0;
8990 tree oprnd1 = treeop1;
8991 tree oprnd2 = treeop2;
8992 rtx op2;
8994 this_optab = optab_for_tree_code (code, type, optab_default);
8995 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8996 op2 = expand_normal (oprnd2);
8997 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8998 target, unsignedp);
8999 gcc_assert (temp);
9000 return temp;
9003 case COND_EXPR:
9004 /* A COND_EXPR with its type being VOID_TYPE represents a
9005 conditional jump and is handled in
9006 expand_gimple_cond_expr. */
9007 gcc_assert (!VOID_TYPE_P (type));
9009 /* Note that COND_EXPRs whose type is a structure or union
9010 are required to be constructed to contain assignments of
9011 a temporary variable, so that we can evaluate them here
9012 for side effect only. If type is void, we must do likewise. */
9014 gcc_assert (!TREE_ADDRESSABLE (type)
9015 && !ignore
9016 && TREE_TYPE (treeop1) != void_type_node
9017 && TREE_TYPE (treeop2) != void_type_node);
9019 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9020 if (temp)
9021 return temp;
9023 /* If we are not to produce a result, we have no target. Otherwise,
9024 if a target was specified use it; it will not be used as an
9025 intermediate target unless it is safe. If no target, use a
9026 temporary. */
9028 if (modifier != EXPAND_STACK_PARM
9029 && original_target
9030 && safe_from_p (original_target, treeop0, 1)
9031 && GET_MODE (original_target) == mode
9032 && !MEM_P (original_target))
9033 temp = original_target;
9034 else
9035 temp = assign_temp (type, 0, 1);
9037 do_pending_stack_adjust ();
9038 NO_DEFER_POP;
9039 op0 = gen_label_rtx ();
9040 op1 = gen_label_rtx ();
9041 jumpifnot (treeop0, op0, -1);
9042 store_expr (treeop1, temp,
9043 modifier == EXPAND_STACK_PARM,
9044 false);
9046 emit_jump_insn (gen_jump (op1));
9047 emit_barrier ();
9048 emit_label (op0);
9049 store_expr (treeop2, temp,
9050 modifier == EXPAND_STACK_PARM,
9051 false);
9053 emit_label (op1);
9054 OK_DEFER_POP;
9055 return temp;
9057 case VEC_COND_EXPR:
9058 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9059 return target;
9061 default:
9062 gcc_unreachable ();
9065 /* Here to do an ordinary binary operator. */
9066 binop:
9067 expand_operands (treeop0, treeop1,
9068 subtarget, &op0, &op1, EXPAND_NORMAL);
9069 binop2:
9070 this_optab = optab_for_tree_code (code, type, optab_default);
9071 binop3:
9072 if (modifier == EXPAND_STACK_PARM)
9073 target = 0;
9074 temp = expand_binop (mode, this_optab, op0, op1, target,
9075 unsignedp, OPTAB_LIB_WIDEN);
9076 gcc_assert (temp);
9077 /* Bitwise operations do not need bitfield reduction as we expect their
9078 operands to be properly truncated. */
9079 if (code == BIT_XOR_EXPR
9080 || code == BIT_AND_EXPR
9081 || code == BIT_IOR_EXPR)
9082 return temp;
9083 return REDUCE_BIT_FIELD (temp);
9085 #undef REDUCE_BIT_FIELD
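/* Editor's note: illustrative only; the helper name is invented.  It shows
   what REDUCE_BIT_FIELD above boils down to: arithmetic on a type narrower
   than its mode (say a 3-bit unsigned bit-field held in QImode) can leave
   stray high bits, so the result is masked or sign-extended back to
   TYPE_PRECISION.  */
#if 0
static rtx
example_reduce_bit_field (rtx sum, tree narrow_type)
{
  /* Equivalent to REDUCE_BIT_FIELD (sum) with target == NULL_RTX.  */
  return reduce_to_bit_field_precision (sum, NULL_RTX, narrow_type);
}
#endif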
9088 /* Return TRUE if expression STMT is suitable for replacement.
9089 Never consider memory loads as replaceable, because those don't ever lead
9090 into constant expressions. */
9092 static bool
9093 stmt_is_replaceable_p (gimple stmt)
9095 if (ssa_is_replaceable_p (stmt))
9097 /* Don't move around loads. */
9098 if (!gimple_assign_single_p (stmt)
9099 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9100 return true;
9102 return false;
9105 rtx
9106 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9107 enum expand_modifier modifier, rtx *alt_rtl)
9109 rtx op0, op1, temp, decl_rtl;
9110 tree type;
9111 int unsignedp;
9112 enum machine_mode mode;
9113 enum tree_code code = TREE_CODE (exp);
9114 rtx subtarget, original_target;
9115 int ignore;
9116 tree context;
9117 bool reduce_bit_field;
9118 location_t loc = EXPR_LOCATION (exp);
9119 struct separate_ops ops;
9120 tree treeop0, treeop1, treeop2;
9121 tree ssa_name = NULL_TREE;
9122 gimple g;
9124 type = TREE_TYPE (exp);
9125 mode = TYPE_MODE (type);
9126 unsignedp = TYPE_UNSIGNED (type);
9128 treeop0 = treeop1 = treeop2 = NULL_TREE;
9129 if (!VL_EXP_CLASS_P (exp))
9130 switch (TREE_CODE_LENGTH (code))
9132 default:
9133 case 3: treeop2 = TREE_OPERAND (exp, 2);
9134 case 2: treeop1 = TREE_OPERAND (exp, 1);
9135 case 1: treeop0 = TREE_OPERAND (exp, 0);
9136 case 0: break;
9138 ops.code = code;
9139 ops.type = type;
9140 ops.op0 = treeop0;
9141 ops.op1 = treeop1;
9142 ops.op2 = treeop2;
9143 ops.location = loc;
9145 ignore = (target == const0_rtx
9146 || ((CONVERT_EXPR_CODE_P (code)
9147 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9148 && TREE_CODE (type) == VOID_TYPE));
9150 /* An operation in what may be a bit-field type needs the
9151 result to be reduced to the precision of the bit-field type,
9152 which is narrower than that of the type's mode. */
9153 reduce_bit_field = (!ignore
9154 && INTEGRAL_TYPE_P (type)
9155 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9157 /* If we are going to ignore this result, we need only do something
9158 if there is a side-effect somewhere in the expression. If there
9159 is, short-circuit the most common cases here. Note that we must
9160 not call expand_expr with anything but const0_rtx in case this
9161 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9163 if (ignore)
9165 if (! TREE_SIDE_EFFECTS (exp))
9166 return const0_rtx;
9168 /* Ensure we reference a volatile object even if value is ignored, but
9169 don't do this if all we are doing is taking its address. */
9170 if (TREE_THIS_VOLATILE (exp)
9171 && TREE_CODE (exp) != FUNCTION_DECL
9172 && mode != VOIDmode && mode != BLKmode
9173 && modifier != EXPAND_CONST_ADDRESS)
9175 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9176 if (MEM_P (temp))
9177 copy_to_reg (temp);
9178 return const0_rtx;
9181 if (TREE_CODE_CLASS (code) == tcc_unary
9182 || code == BIT_FIELD_REF
9183 || code == COMPONENT_REF
9184 || code == INDIRECT_REF)
9185 return expand_expr (treeop0, const0_rtx, VOIDmode,
9186 modifier);
9188 else if (TREE_CODE_CLASS (code) == tcc_binary
9189 || TREE_CODE_CLASS (code) == tcc_comparison
9190 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9192 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9193 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9194 return const0_rtx;
9197 target = 0;
9200 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9201 target = 0;
9203 /* Use subtarget as the target for operand 0 of a binary operation. */
9204 subtarget = get_subtarget (target);
9205 original_target = target;
9207 switch (code)
9209 case LABEL_DECL:
9211 tree function = decl_function_context (exp);
9213 temp = label_rtx (exp);
9214 temp = gen_rtx_LABEL_REF (Pmode, temp);
9216 if (function != current_function_decl
9217 && function != 0)
9218 LABEL_REF_NONLOCAL_P (temp) = 1;
9220 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9221 return temp;
9224 case SSA_NAME:
9225 /* ??? ivopts calls the expander without any preparation from
9226 out-of-ssa. So fake instructions as if this were an access to the
9227 base variable. This unnecessarily allocates a pseudo; see whether we
9228 can reuse it once partition base vars have it set already. */
9229 if (!currently_expanding_to_rtl)
9231 tree var = SSA_NAME_VAR (exp);
9232 if (var && DECL_RTL_SET_P (var))
9233 return DECL_RTL (var);
9234 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9235 LAST_VIRTUAL_REGISTER + 1);
9238 g = get_gimple_for_ssa_name (exp);
9239 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9240 if (g == NULL
9241 && modifier == EXPAND_INITIALIZER
9242 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9243 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9244 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9245 g = SSA_NAME_DEF_STMT (exp);
9246 if (g)
9248 rtx r;
9249 location_t saved_loc = curr_insn_location ();
9251 set_curr_insn_location (gimple_location (g));
9252 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9253 tmode, modifier, NULL);
9254 set_curr_insn_location (saved_loc);
9255 if (REG_P (r) && !REG_EXPR (r))
9256 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9257 return r;
9260 ssa_name = exp;
9261 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9262 exp = SSA_NAME_VAR (ssa_name);
9263 goto expand_decl_rtl;
9265 case PARM_DECL:
9266 case VAR_DECL:
9267 /* If a static var's type was incomplete when the decl was written,
9268 but the type is complete now, lay out the decl now. */
9269 if (DECL_SIZE (exp) == 0
9270 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9271 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9272 layout_decl (exp, 0);
9274 /* ... fall through ... */
9276 case FUNCTION_DECL:
9277 case RESULT_DECL:
9278 decl_rtl = DECL_RTL (exp);
9279 expand_decl_rtl:
9280 gcc_assert (decl_rtl);
9281 decl_rtl = copy_rtx (decl_rtl);
9282 /* Record writes to register variables. */
9283 if (modifier == EXPAND_WRITE
9284 && REG_P (decl_rtl)
9285 && HARD_REGISTER_P (decl_rtl))
9286 add_to_hard_reg_set (&crtl->asm_clobbers,
9287 GET_MODE (decl_rtl), REGNO (decl_rtl));
9289 /* Ensure the variable is marked as used even if it doesn't go through
9290 a parser. If it hasn't been used yet, write out an external
9291 definition. */
9292 TREE_USED (exp) = 1;
9294 /* Show we haven't gotten RTL for this yet. */
9295 temp = 0;
9297 /* Variables inherited from containing functions should have
9298 been lowered by this point. */
9299 context = decl_function_context (exp);
9300 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9301 || context == current_function_decl
9302 || TREE_STATIC (exp)
9303 || DECL_EXTERNAL (exp)
9304 /* ??? C++ creates functions that are not TREE_STATIC. */
9305 || TREE_CODE (exp) == FUNCTION_DECL);
9307 /* This is the case of an array whose size is to be determined
9308 from its initializer, while the initializer is still being parsed.
9309 ??? We aren't parsing while expanding anymore. */
9311 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9312 temp = validize_mem (decl_rtl);
9314 /* If DECL_RTL is memory, we are in the normal case and the
9315 address is not valid, get the address into a register. */
9317 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9319 if (alt_rtl)
9320 *alt_rtl = decl_rtl;
9321 decl_rtl = use_anchored_address (decl_rtl);
9322 if (modifier != EXPAND_CONST_ADDRESS
9323 && modifier != EXPAND_SUM
9324 && !memory_address_addr_space_p (DECL_MODE (exp),
9325 XEXP (decl_rtl, 0),
9326 MEM_ADDR_SPACE (decl_rtl)))
9327 temp = replace_equiv_address (decl_rtl,
9328 copy_rtx (XEXP (decl_rtl, 0)));
9331 /* If we got something, return it. But first, set the alignment
9332 if the address is a register. */
9333 if (temp != 0)
9335 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9336 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9338 return temp;
9341 /* If the mode of DECL_RTL does not match that of the decl,
9342 there are two cases: we are dealing with a BLKmode value
9343 that is returned in a register, or we are dealing with
9344 a promoted value. In the latter case, return a SUBREG
9345 of the wanted mode, but mark it so that we know that it
9346 was already extended. */
9347 if (REG_P (decl_rtl)
9348 && DECL_MODE (exp) != BLKmode
9349 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9351 enum machine_mode pmode;
9353 /* Get the signedness to be used for this variable. Ensure we get
9354 the same mode we got when the variable was declared. */
9355 if (code == SSA_NAME
9356 && (g = SSA_NAME_DEF_STMT (ssa_name))
9357 && gimple_code (g) == GIMPLE_CALL)
9359 gcc_assert (!gimple_call_internal_p (g));
9360 pmode = promote_function_mode (type, mode, &unsignedp,
9361 gimple_call_fntype (g),
9362 2);
9364 else
9365 pmode = promote_decl_mode (exp, &unsignedp);
9366 gcc_assert (GET_MODE (decl_rtl) == pmode);
9368 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9369 SUBREG_PROMOTED_VAR_P (temp) = 1;
9370 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9371 return temp;
9374 return decl_rtl;
9376 case INTEGER_CST:
9377 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9378 TREE_INT_CST_HIGH (exp), mode);
9380 return temp;
9382 case VECTOR_CST:
9384 tree tmp = NULL_TREE;
9385 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9386 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9387 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9388 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9389 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9390 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9391 return const_vector_from_tree (exp);
9392 if (GET_MODE_CLASS (mode) == MODE_INT)
9394 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9395 if (type_for_mode)
9396 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9398 if (!tmp)
9400 vec<constructor_elt, va_gc> *v;
9401 unsigned i;
9402 vec_alloc (v, VECTOR_CST_NELTS (exp));
9403 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9404 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9405 tmp = build_constructor (type, v);
9407 return expand_expr (tmp, ignore ? const0_rtx : target,
9408 tmode, modifier);
9411 case CONST_DECL:
9412 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9414 case REAL_CST:
9415 /* If optimized, generate immediate CONST_DOUBLE
9416 which will be turned into memory by reload if necessary.
9418 We used to force a register so that loop.c could see it. But
9419 this does not allow gen_* patterns to perform optimizations with
9420 the constants. It also produces two insns in cases like "x = 1.0;".
9421 On most machines, floating-point constants are not permitted in
9422 many insns, so we'd end up copying it to a register in any case.
9424 Now, we do the copying in expand_binop, if appropriate. */
9425 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9426 TYPE_MODE (TREE_TYPE (exp)));
9428 case FIXED_CST:
9429 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9430 TYPE_MODE (TREE_TYPE (exp)));
9432 case COMPLEX_CST:
9433 /* Handle evaluating a complex constant in a CONCAT target. */
9434 if (original_target && GET_CODE (original_target) == CONCAT)
9436 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9437 rtx rtarg, itarg;
9439 rtarg = XEXP (original_target, 0);
9440 itarg = XEXP (original_target, 1);
9442 /* Move the real and imaginary parts separately. */
9443 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9444 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9446 if (op0 != rtarg)
9447 emit_move_insn (rtarg, op0);
9448 if (op1 != itarg)
9449 emit_move_insn (itarg, op1);
9451 return original_target;
9454 /* ... fall through ... */
9456 case STRING_CST:
9457 temp = expand_expr_constant (exp, 1, modifier);
9459 /* temp contains a constant address.
9460 On RISC machines where a constant address isn't valid,
9461 make some insns to get that address into a register. */
9462 if (modifier != EXPAND_CONST_ADDRESS
9463 && modifier != EXPAND_INITIALIZER
9464 && modifier != EXPAND_SUM
9465 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9466 MEM_ADDR_SPACE (temp)))
9467 return replace_equiv_address (temp,
9468 copy_rtx (XEXP (temp, 0)));
9469 return temp;
9471 case SAVE_EXPR:
9473 tree val = treeop0;
9474 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9476 if (!SAVE_EXPR_RESOLVED_P (exp))
9478 /* We can indeed still hit this case, typically via builtin
9479 expanders calling save_expr immediately before expanding
9480 something. Assume this means that we only have to deal
9481 with non-BLKmode values. */
9482 gcc_assert (GET_MODE (ret) != BLKmode);
9484 val = build_decl (curr_insn_location (),
9485 VAR_DECL, NULL, TREE_TYPE (exp));
9486 DECL_ARTIFICIAL (val) = 1;
9487 DECL_IGNORED_P (val) = 1;
9488 treeop0 = val;
9489 TREE_OPERAND (exp, 0) = treeop0;
9490 SAVE_EXPR_RESOLVED_P (exp) = 1;
9492 if (!CONSTANT_P (ret))
9493 ret = copy_to_reg (ret);
9494 SET_DECL_RTL (val, ret);
9497 return ret;
9501 case CONSTRUCTOR:
9502 /* If we don't need the result, just ensure we evaluate any
9503 subexpressions. */
9504 if (ignore)
9506 unsigned HOST_WIDE_INT idx;
9507 tree value;
9509 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9510 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9512 return const0_rtx;
9515 return expand_constructor (exp, target, modifier, false);
9517 case TARGET_MEM_REF:
9519 addr_space_t as
9520 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9521 enum insn_code icode;
9522 unsigned int align;
9524 op0 = addr_for_mem_ref (exp, as, true);
9525 op0 = memory_address_addr_space (mode, op0, as);
9526 temp = gen_rtx_MEM (mode, op0);
9527 set_mem_attributes (temp, exp, 0);
9528 set_mem_addr_space (temp, as);
9529 align = get_object_alignment (exp);
9530 if (modifier != EXPAND_WRITE
9531 && modifier != EXPAND_MEMORY
9532 && mode != BLKmode
9533 && align < GET_MODE_ALIGNMENT (mode)
9534 /* If the target lacks special handling (a movmisalign pattern) for
9535 unaligned loads of this mode, regular moves can handle them. */
9536 && ((icode = optab_handler (movmisalign_optab, mode))
9537 != CODE_FOR_nothing))
9539 struct expand_operand ops[2];
9541 /* We've already validated the memory, and we're creating a
9542 new pseudo destination. The predicates really can't fail,
9543 nor can the generator. */
9544 create_output_operand (&ops[0], NULL_RTX, mode);
9545 create_fixed_operand (&ops[1], temp);
9546 expand_insn (icode, 2, ops);
9547 temp = ops[0].value;
9549 return temp;
9552 case MEM_REF:
9554 addr_space_t as
9555 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9556 enum machine_mode address_mode;
9557 tree base = TREE_OPERAND (exp, 0);
9558 gimple def_stmt;
9559 enum insn_code icode;
9560 unsigned align;
9561 /* Handle expansion of non-aliased memory with non-BLKmode. That
9562 might end up in a register. */
9563 if (mem_ref_refers_to_non_mem_p (exp))
9565 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9566 base = TREE_OPERAND (base, 0);
9567 if (offset == 0
9568 && host_integerp (TYPE_SIZE (type), 1)
9569 && (GET_MODE_BITSIZE (DECL_MODE (base))
9570 == TREE_INT_CST_LOW (TYPE_SIZE (type))))
9571 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9572 target, tmode, modifier);
9573 if (TYPE_MODE (type) == BLKmode)
9575 temp = assign_stack_temp (DECL_MODE (base),
9576 GET_MODE_SIZE (DECL_MODE (base)));
9577 store_expr (base, temp, 0, false);
9578 temp = adjust_address (temp, BLKmode, offset);
9579 set_mem_size (temp, int_size_in_bytes (type));
9580 return temp;
9582 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9583 bitsize_int (offset * BITS_PER_UNIT));
9584 return expand_expr (exp, target, tmode, modifier);
9586 address_mode = targetm.addr_space.address_mode (as);
9587 base = TREE_OPERAND (exp, 0);
9588 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9590 tree mask = gimple_assign_rhs2 (def_stmt);
9591 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9592 gimple_assign_rhs1 (def_stmt), mask);
9593 TREE_OPERAND (exp, 0) = base;
9595 align = get_object_alignment (exp);
9596 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9597 op0 = memory_address_addr_space (mode, op0, as);
9598 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9600 rtx off
9601 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9602 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9603 op0 = memory_address_addr_space (mode, op0, as);
9605 temp = gen_rtx_MEM (mode, op0);
9606 set_mem_attributes (temp, exp, 0);
9607 set_mem_addr_space (temp, as);
9608 if (TREE_THIS_VOLATILE (exp))
9609 MEM_VOLATILE_P (temp) = 1;
9610 if (modifier != EXPAND_WRITE
9611 && modifier != EXPAND_MEMORY
9612 && mode != BLKmode
9613 && align < GET_MODE_ALIGNMENT (mode))
9615 if ((icode = optab_handler (movmisalign_optab, mode))
9616 != CODE_FOR_nothing)
9618 struct expand_operand ops[2];
9620 /* We've already validated the memory, and we're creating a
9621 new pseudo destination. The predicates really can't fail,
9622 nor can the generator. */
9623 create_output_operand (&ops[0], NULL_RTX, mode);
9624 create_fixed_operand (&ops[1], temp);
9625 expand_insn (icode, 2, ops);
9626 temp = ops[0].value;
9628 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9629 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9630 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9631 (modifier == EXPAND_STACK_PARM
9632 ? NULL_RTX : target),
9633 mode, mode);
9635 return temp;
9638 case ARRAY_REF:
9641 tree array = treeop0;
9642 tree index = treeop1;
9643 tree init;
9645 /* Fold an expression like: "foo"[2].
9646 This is not done in fold so it won't happen inside &.
9647 Don't fold if this is for wide characters since it's too
9648 difficult to do correctly and this is a very rare case. */
9650 if (modifier != EXPAND_CONST_ADDRESS
9651 && modifier != EXPAND_INITIALIZER
9652 && modifier != EXPAND_MEMORY)
9654 tree t = fold_read_from_constant_string (exp);
9656 if (t)
9657 return expand_expr (t, target, tmode, modifier);
9660 /* If this is a constant index into a constant array,
9661 just get the value from the array. Handle both the cases when
9662 we have an explicit constructor and when our operand is a variable
9663 that was declared const. */
9665 if (modifier != EXPAND_CONST_ADDRESS
9666 && modifier != EXPAND_INITIALIZER
9667 && modifier != EXPAND_MEMORY
9668 && TREE_CODE (array) == CONSTRUCTOR
9669 && ! TREE_SIDE_EFFECTS (array)
9670 && TREE_CODE (index) == INTEGER_CST)
9672 unsigned HOST_WIDE_INT ix;
9673 tree field, value;
9675 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9676 field, value)
9677 if (tree_int_cst_equal (field, index))
9679 if (!TREE_SIDE_EFFECTS (value))
9680 return expand_expr (fold (value), target, tmode, modifier);
9681 break;
9685 else if (optimize >= 1
9686 && modifier != EXPAND_CONST_ADDRESS
9687 && modifier != EXPAND_INITIALIZER
9688 && modifier != EXPAND_MEMORY
9689 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9690 && TREE_CODE (index) == INTEGER_CST
9691 && (TREE_CODE (array) == VAR_DECL
9692 || TREE_CODE (array) == CONST_DECL)
9693 && (init = ctor_for_folding (array)) != error_mark_node)
9695 if (TREE_CODE (init) == CONSTRUCTOR)
9697 unsigned HOST_WIDE_INT ix;
9698 tree field, value;
9700 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9701 field, value)
9702 if (tree_int_cst_equal (field, index))
9704 if (TREE_SIDE_EFFECTS (value))
9705 break;
9707 if (TREE_CODE (value) == CONSTRUCTOR)
9709 /* If VALUE is a CONSTRUCTOR, this
9710 optimization is only useful if
9711 this doesn't store the CONSTRUCTOR
9712 into memory. If it does, it is more
9713 efficient to just load the data from
9714 the array directly. */
9715 rtx ret = expand_constructor (value, target,
9716 modifier, true);
9717 if (ret == NULL_RTX)
9718 break;
9721 return
9722 expand_expr (fold (value), target, tmode, modifier);
9725 else if (TREE_CODE (init) == STRING_CST)
9727 tree low_bound = array_ref_low_bound (exp);
9728 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9730 /* Optimize the special case of a zero lower bound.
9732 We convert the lower bound to sizetype to avoid problems
9733 with constant folding. E.g. suppose the lower bound is
9734 1 and its mode is QI. Without the conversion
9735 (ARRAY + (INDEX - (unsigned char)1))
9736 becomes
9737 (ARRAY + (-(unsigned char)1) + INDEX)
9738 which becomes
9739 (ARRAY + 255 + INDEX). Oops! */
9740 if (!integer_zerop (low_bound))
9741 index1 = size_diffop_loc (loc, index1,
9742 fold_convert_loc (loc, sizetype,
9743 low_bound));
9745 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9747 tree type = TREE_TYPE (TREE_TYPE (init));
9748 enum machine_mode mode = TYPE_MODE (type);
9750 if (GET_MODE_CLASS (mode) == MODE_INT
9751 && GET_MODE_SIZE (mode) == 1)
9752 return gen_int_mode (TREE_STRING_POINTER (init)
9753 [TREE_INT_CST_LOW (index1)],
9754 mode);
9759 goto normal_inner_ref;
9761 case COMPONENT_REF:
9762 /* If the operand is a CONSTRUCTOR, we can just extract the
9763 appropriate field if it is present. */
9764 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9766 unsigned HOST_WIDE_INT idx;
9767 tree field, value;
9769 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9770 idx, field, value)
9771 if (field == treeop1
9772 /* We can normally use the value of the field in the
9773 CONSTRUCTOR. However, if this is a bitfield in
9774 an integral mode that we can fit in a HOST_WIDE_INT,
9775 we must mask only the number of bits in the bitfield,
9776 since this is done implicitly by the constructor. If
9777 the bitfield does not meet either of those conditions,
9778 we can't do this optimization. */
9779 && (! DECL_BIT_FIELD (field)
9780 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9781 && (GET_MODE_PRECISION (DECL_MODE (field))
9782 <= HOST_BITS_PER_WIDE_INT))))
9784 if (DECL_BIT_FIELD (field)
9785 && modifier == EXPAND_STACK_PARM)
9786 target = 0;
9787 op0 = expand_expr (value, target, tmode, modifier);
9788 if (DECL_BIT_FIELD (field))
9790 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9791 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9793 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9795 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9796 imode);
9797 op0 = expand_and (imode, op0, op1, target);
9799 else
9801 int count = GET_MODE_PRECISION (imode) - bitsize;
9803 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9804 target, 0);
9805 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9806 target, 0);
9810 return op0;
9813 goto normal_inner_ref;
9815 case BIT_FIELD_REF:
9816 case ARRAY_RANGE_REF:
9817 normal_inner_ref:
9819 enum machine_mode mode1, mode2;
9820 HOST_WIDE_INT bitsize, bitpos;
9821 tree offset;
9822 int volatilep = 0, must_force_mem;
9823 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9824 &mode1, &unsignedp, &volatilep, true);
9825 rtx orig_op0, memloc;
9826 bool mem_attrs_from_type = false;
9828 /* If we got back the original object, something is wrong. Perhaps
9829 we are evaluating an expression too early. In any event, don't
9830 infinitely recurse. */
9831 gcc_assert (tem != exp);
9833 /* If TEM's type is a union of variable size, pass TARGET to the inner
9834 computation, since it will need a temporary and TARGET is known
9835 to be suitable. This occurs in unchecked conversion in Ada. */
9836 orig_op0 = op0
9837 = expand_expr (tem,
9838 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9839 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9840 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9841 != INTEGER_CST)
9842 && modifier != EXPAND_STACK_PARM
9843 ? target : NULL_RTX),
9844 VOIDmode,
9845 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9847 /* If the bitfield is volatile, we want to access it in the
9848 field's mode, not the computed mode.
9849 If a MEM has VOIDmode (external with incomplete type),
9850 use BLKmode for it instead. */
9851 if (MEM_P (op0))
9853 if (volatilep && flag_strict_volatile_bitfields > 0)
9854 op0 = adjust_address (op0, mode1, 0);
9855 else if (GET_MODE (op0) == VOIDmode)
9856 op0 = adjust_address (op0, BLKmode, 0);
9859 mode2
9860 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9862 /* If we have either an offset, a BLKmode result, or a reference
9863 outside the underlying object, we must force it to memory.
9864 Such a case can occur in Ada if we have unchecked conversion
9865 of an expression from a scalar type to an aggregate type or
9866 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9867 passed a partially uninitialized object or a view-conversion
9868 to a larger size. */
9869 must_force_mem = (offset
9870 || mode1 == BLKmode
9871 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9873 /* Handle CONCAT first. */
9874 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9876 if (bitpos == 0
9877 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9878 return op0;
9879 if (bitpos == 0
9880 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9881 && bitsize)
9883 op0 = XEXP (op0, 0);
9884 mode2 = GET_MODE (op0);
9886 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9887 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9888 && bitpos
9889 && bitsize)
9891 op0 = XEXP (op0, 1);
9892 bitpos = 0;
9893 mode2 = GET_MODE (op0);
9895 else
9896 /* Otherwise force into memory. */
9897 must_force_mem = 1;
9900 /* If this is a constant, put it in a register if it is a legitimate
9901 constant and we don't need a memory reference. */
9902 if (CONSTANT_P (op0)
9903 && mode2 != BLKmode
9904 && targetm.legitimate_constant_p (mode2, op0)
9905 && !must_force_mem)
9906 op0 = force_reg (mode2, op0);
9908 /* Otherwise, if this is a constant, try to force it to the constant
9909 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9910 is a legitimate constant. */
9911 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9912 op0 = validize_mem (memloc);
9914 /* Otherwise, if this is a constant or the object is not in memory
9915 and need be, put it there. */
9916 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9918 tree nt = build_qualified_type (TREE_TYPE (tem),
9919 (TYPE_QUALS (TREE_TYPE (tem))
9920 | TYPE_QUAL_CONST));
9921 memloc = assign_temp (nt, 1, 1);
9922 emit_move_insn (memloc, op0);
9923 op0 = memloc;
9924 mem_attrs_from_type = true;
9927 if (offset)
9929 enum machine_mode address_mode;
9930 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9931 EXPAND_SUM);
9933 gcc_assert (MEM_P (op0));
9935 address_mode = get_address_mode (op0);
9936 if (GET_MODE (offset_rtx) != address_mode)
9937 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9939 if (GET_MODE (op0) == BLKmode
9940 /* A constant address in OP0 can have VOIDmode, we must
9941 not try to call force_reg in that case. */
9942 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9943 && bitsize != 0
9944 && (bitpos % bitsize) == 0
9945 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9946 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9948 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9949 bitpos = 0;
9952 op0 = offset_address (op0, offset_rtx,
9953 highest_pow2_factor (offset));
9956 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9957 record its alignment as BIGGEST_ALIGNMENT. */
9958 if (MEM_P (op0) && bitpos == 0 && offset != 0
9959 && is_aligning_offset (offset, tem))
9960 set_mem_align (op0, BIGGEST_ALIGNMENT);
9962 /* Don't forget about volatility even if this is a bitfield. */
9963 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9965 if (op0 == orig_op0)
9966 op0 = copy_rtx (op0);
9968 MEM_VOLATILE_P (op0) = 1;
9971 /* In cases where an aligned union has an unaligned object
9972 as a field, we might be extracting a BLKmode value from
9973 an integer-mode (e.g., SImode) object. Handle this case
9974 by doing the extract into an object as wide as the field
9975 (which we know to be the width of a basic mode), then
9976 storing into memory, and changing the mode to BLKmode. */
9977 if (mode1 == VOIDmode
9978 || REG_P (op0) || GET_CODE (op0) == SUBREG
9979 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9980 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9981 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9982 && modifier != EXPAND_CONST_ADDRESS
9983 && modifier != EXPAND_INITIALIZER
9984 && modifier != EXPAND_MEMORY)
9985 /* If the field is volatile, we always want an aligned
9986 access. Do this in the following two situations:
9987 1. the access is not already naturally
9988 aligned, otherwise "normal" (non-bitfield) volatile fields
9989 would become non-addressable.
9990 2. the bitsize is narrower than the access size, so
9991 bitfields need to be extracted from the access. */
9992 || (volatilep && flag_strict_volatile_bitfields > 0
9993 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
9994 || (mode1 != BLKmode
9995 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
9996 /* If the field isn't aligned enough to fetch as a memref,
9997 fetch it as a bit field. */
9998 || (mode1 != BLKmode
9999 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10000 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10001 || (MEM_P (op0)
10002 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10003 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10004 && modifier != EXPAND_MEMORY
10005 && ((modifier == EXPAND_CONST_ADDRESS
10006 || modifier == EXPAND_INITIALIZER)
10007 ? STRICT_ALIGNMENT
10008 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10009 || (bitpos % BITS_PER_UNIT != 0)))
10010 /* If the type and the field are a constant size and the
10011 size of the type isn't the same size as the bitfield,
10012 we must use bitfield operations. */
10013 || (bitsize >= 0
10014 && TYPE_SIZE (TREE_TYPE (exp))
10015 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10016 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10017 bitsize)))
10019 enum machine_mode ext_mode = mode;
10021 if (ext_mode == BLKmode
10022 && ! (target != 0 && MEM_P (op0)
10023 && MEM_P (target)
10024 && bitpos % BITS_PER_UNIT == 0))
10025 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10027 if (ext_mode == BLKmode)
10029 if (target == 0)
10030 target = assign_temp (type, 1, 1);
10032 if (bitsize == 0)
10033 return target;
10035 /* In this case, BITPOS must start at a byte boundary and
10036 TARGET, if specified, must be a MEM. */
10037 gcc_assert (MEM_P (op0)
10038 && (!target || MEM_P (target))
10039 && !(bitpos % BITS_PER_UNIT));
10041 emit_block_move (target,
10042 adjust_address (op0, VOIDmode,
10043 bitpos / BITS_PER_UNIT),
10044 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10045 / BITS_PER_UNIT),
10046 (modifier == EXPAND_STACK_PARM
10047 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10049 return target;
10052 op0 = validize_mem (op0);
10054 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10055 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10057 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10058 (modifier == EXPAND_STACK_PARM
10059 ? NULL_RTX : target),
10060 ext_mode, ext_mode);
10062 /* If the result is a record type and BITSIZE is narrower than
10063 the mode of OP0, an integral mode, and this is a big endian
10064 machine, we must put the field into the high-order bits. */
10065 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10066 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10067 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10068 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10069 GET_MODE_BITSIZE (GET_MODE (op0))
10070 - bitsize, op0, 1);
10072 /* If the result type is BLKmode, store the data into a temporary
10073 of the appropriate type, but with the mode corresponding to the
10074 mode for the data we have (op0's mode). It's tempting to make
10075 this a constant type, since we know it's only being stored once,
10076 but that can cause problems if we are taking the address of this
10077 COMPONENT_REF because the MEM of any reference via that address
10078 will have flags corresponding to the type, which will not
10079 necessarily be constant. */
10080 if (mode == BLKmode)
10082 rtx new_rtx;
10084 new_rtx = assign_stack_temp_for_type (ext_mode,
10085 GET_MODE_BITSIZE (ext_mode),
10086 type);
10087 emit_move_insn (new_rtx, op0);
10088 op0 = copy_rtx (new_rtx);
10089 PUT_MODE (op0, BLKmode);
10092 return op0;
10095 /* If the result is BLKmode, use that to access the object
10096 now as well. */
10097 if (mode == BLKmode)
10098 mode1 = BLKmode;
10100 /* Get a reference to just this component. */
10101 if (modifier == EXPAND_CONST_ADDRESS
10102 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10103 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10104 else
10105 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10107 if (op0 == orig_op0)
10108 op0 = copy_rtx (op0);
10110 /* If op0 is a temporary because of forcing to memory, pass only the
10111 type to set_mem_attributes so that the original expression is never
10112 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10113 if (mem_attrs_from_type)
10114 set_mem_attributes (op0, type, 0);
10115 else
10116 set_mem_attributes (op0, exp, 0);
10118 if (REG_P (XEXP (op0, 0)))
10119 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10121 MEM_VOLATILE_P (op0) |= volatilep;
10122 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10123 || modifier == EXPAND_CONST_ADDRESS
10124 || modifier == EXPAND_INITIALIZER)
10125 return op0;
10127 if (target == 0)
10128 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10130 convert_move (target, op0, unsignedp);
10131 return target;
10134 case OBJ_TYPE_REF:
10135 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10137 case CALL_EXPR:
10138 /* All valid uses of __builtin_va_arg_pack () are removed during
10139 inlining. */
10140 if (CALL_EXPR_VA_ARG_PACK (exp))
10141 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10143 tree fndecl = get_callee_fndecl (exp), attr;
10145 if (fndecl
10146 && (attr = lookup_attribute ("error",
10147 DECL_ATTRIBUTES (fndecl))) != NULL)
10148 error ("%Kcall to %qs declared with attribute error: %s",
10149 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10150 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10151 if (fndecl
10152 && (attr = lookup_attribute ("warning",
10153 DECL_ATTRIBUTES (fndecl))) != NULL)
10154 warning_at (tree_nonartificial_location (exp),
10155 0, "%Kcall to %qs declared with attribute warning: %s",
10156 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10157 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10159 /* Check for a built-in function. */
10160 if (fndecl && DECL_BUILT_IN (fndecl))
10162 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10163 return expand_builtin (exp, target, subtarget, tmode, ignore);
10166 return expand_call (exp, target, ignore);
10168 case VIEW_CONVERT_EXPR:
10169 op0 = NULL_RTX;
10171 /* If we are converting to BLKmode, try to avoid an intermediate
10172 temporary by fetching an inner memory reference. */
10173 if (mode == BLKmode
10174 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10175 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10176 && handled_component_p (treeop0))
10178 enum machine_mode mode1;
10179 HOST_WIDE_INT bitsize, bitpos;
10180 tree offset;
10181 int unsignedp;
10182 int volatilep = 0;
10183 tree tem
10184 = get_inner_reference (treeop0, &bitsize, &bitpos,
10185 &offset, &mode1, &unsignedp, &volatilep,
10186 true);
10187 rtx orig_op0;
10189 /* ??? We should work harder and deal with non-zero offsets. */
10190 if (!offset
10191 && (bitpos % BITS_PER_UNIT) == 0
10192 && bitsize >= 0
10193 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10195 /* See the normal_inner_ref case for the rationale. */
10196 orig_op0
10197 = expand_expr (tem,
10198 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10199 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10200 != INTEGER_CST)
10201 && modifier != EXPAND_STACK_PARM
10202 ? target : NULL_RTX),
10203 VOIDmode,
10204 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10206 if (MEM_P (orig_op0))
10208 op0 = orig_op0;
10210 /* Get a reference to just this component. */
10211 if (modifier == EXPAND_CONST_ADDRESS
10212 || modifier == EXPAND_SUM
10213 || modifier == EXPAND_INITIALIZER)
10214 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10215 else
10216 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10218 if (op0 == orig_op0)
10219 op0 = copy_rtx (op0);
10221 set_mem_attributes (op0, treeop0, 0);
10222 if (REG_P (XEXP (op0, 0)))
10223 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10225 MEM_VOLATILE_P (op0) |= volatilep;
10230 if (!op0)
10231 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10233 /* If the input and output modes are both the same, we are done. */
10234 if (mode == GET_MODE (op0))
10236 /* If neither mode is BLKmode, and both modes are the same size
10237 then we can use gen_lowpart. */
10238 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10239 && (GET_MODE_PRECISION (mode)
10240 == GET_MODE_PRECISION (GET_MODE (op0)))
10241 && !COMPLEX_MODE_P (GET_MODE (op0)))
10243 if (GET_CODE (op0) == SUBREG)
10244 op0 = force_reg (GET_MODE (op0), op0);
10245 temp = gen_lowpart_common (mode, op0);
10246 if (temp)
10247 op0 = temp;
10248 else
10250 if (!REG_P (op0) && !MEM_P (op0))
10251 op0 = force_reg (GET_MODE (op0), op0);
10252 op0 = gen_lowpart (mode, op0);
10255 /* If both types are integral, convert from one mode to the other. */
10256 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10257 op0 = convert_modes (mode, GET_MODE (op0), op0,
10258 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10259 /* As a last resort, spill op0 to memory, and reload it in a
10260 different mode. */
10261 else if (!MEM_P (op0))
10263 /* If the operand is not a MEM, force it into memory. Since we
10264 are going to be changing the mode of the MEM, don't call
10265 force_const_mem for constants because we don't allow pool
10266 constants to change mode. */
10267 tree inner_type = TREE_TYPE (treeop0);
10269 gcc_assert (!TREE_ADDRESSABLE (exp));
10271 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10272 target
10273 = assign_stack_temp_for_type
10274 (TYPE_MODE (inner_type),
10275 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10277 emit_move_insn (target, op0);
10278 op0 = target;
10281 /* At this point, OP0 is in the correct mode. If the output type is
10282 such that the operand is known to be aligned, indicate that it is.
10283 Otherwise, we need only be concerned about alignment for non-BLKmode
10284 results. */
10285 if (MEM_P (op0))
10287 enum insn_code icode;
10289 if (TYPE_ALIGN_OK (type))
10291 /* ??? Copying the MEM without substantially changing it might
10292 run afoul of the code handling volatile memory references in
10293 store_expr, which assumes that TARGET is returned unmodified
10294 if it has been used. */
10295 op0 = copy_rtx (op0);
10296 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10298 else if (mode != BLKmode
10299 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10300 /* If the target does have special handling for unaligned
10301 loads of this mode, then use it. */
10302 && ((icode = optab_handler (movmisalign_optab, mode))
10303 != CODE_FOR_nothing))
10305 rtx reg, insn;
10307 op0 = adjust_address (op0, mode, 0);
10308 /* We've already validated the memory, and we're creating a
10309 new pseudo destination. The predicates really can't
10310 fail. */
10311 reg = gen_reg_rtx (mode);
10313 /* Nor can the insn generator. */
10314 insn = GEN_FCN (icode) (reg, op0);
10315 emit_insn (insn);
10316 return reg;
10318 else if (STRICT_ALIGNMENT
10319 && mode != BLKmode
10320 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10322 tree inner_type = TREE_TYPE (treeop0);
10323 HOST_WIDE_INT temp_size
10324 = MAX (int_size_in_bytes (inner_type),
10325 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10326 rtx new_rtx
10327 = assign_stack_temp_for_type (mode, temp_size, type);
10328 rtx new_with_op0_mode
10329 = adjust_address (new_rtx, GET_MODE (op0), 0);
10331 gcc_assert (!TREE_ADDRESSABLE (exp));
10333 if (GET_MODE (op0) == BLKmode)
10334 emit_block_move (new_with_op0_mode, op0,
10335 GEN_INT (GET_MODE_SIZE (mode)),
10336 (modifier == EXPAND_STACK_PARM
10337 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10338 else
10339 emit_move_insn (new_with_op0_mode, op0);
10341 op0 = new_rtx;
10344 op0 = adjust_address (op0, mode, 0);
10347 return op0;
10349 case MODIFY_EXPR:
10351 tree lhs = treeop0;
10352 tree rhs = treeop1;
10353 gcc_assert (ignore);
10355 /* Check for |= or &= of a bitfield of size one into another bitfield
10356 of size 1. In this case, (unless we need the result of the
10357 assignment) we can do this more efficiently with a
10358 test followed by an assignment, if necessary.
10360 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10361 things change so we do, this code should be enhanced to
10362 support it. */
10363 if (TREE_CODE (lhs) == COMPONENT_REF
10364 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10365 || TREE_CODE (rhs) == BIT_AND_EXPR)
10366 && TREE_OPERAND (rhs, 0) == lhs
10367 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10368 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10369 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10371 rtx label = gen_label_rtx ();
10372 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10373 do_jump (TREE_OPERAND (rhs, 1),
10374 value ? label : 0,
10375 value ? 0 : label, -1);
10376 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10377 false);
10378 do_pending_stack_adjust ();
10379 emit_label (label);
10380 return const0_rtx;
10383 expand_assignment (lhs, rhs, false);
10384 return const0_rtx;
10387 case ADDR_EXPR:
10388 return expand_expr_addr_expr (exp, target, tmode, modifier);
10390 case REALPART_EXPR:
10391 op0 = expand_normal (treeop0);
10392 return read_complex_part (op0, false);
10394 case IMAGPART_EXPR:
10395 op0 = expand_normal (treeop0);
10396 return read_complex_part (op0, true);
10398 case RETURN_EXPR:
10399 case LABEL_EXPR:
10400 case GOTO_EXPR:
10401 case SWITCH_EXPR:
10402 case ASM_EXPR:
10403 /* Expanded in cfgexpand.c. */
10404 gcc_unreachable ();
10406 case TRY_CATCH_EXPR:
10407 case CATCH_EXPR:
10408 case EH_FILTER_EXPR:
10409 case TRY_FINALLY_EXPR:
10410 /* Lowered by tree-eh.c. */
10411 gcc_unreachable ();
10413 case WITH_CLEANUP_EXPR:
10414 case CLEANUP_POINT_EXPR:
10415 case TARGET_EXPR:
10416 case CASE_LABEL_EXPR:
10417 case VA_ARG_EXPR:
10418 case BIND_EXPR:
10419 case INIT_EXPR:
10420 case CONJ_EXPR:
10421 case COMPOUND_EXPR:
10422 case PREINCREMENT_EXPR:
10423 case PREDECREMENT_EXPR:
10424 case POSTINCREMENT_EXPR:
10425 case POSTDECREMENT_EXPR:
10426 case LOOP_EXPR:
10427 case EXIT_EXPR:
10428 case COMPOUND_LITERAL_EXPR:
10429 /* Lowered by gimplify.c. */
10430 gcc_unreachable ();
10432 case FDESC_EXPR:
10433 /* Function descriptors are not valid except for as
10434 initialization constants, and should not be expanded. */
10435 gcc_unreachable ();
10437 case WITH_SIZE_EXPR:
10438 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10439 have pulled out the size to use in whatever context it needed. */
10440 return expand_expr_real (treeop0, original_target, tmode,
10441 modifier, alt_rtl);
10443 default:
10444 return expand_expr_real_2 (&ops, target, tmode, modifier);
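/* Illustrative sketch (editorial addition, not part of expr.c): the
   MODIFY_EXPR case above turns "lhs |= bit" on one-bit bitfields into a
   conditional jump around a constant store.  This source-level stand-in
   only shows the equivalence of the two forms; the helper names
   or_direct and or_jump are hypothetical, and the real transformation
   requires the right-hand side to be another one-bit COMPONENT_REF.  */

#include <assert.h>

struct flags { unsigned a : 1, b : 1; };

/* Direct form: lhs.a |= bit.  */
static void
or_direct (struct flags *f, unsigned bit)
{
  f->a |= bit;
}

/* Test-and-store form the expander may emit instead: jump around a
   constant store when the tested bit already equals the identity value.  */
static void
or_jump (struct flags *f, unsigned bit)
{
  if (bit)
    f->a = 1;
}

int
main (void)
{
  unsigned a, bit;

  for (a = 0; a < 2; a++)
    for (bit = 0; bit < 2; bit++)
      {
        struct flags x = { 0, 0 }, y = { 0, 0 };
        x.a = a;
        y.a = a;
        or_direct (&x, bit);
        or_jump (&y, bit);
        assert (x.a == y.a);
      }
  return 0;
}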
10448 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10449 signedness of TYPE), possibly returning the result in TARGET. */
10450 static rtx
10451 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10453 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10454 if (target && GET_MODE (target) != GET_MODE (exp))
10455 target = 0;
10456 /* For constant values, reduce using build_int_cst_type. */
10457 if (CONST_INT_P (exp))
10459 HOST_WIDE_INT value = INTVAL (exp);
10460 tree t = build_int_cst_type (type, value);
10461 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10463 else if (TYPE_UNSIGNED (type))
10465 rtx mask = immed_double_int_const (double_int::mask (prec),
10466 GET_MODE (exp));
10467 return expand_and (GET_MODE (exp), exp, mask, target);
10469 else
10471 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10472 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10473 exp, count, target, 0);
10474 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10475 exp, count, target, 0);
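/* Illustrative sketch (editorial addition, not part of expr.c): the two
   reduction strategies used by reduce_to_bit_field_precision above, done
   on plain integers.  The helper name reduce_bits and the use of "long"
   are hypothetical simplifications; right-shifting a negative value is
   implementation-defined in ISO C, which is the same target assumption
   the LSHIFT/RSHIFT pair relies on.  */

#include <assert.h>

static long
reduce_bits (long val, int prec, int unsignedp)
{
  int count = (int) (sizeof (long) * 8) - prec;

  if (unsignedp)
    /* Unsigned: mask off everything above the low PREC bits.  */
    return (long) ((unsigned long) val & (((unsigned long) 1 << prec) - 1));

  /* Signed: shift left so bit PREC-1 becomes the sign bit, then shift
     back arithmetically to sign-extend.  */
  return (long) ((unsigned long) val << count) >> count;
}

int
main (void)
{
  assert (reduce_bits (0x1f3, 8, 1) == 0xf3);  /* unsigned: masked */
  assert (reduce_bits (0xf3, 8, 0) == -13);    /* signed: 0xf3 sign-extends */
  assert (reduce_bits (0x73, 8, 0) == 0x73);   /* sign bit clear: unchanged */
  return 0;
}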
10479 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10480 when applied to the address of EXP produces an address known to be
10481 aligned more than BIGGEST_ALIGNMENT. */
10483 static int
10484 is_aligning_offset (const_tree offset, const_tree exp)
10486 /* Strip off any conversions. */
10487 while (CONVERT_EXPR_P (offset))
10488 offset = TREE_OPERAND (offset, 0);
10490 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10491 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10492 if (TREE_CODE (offset) != BIT_AND_EXPR
10493 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10494 || compare_tree_int (TREE_OPERAND (offset, 1),
10495 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10496 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10497 return 0;
10499 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10500 It must be NEGATE_EXPR. Then strip any more conversions. */
10501 offset = TREE_OPERAND (offset, 0);
10502 while (CONVERT_EXPR_P (offset))
10503 offset = TREE_OPERAND (offset, 0);
10505 if (TREE_CODE (offset) != NEGATE_EXPR)
10506 return 0;
10508 offset = TREE_OPERAND (offset, 0);
10509 while (CONVERT_EXPR_P (offset))
10510 offset = TREE_OPERAND (offset, 0);
10512 /* This must now be the address of EXP. */
10513 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
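/* Illustrative sketch (editorial addition, not part of expr.c): the
   offsets recognized by is_aligning_offset above have the source-level
   shape (-(intptr_t) &object) & (ALIGN - 1), i.e. a NEGATE of the
   object's address masked by one less than a power of two.  Adding such
   an offset to the address yields an ALIGN-aligned pointer, which is why
   the MEM's alignment may be raised to BIGGEST_ALIGNMENT.  ALIGN below
   is a hypothetical value.  */

#include <assert.h>
#include <stdint.h>

#define ALIGN 64

int
main (void)
{
  char buf[2 * ALIGN];
  uintptr_t base = (uintptr_t) buf;
  uintptr_t offset = (-base) & (ALIGN - 1);   /* the aligning offset */

  assert (offset < ALIGN);
  assert (((base + offset) % ALIGN) == 0);    /* result is ALIGN-aligned */
  return 0;
}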
10516 /* Return the tree node if ARG corresponds to a string constant, or zero
10517 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10518 in bytes within the string that ARG is accessing. The type of the
10519 offset will be `sizetype'. */
10521 tree
10522 string_constant (tree arg, tree *ptr_offset)
10524 tree array, offset, lower_bound;
10525 STRIP_NOPS (arg);
10527 if (TREE_CODE (arg) == ADDR_EXPR)
10529 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10531 *ptr_offset = size_zero_node;
10532 return TREE_OPERAND (arg, 0);
10534 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10536 array = TREE_OPERAND (arg, 0);
10537 offset = size_zero_node;
10539 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10541 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10542 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10543 if (TREE_CODE (array) != STRING_CST
10544 && TREE_CODE (array) != VAR_DECL)
10545 return 0;
10547 /* Check if the array has a nonzero lower bound. */
10548 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10549 if (!integer_zerop (lower_bound))
10551 /* If the offset and base aren't both constants, return 0. */
10552 if (TREE_CODE (lower_bound) != INTEGER_CST)
10553 return 0;
10554 if (TREE_CODE (offset) != INTEGER_CST)
10555 return 0;
10556 /* Adjust offset by the lower bound. */
10557 offset = size_diffop (fold_convert (sizetype, offset),
10558 fold_convert (sizetype, lower_bound));
10561 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10563 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10564 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10565 if (TREE_CODE (array) != ADDR_EXPR)
10566 return 0;
10567 array = TREE_OPERAND (array, 0);
10568 if (TREE_CODE (array) != STRING_CST
10569 && TREE_CODE (array) != VAR_DECL)
10570 return 0;
10572 else
10573 return 0;
10575 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10577 tree arg0 = TREE_OPERAND (arg, 0);
10578 tree arg1 = TREE_OPERAND (arg, 1);
10580 STRIP_NOPS (arg0);
10581 STRIP_NOPS (arg1);
10583 if (TREE_CODE (arg0) == ADDR_EXPR
10584 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10585 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10587 array = TREE_OPERAND (arg0, 0);
10588 offset = arg1;
10590 else if (TREE_CODE (arg1) == ADDR_EXPR
10591 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10592 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10594 array = TREE_OPERAND (arg1, 0);
10595 offset = arg0;
10597 else
10598 return 0;
10600 else
10601 return 0;
10603 if (TREE_CODE (array) == STRING_CST)
10605 *ptr_offset = fold_convert (sizetype, offset);
10606 return array;
10608 else if (TREE_CODE (array) == VAR_DECL
10609 || TREE_CODE (array) == CONST_DECL)
10611 int length;
10612 tree init = ctor_for_folding (array);
10614 /* Variables initialized to string literals can be handled too. */
10615 if (init == error_mark_node
10616 || !init
10617 || TREE_CODE (init) != STRING_CST)
10618 return 0;
10620 /* Avoid const char foo[4] = "abcde"; */
10621 if (DECL_SIZE_UNIT (array) == NULL_TREE
10622 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10623 || (length = TREE_STRING_LENGTH (init)) <= 0
10624 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10625 return 0;
10627 /* If variable is bigger than the string literal, OFFSET must be constant
10628 and inside of the bounds of the string literal. */
10629 offset = fold_convert (sizetype, offset);
10630 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10631 && (! host_integerp (offset, 1)
10632 || compare_tree_int (offset, length) >= 0))
10633 return 0;
10635 *ptr_offset = offset;
10636 return init;
10639 return 0;
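/* Illustrative sketch (editorial addition, not part of expr.c): callers
   such as the strlen/memcpy builtins use string_constant to map an
   address like &arr[off] back to a (string initializer, byte offset)
   pair.  This simplified stand-in mimics the guards above: the folded
   read is only valid when the declared object is at least as large as
   its string initializer and the offset stays inside the initializer.
   The helper name fold_string_char and its parameters are hypothetical.  */

#include <stdio.h>
#include <stddef.h>

static int
fold_string_char (const char *init, size_t init_len,
                  size_t decl_size, size_t offset, char *out)
{
  if (decl_size < init_len)       /* avoid const char foo[4] = "abcde" */
    return 0;
  if (offset >= init_len)         /* offset must stay inside the literal */
    return 0;
  *out = init[offset];
  return 1;
}

int
main (void)
{
  char c;

  /* "hello" has TREE_STRING_LENGTH 6, including the trailing NUL.  */
  if (fold_string_char ("hello", 6, 6, 1, &c))
    printf ("folded read: '%c'\n", c);          /* prints 'e' */
  if (!fold_string_char ("abcde", 6, 4, 0, &c))
    printf ("not folded: initializer larger than the object\n");
  return 0;
}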
10642 /* Generate code to calculate the exploded expression OPS
10643 using a store-flag instruction and return an rtx for the result.
10644 OPS reflects a comparison.
10646 If TARGET is nonzero, store the result there if convenient.
10648 Return zero if there is no suitable set-flag instruction
10649 available on this machine.
10651 Once expand_expr has been called on the arguments of the comparison,
10652 we are committed to doing the store flag, since it is not safe to
10653 re-evaluate the expression. We emit the store-flag insn by calling
10654 emit_store_flag, but only expand the arguments if we have a reason
10655 to believe that emit_store_flag will be successful. If we think that
10656 it will, but it isn't, we have to simulate the store-flag with a
10657 set/jump/set sequence. */
10659 static rtx
10660 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10662 enum rtx_code code;
10663 tree arg0, arg1, type;
10664 tree tem;
10665 enum machine_mode operand_mode;
10666 int unsignedp;
10667 rtx op0, op1;
10668 rtx subtarget = target;
10669 location_t loc = ops->location;
10671 arg0 = ops->op0;
10672 arg1 = ops->op1;
10674 /* Don't crash if the comparison was erroneous. */
10675 if (arg0 == error_mark_node || arg1 == error_mark_node)
10676 return const0_rtx;
10678 type = TREE_TYPE (arg0);
10679 operand_mode = TYPE_MODE (type);
10680 unsignedp = TYPE_UNSIGNED (type);
10682 /* We won't bother with BLKmode store-flag operations because it would mean
10683 passing a lot of information to emit_store_flag. */
10684 if (operand_mode == BLKmode)
10685 return 0;
10687 /* We won't bother with store-flag operations involving function pointers
10688 when function pointers must be canonicalized before comparisons. */
10689 #ifdef HAVE_canonicalize_funcptr_for_compare
10690 if (HAVE_canonicalize_funcptr_for_compare
10691 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10692 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10693 == FUNCTION_TYPE))
10694 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10695 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10696 == FUNCTION_TYPE))))
10697 return 0;
10698 #endif
10700 STRIP_NOPS (arg0);
10701 STRIP_NOPS (arg1);
10703 /* For vector typed comparisons emit code to generate the desired
10704 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10705 expander for this. */
10706 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10708 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10709 tree if_true = constant_boolean_node (true, ops->type);
10710 tree if_false = constant_boolean_node (false, ops->type);
10711 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10714 /* Get the rtx comparison code to use. We know that EXP is a comparison
10715 operation of some type. Some comparisons against 1 and -1 can be
10716 converted to comparisons with zero. Do so here so that the tests
10717 below will be aware that we have a comparison with zero. These
10718 tests will not catch constants in the first operand, but constants
10719 are rarely passed as the first operand. */
10721 switch (ops->code)
10723 case EQ_EXPR:
10724 code = EQ;
10725 break;
10726 case NE_EXPR:
10727 code = NE;
10728 break;
10729 case LT_EXPR:
10730 if (integer_onep (arg1))
10731 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10732 else
10733 code = unsignedp ? LTU : LT;
10734 break;
10735 case LE_EXPR:
10736 if (! unsignedp && integer_all_onesp (arg1))
10737 arg1 = integer_zero_node, code = LT;
10738 else
10739 code = unsignedp ? LEU : LE;
10740 break;
10741 case GT_EXPR:
10742 if (! unsignedp && integer_all_onesp (arg1))
10743 arg1 = integer_zero_node, code = GE;
10744 else
10745 code = unsignedp ? GTU : GT;
10746 break;
10747 case GE_EXPR:
10748 if (integer_onep (arg1))
10749 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10750 else
10751 code = unsignedp ? GEU : GE;
10752 break;
10754 case UNORDERED_EXPR:
10755 code = UNORDERED;
10756 break;
10757 case ORDERED_EXPR:
10758 code = ORDERED;
10759 break;
10760 case UNLT_EXPR:
10761 code = UNLT;
10762 break;
10763 case UNLE_EXPR:
10764 code = UNLE;
10765 break;
10766 case UNGT_EXPR:
10767 code = UNGT;
10768 break;
10769 case UNGE_EXPR:
10770 code = UNGE;
10771 break;
10772 case UNEQ_EXPR:
10773 code = UNEQ;
10774 break;
10775 case LTGT_EXPR:
10776 code = LTGT;
10777 break;
10779 default:
10780 gcc_unreachable ();
10783 /* Put a constant second. */
10784 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10785 || TREE_CODE (arg0) == FIXED_CST)
10787 tem = arg0; arg0 = arg1; arg1 = tem;
10788 code = swap_condition (code);
10791 /* If this is an equality or inequality test of a single bit, we can
10792 do this by shifting the bit being tested to the low-order bit and
10793 masking the result with the constant 1. If the condition was EQ,
10794 we xor it with 1. This does not require an scc insn and is faster
10795 than an scc insn even if we have it.
10797 The code to make this transformation was moved into fold_single_bit_test,
10798 so we just call into the folder and expand its result. */
10800 if ((code == NE || code == EQ)
10801 && integer_zerop (arg1)
10802 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10804 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10805 if (srcstmt
10806 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10808 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10809 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10810 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10811 gimple_assign_rhs1 (srcstmt),
10812 gimple_assign_rhs2 (srcstmt));
10813 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10814 if (temp)
10815 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10819 if (! get_subtarget (target)
10820 || GET_MODE (subtarget) != operand_mode)
10821 subtarget = 0;
10823 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10825 if (target == 0)
10826 target = gen_reg_rtx (mode);
10828 /* Try a cstore if possible. */
10829 return emit_store_flag_force (target, code, op0, op1,
10830 operand_mode, unsignedp,
10831 (TYPE_PRECISION (ops->type) == 1
10832 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
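/* Illustrative sketch (editorial addition, not part of expr.c): the
   single-bit transformation described above (fold_single_bit_test,
   reached from do_store_flag) rewrites an equality test of one bit into
   shift-and-mask form, avoiding a store-flag instruction.  The helper
   names bit_test_cmp and bit_test_shift are hypothetical; they only
   demonstrate the equivalence of the two forms.  */

#include <assert.h>

static int
bit_test_cmp (unsigned x, int n, int want_ne)
{
  if (want_ne)
    return (x & (1u << n)) != 0;        /* NE form */
  return (x & (1u << n)) == 0;          /* EQ form */
}

static int
bit_test_shift (unsigned x, int n, int want_ne)
{
  unsigned bit = (x >> n) & 1u;         /* shift the bit to position 0 */
  return want_ne ? (int) bit : (int) (bit ^ 1u);  /* EQ also XORs with 1 */
}

int
main (void)
{
  unsigned x;
  int n;

  for (x = 0; x < 64; x++)
    for (n = 0; n < 6; n++)
      {
        assert (bit_test_cmp (x, n, 1) == bit_test_shift (x, n, 1));
        assert (bit_test_cmp (x, n, 0) == bit_test_shift (x, n, 0));
      }
  return 0;
}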
10836 /* Stubs in case we haven't got a casesi insn. */
10837 #ifndef HAVE_casesi
10838 # define HAVE_casesi 0
10839 # define gen_casesi(a, b, c, d, e) (0)
10840 # define CODE_FOR_casesi CODE_FOR_nothing
10841 #endif
10843 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10844 0 otherwise (i.e. if there is no casesi instruction).
10846 DEFAULT_PROBABILITY is the probability of jumping to the default
10847 label. */
10848 int
10849 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10850 rtx table_label, rtx default_label, rtx fallback_label,
10851 int default_probability)
10853 struct expand_operand ops[5];
10854 enum machine_mode index_mode = SImode;
10855 rtx op1, op2, index;
10857 if (! HAVE_casesi)
10858 return 0;
10860 /* Convert the index to SImode. */
10861 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10863 enum machine_mode omode = TYPE_MODE (index_type);
10864 rtx rangertx = expand_normal (range);
10866 /* We must handle the endpoints in the original mode. */
10867 index_expr = build2 (MINUS_EXPR, index_type,
10868 index_expr, minval);
10869 minval = integer_zero_node;
10870 index = expand_normal (index_expr);
10871 if (default_label)
10872 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10873 omode, 1, default_label,
10874 default_probability);
10875 /* Now we can safely truncate. */
10876 index = convert_to_mode (index_mode, index, 0);
10878 else
10880 if (TYPE_MODE (index_type) != index_mode)
10882 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10883 index_expr = fold_convert (index_type, index_expr);
10886 index = expand_normal (index_expr);
10889 do_pending_stack_adjust ();
10891 op1 = expand_normal (minval);
10892 op2 = expand_normal (range);
10894 create_input_operand (&ops[0], index, index_mode);
10895 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10896 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10897 create_fixed_operand (&ops[3], table_label);
10898 create_fixed_operand (&ops[4], (default_label
10899 ? default_label
10900 : fallback_label));
10901 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10902 return 1;
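/* Illustrative sketch (editorial addition, not part of expr.c): when the
   switch index is wider than SImode, try_casesi above subtracts the
   minimum and checks the range in the original wide mode before
   truncating, so an index whose low 32 bits merely look in-range still
   goes to the default label.  The helper name dispatch is hypothetical.  */

#include <assert.h>
#include <stdint.h>

static int
dispatch (uint64_t index, uint64_t minval, uint64_t range)
{
  uint64_t biased = index - minval;     /* endpoints handled in wide mode */
  if (biased > range)
    return -1;                          /* default label */
  return (int) (uint32_t) biased;       /* now safe to truncate to SImode */
}

int
main (void)
{
  assert (dispatch (7, 5, 10) == 2);                    /* in range */
  assert (dispatch (3, 5, 10) == -1);                   /* below minval */
  /* Low 32 bits are 2, but the wide comparison still rejects it.  */
  assert (dispatch ((1ULL << 32) + 7, 5, 10) == -1);
  return 0;
}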
10905 /* Attempt to generate a tablejump instruction; same concept. */
10906 #ifndef HAVE_tablejump
10907 #define HAVE_tablejump 0
10908 #define gen_tablejump(x, y) (0)
10909 #endif
10911 /* Subroutine of the next function.
10913 INDEX is the value being switched on, with the lowest value
10914 in the table already subtracted.
10915 MODE is its expected mode (needed if INDEX is constant).
10916 RANGE is the length of the jump table.
10917 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10919 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10920 index value is out of range.
10921 DEFAULT_PROBABILITY is the probability of jumping to
10922 the default label. */
10924 static void
10925 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10926 rtx default_label, int default_probability)
10928 rtx temp, vector;
10930 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10931 cfun->cfg->max_jumptable_ents = INTVAL (range);
10933 /* Do an unsigned comparison (in the proper mode) between the index
10934 expression and the value which represents the length of the range.
10935 Since we just finished subtracting the lower bound of the range
10936 from the index expression, this comparison allows us to simultaneously
10937 check that the original index expression value is both greater than
10938 or equal to the minimum value of the range and less than or equal to
10939 the maximum value of the range. */
10941 if (default_label)
10942 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10943 default_label, default_probability);
10946 /* If index is in range, it must fit in Pmode.
10947 Convert to Pmode so we can index with it. */
10948 if (mode != Pmode)
10949 index = convert_to_mode (Pmode, index, 1);
10951 /* Don't let a MEM slip through, because then INDEX that comes
10952 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10953 and break_out_memory_refs will go to work on it and mess it up. */
10954 #ifdef PIC_CASE_VECTOR_ADDRESS
10955 if (flag_pic && !REG_P (index))
10956 index = copy_to_mode_reg (Pmode, index);
10957 #endif
10959 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10960 GET_MODE_SIZE, because this indicates how large insns are. The other
10961 uses should all be Pmode, because they are addresses. This code
10962 could fail if addresses and insns are not the same size. */
10963 index = gen_rtx_PLUS
10964 (Pmode,
10965 gen_rtx_MULT (Pmode, index,
10966 gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
10967 gen_rtx_LABEL_REF (Pmode, table_label));
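/* The address built above is effectively

     table_label + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE)

   i.e. the INDEX'th entry of the dispatch table, before any
   PIC_CASE_VECTOR_ADDRESS adjustment below.  */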
10968 #ifdef PIC_CASE_VECTOR_ADDRESS
10969 if (flag_pic)
10970 index = PIC_CASE_VECTOR_ADDRESS (index);
10971 else
10972 #endif
10973 index = memory_address (CASE_VECTOR_MODE, index);
10974 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10975 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10976 convert_move (temp, vector, 0);
10978 emit_jump_insn (gen_tablejump (temp, table_label));
10980 /* If we are generating PIC code or if the table is PC-relative, the
10981 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10982 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10983 emit_barrier ();
10984 }
10986 int
10987 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10988 rtx table_label, rtx default_label, int default_probability)
10989 {
10990 rtx index;
10992 if (! HAVE_tablejump)
10993 return 0;
10995 index_expr = fold_build2 (MINUS_EXPR, index_type,
10996 fold_convert (index_type, index_expr),
10997 fold_convert (index_type, minval));
10998 index = expand_normal (index_expr);
10999 do_pending_stack_adjust ();
11001 do_tablejump (index, TYPE_MODE (index_type),
11002 convert_modes (TYPE_MODE (index_type),
11003 TYPE_MODE (TREE_TYPE (range)),
11004 expand_normal (range),
11005 TYPE_UNSIGNED (TREE_TYPE (range))),
11006 table_label, default_label, default_probability);
11007 return 1;
11008 }
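/* Note the division of labour: unlike try_casesi, which hands MINVAL to
   the target pattern as a separate operand, try_tablejump folds the
   subtraction of MINVAL into INDEX_EXPR itself, so do_tablejump only
   ever sees a zero-based index together with RANGE.  */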
11010 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
11011 static rtx
11012 const_vector_from_tree (tree exp)
11013 {
11014 rtvec v;
11015 unsigned i;
11016 int units;
11017 tree elt;
11018 enum machine_mode inner, mode;
11020 mode = TYPE_MODE (TREE_TYPE (exp));
11022 if (initializer_zerop (exp))
11023 return CONST0_RTX (mode);
11025 units = GET_MODE_NUNITS (mode);
11026 inner = GET_MODE_INNER (mode);
11028 v = rtvec_alloc (units);
11030 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11031 {
11032 elt = VECTOR_CST_ELT (exp, i);
11034 if (TREE_CODE (elt) == REAL_CST)
11035 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11036 inner);
11037 else if (TREE_CODE (elt) == FIXED_CST)
11038 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11039 inner);
11040 else
11041 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11042 inner);
11043 }
11045 return gen_rtx_CONST_VECTOR (mode, v);
11046 }
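/* Example (illustrative): for a V4SF VECTOR_CST such as
   { 1.0f, 2.0f, 3.0f, 4.0f }, each REAL_CST element is converted with
   CONST_DOUBLE_FROM_REAL_VALUE into the SFmode inner mode and the four
   results are wrapped into one CONST_VECTOR of V4SFmode; an all-zero
   constructor never reaches the loop because of the initializer_zerop
   short-circuit at the top.  */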
11048 /* Build a decl for a personality function given a language prefix. */
11050 tree
11051 build_personality_function (const char *lang)
11052 {
11053 const char *unwind_and_version;
11054 tree decl, type;
11055 char *name;
11057 switch (targetm_common.except_unwind_info (&global_options))
11058 {
11059 case UI_NONE:
11060 return NULL;
11061 case UI_SJLJ:
11062 unwind_and_version = "_sj0";
11063 break;
11064 case UI_DWARF2:
11065 case UI_TARGET:
11066 unwind_and_version = "_v0";
11067 break;
11068 case UI_SEH:
11069 unwind_and_version = "_seh0";
11070 break;
11071 default:
11072 gcc_unreachable ();
11073 }
11075 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11077 type = build_function_type_list (integer_type_node, integer_type_node,
11078 long_long_unsigned_type_node,
11079 ptr_type_node, ptr_type_node, NULL_TREE);
11080 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11081 get_identifier (name), type);
11082 DECL_ARTIFICIAL (decl) = 1;
11083 DECL_EXTERNAL (decl) = 1;
11084 TREE_PUBLIC (decl) = 1;
11086 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
11087 are the flags assigned by targetm.encode_section_info. */
11088 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11090 return decl;
11091 }
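/* For example (illustrative): with DWARF-2 unwind info, the C++ front
   end's LANG string "gxx" yields a declaration named
   "__gxx_personality_v0"; under SJLJ exceptions the same call would
   produce "__gxx_personality_sj0", and under SEH
   "__gxx_personality_seh0".  */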
11093 /* Extracts the personality function of DECL and returns the corresponding
11094 libfunc. */
11096 rtx
11097 get_personality_function (tree decl)
11098 {
11099 tree personality = DECL_FUNCTION_PERSONALITY (decl);
11100 enum eh_personality_kind pk;
11102 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11103 if (pk == eh_personality_none)
11104 return NULL;
11106 if (!personality
11107 && pk == eh_personality_any)
11108 personality = lang_hooks.eh_personality ();
11110 if (pk == eh_personality_lang)
11111 gcc_assert (personality != NULL_TREE);
11113 return XEXP (DECL_RTL (personality), 0);
11114 }
11116 #include "gt-expr.h"