/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-config.h"
33 #include "insn-attr.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "langhooks.h"
43 #include "intl.h"
44 #include "tm_p.h"
45 #include "tree-iterator.h"
46 #include "gimple.h"
47 #include "gimple-ssa.h"
48 #include "cgraph.h"
49 #include "tree-ssanames.h"
50 #include "target.h"
51 #include "common/common-target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "tree-ssa-live.h"
56 #include "tree-outof-ssa.h"
57 #include "target-globals.h"
58 #include "params.h"
59 #include "tree-ssa-address.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static void move_by_pieces_1 (insn_gen_fn, machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (insn_gen_fn, machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
                        unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
                        enum machine_mode, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
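/* Illustration: if MOVE_RATIO for the current speed/size setting were,
   say, 4, a fully aligned 16-byte copy on a target whose widest piece
   is 8 bytes costs two piecewise moves and is done by pieces, while a
   64-byte copy costs eight and falls through to a movmem pattern or a
   libcall.  The actual numbers are entirely target-dependent.  */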
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if (insn_operand_matches (ic, 1, mem))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
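/* For instance, sign-extending a SImode value SRC into a fresh DImode
   pseudo could look like this (a sketch; SRC stands for any SImode rtx
   valid as a move source):

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   Passing 1 for UNSIGNEDP would request zero-extension instead.  */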
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_PRECISION (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
      convert_optab ctab = unsignedp ? zext_optab : sext_optab;
      enum insn_code icode;

      icode = convert_optab_handler (ctab, full_mode, from_mode);
      gcc_assert (icode != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (icode, to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (icode, new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
      && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();
      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target, so force it into an
         isolated register when it might.  Likewise for any MEM input, since
         the conversion sequence might require several references to it and
         we must ensure we're getting the same value every time.  */
      if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,
                                      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
      && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0),
                                            MEM_ADDR_SPACE (from)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          int shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = (GET_MODE_PRECISION (to_mode)
                          - GET_MODE_PRECISION (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
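/* For example, to obtain X (known to be held in SImode) zero-extended
   to DImode, reusing X itself when no conversion is needed (a sketch):

     rtx wide = convert_modes (DImode, SImode, x, 1);  */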
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */
  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = double_int::from_uhwi (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = val.zext (GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (CONST_DOUBLE_AS_INT_P (x)
              || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION_MODES_P (mode,
                                                            GET_MODE (x))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= GET_MODE_MASK (oldmode);
          if (! unsignedp
              && val_signbit_known_set_p (oldmode, val))
            val |= ~GET_MODE_MASK (oldmode);

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode that is strictly narrower than SIZE
   bytes.  If no such mode can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
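/* E.g., on a host with a 64-bit HOST_WIDE_INT and a target where
   MOVE_MAX_PIECES is 8, this evaluates to MIN (8, 16), i.e. 8
   (illustrative values only).  */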
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
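/* A sketch of a mempcpy-style use, assuming TO and FROM are BLKmode
   MEMs and MOVE_BY_PIECES_P has already approved this length and
   alignment:

     rtx end = move_by_pieces (to, from, len, align, 1);

   END then addresses the first byte past the copied block.  */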
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode;
  enum machine_mode from_addr_mode = get_address_mode (from);
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = get_address_mode (to);
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr_mode,
                                                            from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr_mode,
                                                          to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1 && data.len > 0)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (to_addr_mode,
                                                                data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1 && l > 0)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
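/* E.g., for L = 13 with usable piece sizes of 8, 4, 2 and 1 bytes and
   alignment high enough for all of them, the greedy walk above counts
   13 = 8 + 4 + 1, i.e. three insns (illustrative; the usable sizes
   depend on the target's mov patterns).  */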
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (-(HOST_WIDE_INT) size,
                                                GET_MODE (data->from_addr))));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->to_addr))));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  gen_int_mode (size,
                                                GET_MODE (data->from_addr))));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
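/* The common entry point is the wrapper defined below; a caller with no
   extra knowledge simply writes (a sketch, X and Y being BLKmode MEMs):

     rtx ret = emit_block_move (x, y, size, BLOCK_OP_NORMAL);

   which passes no alignment hint (0) and no size hint (-1).  */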
rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, INTVAL (size));
      set_mem_size (y, INTVAL (size));
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    {
      /* Since x and y are passed to a libcall, mark the corresponding
         tree EXPR as addressable.  */
      tree y_expr = MEM_EXPR (y);
      tree x_expr = MEM_EXPR (x);
      if (y_expr)
        mark_addressable (y_expr);
      if (x_expr)
        mark_addressable (x_expr);
      retval = emit_block_move_via_libcall (x, y, size,
                                            method == BLOCK_OP_TAILCALL);
    }

  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  Since SIZE is within the Pmode address
             space, we limit MODE to Pmode.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
        {
          struct expand_operand ops[6];
          unsigned int nops;

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */
          nops = insn_data[(int) code].n_generator_args;
          gcc_assert (nops == 4 || nops == 6);

          create_fixed_operand (&ops[0], x);
          create_fixed_operand (&ops[1], y);
          /* The check above guarantees that this size conversion is valid.  */
          create_convert_operand_to (&ops[2], size, mode, true);
          create_integer_operand (&ops[3], align / BITS_PER_UNIT);
          if (nops == 6)
            {
              create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
              create_integer_operand (&ops[5], expected_size);
            }
          if (maybe_expand_insn (code, nops, ops))
            {
              volatile_ok = save_volatile_ok;
              return true;
            }
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_addr_to_reg (XEXP (dst, 0));
  src_addr = copy_addr_to_reg (XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn, attrs, attr_args;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
      attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);

      decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode = get_address_mode (x);
  enum machine_mode y_addr_mode = get_address_mode (y);
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label, REG_BR_PROB_BASE * 90 / 100);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
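/* A sketch of the two-step use described above, with PARALLEL giving
   the eventual hard-register layout of SRC:

     rtx temps = emit_group_load_into_temps (parallel, src, type, ssize);
     ... other setup code that must not clobber the hard regs ...
     emit_group_move (parallel, temps);  */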
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
2008 unsigned int bytelen = GET_MODE_SIZE (mode);
2009 unsigned int adj_bytelen = bytelen;
2010 rtx dest = dst;
2012 /* Handle trailing fragments that run over the size of the struct. */
2013 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2014 adj_bytelen = ssize - bytepos;
2016 if (GET_CODE (dst) == CONCAT)
2018 if (bytepos + adj_bytelen
2019 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2020 dest = XEXP (dst, 0);
2021 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2023 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2024 dest = XEXP (dst, 1);
2026 else
2028 enum machine_mode dest_mode = GET_MODE (dest);
2029 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2031 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2033 if (GET_MODE_ALIGNMENT (dest_mode)
2034 >= GET_MODE_ALIGNMENT (tmp_mode))
2036 dest = assign_stack_temp (dest_mode,
2037 GET_MODE_SIZE (dest_mode));
2038 emit_move_insn (adjust_address (dest,
2039 tmp_mode,
2040 bytepos),
2041 tmps[i]);
2042 dst = dest;
2044 else
2046 dest = assign_stack_temp (tmp_mode,
2047 GET_MODE_SIZE (tmp_mode));
2048 emit_move_insn (dest, tmps[i]);
2049 dst = adjust_address (dest, dest_mode, bytepos);
2051 break;
2055 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2057 /* store_bit_field always takes its value from the lsb.
2058 Move the fragment to the lsb if it's not already there. */
2059 if (
2060 #ifdef BLOCK_REG_PADDING
2061 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2062 == (BYTES_BIG_ENDIAN ? upward : downward)
2063 #else
2064 BYTES_BIG_ENDIAN
2065 #endif
2068 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2069 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2070 shift, tmps[i], 0);
2072 bytelen = adj_bytelen;
2075 /* Optimize the access just a bit. */
2076 if (MEM_P (dest)
2077 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2078 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2079 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2080 && bytelen == GET_MODE_SIZE (mode))
2081 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2082 else
2083 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2084 0, 0, mode, tmps[i]);
2087 /* Copy from the pseudo into the (probable) hard reg. */
2088 if (orig_dst != dst)
2089 emit_move_insn (orig_dst, dst);
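/* Illustrative call, not from the original file: when a function
   result arrives as the PARALLEL RESULT_PAR and must land in a
   BLKmode stack slot TARGET of type TYPE, an expander would emit

     emit_group_store (target, result_par, type,
                       int_size_in_bytes (type));  */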
2092 /* Return a form of X that does not use a PARALLEL. TYPE is the type
2093 of the value stored in X. */
2095 rtx
2096 maybe_emit_group_store (rtx x, tree type)
2098 enum machine_mode mode = TYPE_MODE (type);
2099 gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2100 if (GET_CODE (x) == PARALLEL)
2102 rtx result = gen_reg_rtx (mode);
2103 emit_group_store (result, x, type, int_size_in_bytes (type));
2104 return result;
2106 return x;
2109 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2111 This is used on targets that return BLKmode values in registers. */
2113 void
2114 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode mode = GET_MODE (srcreg);
2121 enum machine_mode tmode = GET_MODE (target);
2122 enum machine_mode copy_mode;
2124 /* BLKmode registers created in the back-end shouldn't have survived. */
2125 gcc_assert (mode != BLKmode);
2127 /* If the structure doesn't take up a whole number of words, see whether
2128 SRCREG is padded on the left or on the right. If it's on the left,
2129 set PADDING_CORRECTION to the number of bits to skip.
2131 In most ABIs, the structure will be returned at the least significant
2132 the register, which translates to right padding on little-endian
2133 targets and left padding on big-endian targets. The opposite
2134 holds if the structure is returned at the most significant
2135 end of the register. */
2136 if (bytes % UNITS_PER_WORD != 0
2137 && (targetm.calls.return_in_msb (type)
2138 ? !BYTES_BIG_ENDIAN
2139 : BYTES_BIG_ENDIAN))
2140 padding_correction
2141 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2143 /* We can use a single move if we have an exact mode for the size. */
2144 else if (MEM_P (target)
2145 && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2146 || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2147 && bytes == GET_MODE_SIZE (mode))
2149 emit_move_insn (adjust_address (target, mode, 0), srcreg);
2150 return;
2153 /* And if we additionally have the same mode for a register. */
2154 else if (REG_P (target)
2155 && GET_MODE (target) == mode
2156 && bytes == GET_MODE_SIZE (mode))
2158 emit_move_insn (target, srcreg);
2159 return;
2162 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2163 into a new pseudo which is a full word. */
2164 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2166 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2167 mode = word_mode;
2170 /* Copy the structure BITSIZE bits at a time. If the target lives in
2171 memory, take care of not reading/writing past its end by selecting
2172 a copy mode suited to BITSIZE. This should always be possible given
2173 how it is computed.
2175 If the target lives in register, make sure not to select a copy mode
2176 larger than the mode of the register.
2178 We could probably emit more efficient code for machines which do not use
2179 strict alignment, but it doesn't seem worth the effort at the current
2180 time. */
2182 copy_mode = word_mode;
2183 if (MEM_P (target))
2185 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2186 if (mem_mode != BLKmode)
2187 copy_mode = mem_mode;
2189 else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2190 copy_mode = tmode;
2192 for (bitpos = 0, xbitpos = padding_correction;
2193 bitpos < bytes * BITS_PER_UNIT;
2194 bitpos += bitsize, xbitpos += bitsize)
2196 /* We need a new source operand each time xbitpos is on a
2197 word boundary and when xbitpos == padding_correction
2198 (the first time through). */
2199 if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2200 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2202 /* We need a new destination operand each time bitpos is on
2203 a word boundary. */
2204 if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2205 dst = target;
2206 else if (bitpos % BITS_PER_WORD == 0)
2207 dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2209 /* Use xbitpos for the source extraction (right justified) and
2210 bitpos for the destination store (left justified). */
2211 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2212 extract_bit_field (src, bitsize,
2213 xbitpos % BITS_PER_WORD, 1,
2214 NULL_RTX, copy_mode, copy_mode));
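/* Worked example, added for illustration: on a 32-bit big-endian
   target that does not return in the MSB, a 6-byte struct gives
   bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION is
   32 - 2 * 8 == 16, and the loop above skips the 16 most significant
   padding bits of the first source word before copying.  */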
2218 /* Copy BLKmode value SRC into a register of mode MODE. Return the
2219 register if it contains any data, otherwise return null.
2221 This is used on targets that return BLKmode values in registers. */
2223 rtx
2224 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2226 int i, n_regs;
2227 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2228 unsigned int bitsize;
2229 rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2230 enum machine_mode dst_mode;
2232 gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2234 x = expand_normal (src);
2236 bytes = int_size_in_bytes (TREE_TYPE (src));
2237 if (bytes == 0)
2238 return NULL_RTX;
2240 /* If the structure doesn't take up a whole number of words, see
2241 whether the register value should be padded on the left or on
2242 the right. Set PADDING_CORRECTION to the number of padding
2243 bits needed on the left side.
2245 In most ABIs, the structure will be returned at the least significant end of
2246 the register, which translates to right padding on little-endian
2247 targets and left padding on big-endian targets. The opposite
2248 holds if the structure is returned at the most significant
2249 end of the register. */
2250 if (bytes % UNITS_PER_WORD != 0
2251 && (targetm.calls.return_in_msb (TREE_TYPE (src))
2252 ? !BYTES_BIG_ENDIAN
2253 : BYTES_BIG_ENDIAN))
2254 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2255 * BITS_PER_UNIT));
2257 n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2258 dst_words = XALLOCAVEC (rtx, n_regs);
2259 bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2261 /* Copy the structure BITSIZE bits at a time. */
2262 for (bitpos = 0, xbitpos = padding_correction;
2263 bitpos < bytes * BITS_PER_UNIT;
2264 bitpos += bitsize, xbitpos += bitsize)
2266 /* We need a new destination pseudo each time xbitpos is
2267 on a word boundary and when xbitpos == padding_correction
2268 (the first time through). */
2269 if (xbitpos % BITS_PER_WORD == 0
2270 || xbitpos == padding_correction)
2272 /* Generate an appropriate register. */
2273 dst_word = gen_reg_rtx (word_mode);
2274 dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2276 /* Clear the destination before we move anything into it. */
2277 emit_move_insn (dst_word, CONST0_RTX (word_mode));
2280 /* We need a new source operand each time bitpos is on a word
2281 boundary. */
2282 if (bitpos % BITS_PER_WORD == 0)
2283 src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2285 /* Use bitpos for the source extraction (left justified) and
2286 xbitpos for the destination store (right justified). */
2287 store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2288 0, 0, word_mode,
2289 extract_bit_field (src_word, bitsize,
2290 bitpos % BITS_PER_WORD, 1,
2291 NULL_RTX, word_mode, word_mode));
2294 if (mode == BLKmode)
2296 /* Find the smallest integer mode large enough to hold the
2297 entire structure. */
2298 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2299 mode != VOIDmode;
2300 mode = GET_MODE_WIDER_MODE (mode))
2301 /* Have we found a large enough mode? */
2302 if (GET_MODE_SIZE (mode) >= bytes)
2303 break;
2305 /* A suitable mode should have been found. */
2306 gcc_assert (mode != VOIDmode);
2309 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2310 dst_mode = word_mode;
2311 else
2312 dst_mode = mode;
2313 dst = gen_reg_rtx (dst_mode);
2315 for (i = 0; i < n_regs; i++)
2316 emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2318 if (mode != dst_mode)
2319 dst = gen_lowpart (mode, dst);
2321 return dst;
2324 /* Add a USE expression for REG to the (possibly empty) list pointed
2325 to by CALL_FUSAGE. REG must denote a hard register. */
2327 void
2328 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2330 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2332 *call_fusage
2333 = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2336 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2337 starting at REGNO. All of these registers must be hard registers. */
2339 void
2340 use_regs (rtx *call_fusage, int regno, int nregs)
2342 int i;
2344 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2346 for (i = 0; i < nregs; i++)
2347 use_reg (call_fusage, regno_reg_rtx[regno + i]);
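/* Hedged usage sketch; the register macro is hypothetical and
   target-dependent: to note that a call reads an argument passed in
   two consecutive hard registers starting at GP_ARG_FIRST, a back
   end would do

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, GP_ARG_FIRST, 2);

   and later hang CALL_FUSAGE off the call insn via
   CALL_INSN_FUNCTION_USAGE.  */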
2350 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2351 PARALLEL REGS. This is for calls that pass values in multiple
2352 non-contiguous locations. The Irix 6 ABI has examples of this. */
2354 void
2355 use_group_regs (rtx *call_fusage, rtx regs)
2357 int i;
2359 for (i = 0; i < XVECLEN (regs, 0); i++)
2361 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2363 /* A NULL entry means the parameter goes both on the stack and in
2364 registers. This can also be a MEM for targets that pass values
2365 partially on the stack and partially in registers. */
2366 if (reg != 0 && REG_P (reg))
2367 use_reg (call_fusage, reg);
2371 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2372 assignment and the code of the expression on the RHS is CODE. Return
2373 NULL otherwise. */
2375 static gimple
2376 get_def_for_expr (tree name, enum tree_code code)
2378 gimple def_stmt;
2380 if (TREE_CODE (name) != SSA_NAME)
2381 return NULL;
2383 def_stmt = get_gimple_for_ssa_name (name);
2384 if (!def_stmt
2385 || gimple_assign_rhs_code (def_stmt) != code)
2386 return NULL;
2388 return def_stmt;
2391 #ifdef HAVE_conditional_move
2392 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2393 assignment and the class of the expression on the RHS is CLASS. Return
2394 NULL otherwise. */
2396 static gimple
2397 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2399 gimple def_stmt;
2401 if (TREE_CODE (name) != SSA_NAME)
2402 return NULL;
2404 def_stmt = get_gimple_for_ssa_name (name);
2405 if (!def_stmt
2406 || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2407 return NULL;
2409 return def_stmt;
2411 #endif
2414 /* Determine whether the LEN bytes generated by CONSTFUN can be
2415 stored to memory using several move instructions. CONSTFUNDATA is
2416 a pointer which will be passed as argument in every CONSTFUN call.
2417 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2418 a memset operation and false if it's a copy of a constant string.
2419 Return nonzero if a call to store_by_pieces should succeed. */
2421 int
2422 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2423 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2424 void *constfundata, unsigned int align, bool memsetp)
2426 unsigned HOST_WIDE_INT l;
2427 unsigned int max_size;
2428 HOST_WIDE_INT offset = 0;
2429 enum machine_mode mode;
2430 enum insn_code icode;
2431 int reverse;
2432 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2433 rtx cst ATTRIBUTE_UNUSED;
2435 if (len == 0)
2436 return 1;
2438 if (! (memsetp
2439 ? SET_BY_PIECES_P (len, align)
2440 : STORE_BY_PIECES_P (len, align)))
2441 return 0;
2443 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2445 /* We would first store what we can in the largest integer mode, then go to
2446 successively smaller modes. */
2448 for (reverse = 0;
2449 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2450 reverse++)
2452 l = len;
2453 max_size = STORE_MAX_PIECES + 1;
2454 while (max_size > 1 && l > 0)
2456 mode = widest_int_mode_for_size (max_size);
2458 if (mode == VOIDmode)
2459 break;
2461 icode = optab_handler (mov_optab, mode);
2462 if (icode != CODE_FOR_nothing
2463 && align >= GET_MODE_ALIGNMENT (mode))
2465 unsigned int size = GET_MODE_SIZE (mode);
2467 while (l >= size)
2469 if (reverse)
2470 offset -= size;
2472 cst = (*constfun) (constfundata, offset, mode);
2473 if (!targetm.legitimate_constant_p (mode, cst))
2474 return 0;
2476 if (!reverse)
2477 offset += size;
2479 l -= size;
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
2487 gcc_assert (!l);
2490 return 1;
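/* Illustrative CONSTFUN, not part of the original source: the string
   builtins use callbacks of exactly this shape; c_readstr is the real
   helper they rely on, while the wrapper name here is invented.

     static rtx
     example_constfun (void *data, HOST_WIDE_INT offset,
                       enum machine_mode mode)
     {
       /* Read MODE-sized bytes at OFFSET of the constant string.  */
       return c_readstr ((const char *) data + offset, mode);
     }

   A caller can then test
   can_store_by_pieces (len, example_constfun, (void *) str, align, false)
   before committing to a piecewise copy.  */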
2493 /* Generate several move instructions to store LEN bytes generated by
2494 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2495 pointer which will be passed as argument in every CONSTFUN call.
2496 ALIGN is the maximum alignment we can assume. MEMSETP is true if this is
2497 a memset operation and false if it's a copy of a constant string.
2498 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2499 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2500 stpcpy. */
2502 rtx
2503 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2504 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2505 void *constfundata, unsigned int align, bool memsetp, int endp)
2507 enum machine_mode to_addr_mode = get_address_mode (to);
2508 struct store_by_pieces_d data;
2510 if (len == 0)
2512 gcc_assert (endp != 2);
2513 return to;
2516 gcc_assert (memsetp
2517 ? SET_BY_PIECES_P (len, align)
2518 : STORE_BY_PIECES_P (len, align));
2519 data.constfun = constfun;
2520 data.constfundata = constfundata;
2521 data.len = len;
2522 data.to = to;
2523 store_by_pieces_1 (&data, align);
2524 if (endp)
2526 rtx to1;
2528 gcc_assert (!data.reverse);
2529 if (data.autinc_to)
2531 if (endp == 2)
2533 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2535 else
2536 data.to_addr = copy_to_mode_reg (to_addr_mode,
2537 plus_constant (to_addr_mode,
2538 data.to_addr,
2539 -1));
2541 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2542 data.offset);
2544 else
2546 if (endp == 2)
2547 --data.offset;
2548 to1 = adjust_address (data.to, QImode, data.offset);
2550 return to1;
2552 else
2553 return data.to;
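/* Continuing the sketch from can_store_by_pieces above (the callback
   name is illustrative): once the check succeeds, the same callback
   drives the stores, here returning the end address as mempcpy would:

     rtx end = store_by_pieces (dest, len, example_constfun,
                                (void *) str, align, false, 1);  */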
2556 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2557 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2559 static void
2560 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2562 struct store_by_pieces_d data;
2564 if (len == 0)
2565 return;
2567 data.constfun = clear_by_pieces_1;
2568 data.constfundata = NULL;
2569 data.len = len;
2570 data.to = to;
2571 store_by_pieces_1 (&data, align);
2574 /* Callback routine for clear_by_pieces.
2575 Return const0_rtx unconditionally. */
2577 static rtx
2578 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2579 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2580 enum machine_mode mode ATTRIBUTE_UNUSED)
2582 return const0_rtx;
2585 /* Subroutine of clear_by_pieces and store_by_pieces.
2586 Generate several move instructions to store LEN bytes of block TO. (A MEM
2587 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2589 static void
2590 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2591 unsigned int align ATTRIBUTE_UNUSED)
2593 enum machine_mode to_addr_mode = get_address_mode (data->to);
2594 rtx to_addr = XEXP (data->to, 0);
2595 unsigned int max_size = STORE_MAX_PIECES + 1;
2596 enum insn_code icode;
2598 data->offset = 0;
2599 data->to_addr = to_addr;
2600 data->autinc_to
2601 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2602 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2604 data->explicit_inc_to = 0;
2605 data->reverse
2606 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2607 if (data->reverse)
2608 data->offset = data->len;
2610 /* If storing requires more than two move insns,
2611 copy addresses to registers (to make displacements shorter)
2612 and use post-increment if available. */
2613 if (!data->autinc_to
2614 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2616 /* Determine the main mode we'll be using.
2617 MODE might not be used depending on the definitions of the
2618 USE_* macros below. */
2619 enum machine_mode mode ATTRIBUTE_UNUSED
2620 = widest_int_mode_for_size (max_size);
2622 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2624 data->to_addr = copy_to_mode_reg (to_addr_mode,
2625 plus_constant (to_addr_mode,
2626 to_addr,
2627 data->len));
2628 data->autinc_to = 1;
2629 data->explicit_inc_to = -1;
2632 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2633 && ! data->autinc_to)
2635 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2636 data->autinc_to = 1;
2637 data->explicit_inc_to = 1;
2640 if ( !data->autinc_to && CONSTANT_P (to_addr))
2641 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2644 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2646 /* First store what we can in the largest integer mode, then go to
2647 successively smaller modes. */
2649 while (max_size > 1 && data->len > 0)
2651 enum machine_mode mode = widest_int_mode_for_size (max_size);
2653 if (mode == VOIDmode)
2654 break;
2656 icode = optab_handler (mov_optab, mode);
2657 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2658 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2660 max_size = GET_MODE_SIZE (mode);
2663 /* The code above should have handled everything. */
2664 gcc_assert (!data->len);
2667 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2668 with move instructions for mode MODE. GENFUN is the gen_... function
2669 to make a move insn for that mode. DATA has all the other info. */
2671 static void
2672 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2673 struct store_by_pieces_d *data)
2675 unsigned int size = GET_MODE_SIZE (mode);
2676 rtx to1, cst;
2678 while (data->len >= size)
2680 if (data->reverse)
2681 data->offset -= size;
2683 if (data->autinc_to)
2684 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2685 data->offset);
2686 else
2687 to1 = adjust_address (data->to, mode, data->offset);
2689 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2690 emit_insn (gen_add2_insn (data->to_addr,
2691 gen_int_mode (-(HOST_WIDE_INT) size,
2692 GET_MODE (data->to_addr))));
2694 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2695 emit_insn ((*genfun) (to1, cst));
2697 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2698 emit_insn (gen_add2_insn (data->to_addr,
2699 gen_int_mode (size,
2700 GET_MODE (data->to_addr))));
2702 if (! data->reverse)
2703 data->offset += size;
2705 data->len -= size;
2709 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2710 its length in bytes. */
2712 rtx
2713 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2714 unsigned int expected_align, HOST_WIDE_INT expected_size)
2716 enum machine_mode mode = GET_MODE (object);
2717 unsigned int align;
2719 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2721 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2722 just move a zero. Otherwise, do this a piece at a time. */
2723 if (mode != BLKmode
2724 && CONST_INT_P (size)
2725 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2727 rtx zero = CONST0_RTX (mode);
2728 if (zero != NULL)
2730 emit_move_insn (object, zero);
2731 return NULL;
2734 if (COMPLEX_MODE_P (mode))
2736 zero = CONST0_RTX (GET_MODE_INNER (mode));
2737 if (zero != NULL)
2739 write_complex_part (object, zero, 0);
2740 write_complex_part (object, zero, 1);
2741 return NULL;
2746 if (size == const0_rtx)
2747 return NULL;
2749 align = MEM_ALIGN (object);
2751 if (CONST_INT_P (size)
2752 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2753 clear_by_pieces (object, INTVAL (size), align);
2754 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2755 expected_align, expected_size))
2757 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2758 return set_storage_via_libcall (object, size, const0_rtx,
2759 method == BLOCK_OP_TAILCALL);
2760 else
2761 gcc_unreachable ();
2763 return NULL;
2766 rtx
2767 clear_storage (rtx object, rtx size, enum block_op_methods method)
2769 return clear_storage_hints (object, size, method, 0, -1);
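/* Illustrative call, not from the source: zeroing a BLKmode MEM OBJ
   of SIZE bytes, letting the machinery above choose between
   clear_by_pieces, a setmem pattern and the memset libcall:

     clear_storage (obj, GEN_INT (size), BLOCK_OP_NORMAL);  */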
2773 /* A subroutine of clear_storage. Expand a call to memset.
2774 Return the return value of memset, 0 otherwise. */
2776 rtx
2777 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2779 tree call_expr, fn, object_tree, size_tree, val_tree;
2780 enum machine_mode size_mode;
2781 rtx retval;
2783 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2784 place those pseudos into a VAR_DECL and use them later. */
2786 object = copy_addr_to_reg (XEXP (object, 0));
2788 size_mode = TYPE_MODE (sizetype);
2789 size = convert_to_mode (size_mode, size, 1);
2790 size = copy_to_mode_reg (size_mode, size);
2792 /* It is incorrect to use the libcall calling conventions to call
2793 memset in this context. This could be a user call to memset and
2794 the user may wish to examine the return value from memset. For
2795 targets where libcalls and normal calls have different conventions
2796 for returning pointers, we could end up generating incorrect code. */
2798 object_tree = make_tree (ptr_type_node, object);
2799 if (!CONST_INT_P (val))
2800 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2801 size_tree = make_tree (sizetype, size);
2802 val_tree = make_tree (integer_type_node, val);
2804 fn = clear_storage_libcall_fn (true);
2805 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2806 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2808 retval = expand_normal (call_expr);
2810 return retval;
2813 /* A subroutine of set_storage_via_libcall. Create the tree node
2814 for the function we use for block clears. */
2816 tree block_clear_fn;
2818 void
2819 init_block_clear_fn (const char *asmspec)
2821 if (!block_clear_fn)
2823 tree fn, args;
2825 fn = get_identifier ("memset");
2826 args = build_function_type_list (ptr_type_node, ptr_type_node,
2827 integer_type_node, sizetype,
2828 NULL_TREE);
2830 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2831 DECL_EXTERNAL (fn) = 1;
2832 TREE_PUBLIC (fn) = 1;
2833 DECL_ARTIFICIAL (fn) = 1;
2834 TREE_NOTHROW (fn) = 1;
2835 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2836 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2838 block_clear_fn = fn;
2841 if (asmspec)
2842 set_user_assembler_name (block_clear_fn, asmspec);
2845 static tree
2846 clear_storage_libcall_fn (int for_call)
2848 static bool emitted_extern;
2850 if (!block_clear_fn)
2851 init_block_clear_fn (NULL);
2853 if (for_call && !emitted_extern)
2855 emitted_extern = true;
2856 make_decl_rtl (block_clear_fn);
2859 return block_clear_fn;
2862 /* Expand a setmem pattern; return true if successful. */
2864 bool
2865 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2866 unsigned int expected_align, HOST_WIDE_INT expected_size)
2868 /* Try the most limited insn first, because there's no point
2869 including more than one in the machine description unless
2870 the more limited one has some advantage. */
2872 enum machine_mode mode;
2874 if (expected_align < align)
2875 expected_align = align;
2877 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2878 mode = GET_MODE_WIDER_MODE (mode))
2880 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2882 if (code != CODE_FOR_nothing
2883 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2884 here because if SIZE is less than the mode mask, as it is
2885 returned by the macro, it will definitely be less than the
2886 actual mode mask. Since SIZE is within the Pmode address
2887 space, we limit MODE to Pmode. */
2888 && ((CONST_INT_P (size)
2889 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2890 <= (GET_MODE_MASK (mode) >> 1)))
2891 || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2893 struct expand_operand ops[6];
2894 unsigned int nops;
2896 nops = insn_data[(int) code].n_generator_args;
2897 gcc_assert (nops == 4 || nops == 6);
2899 create_fixed_operand (&ops[0], object);
2900 /* The check above guarantees that this size conversion is valid. */
2901 create_convert_operand_to (&ops[1], size, mode, true);
2902 create_convert_operand_from (&ops[2], val, byte_mode, true);
2903 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2904 if (nops == 6)
2906 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2907 create_integer_operand (&ops[5], expected_size);
2909 if (maybe_expand_insn (code, nops, ops))
2910 return true;
2914 return false;
2918 /* Write to one of the components of the complex value CPLX. Write VAL to
2919 the real part if IMAG_P is false, and the imaginary part if it's true. */
2921 static void
2922 write_complex_part (rtx cplx, rtx val, bool imag_p)
2924 enum machine_mode cmode;
2925 enum machine_mode imode;
2926 unsigned ibitsize;
2928 if (GET_CODE (cplx) == CONCAT)
2930 emit_move_insn (XEXP (cplx, imag_p), val);
2931 return;
2934 cmode = GET_MODE (cplx);
2935 imode = GET_MODE_INNER (cmode);
2936 ibitsize = GET_MODE_BITSIZE (imode);
2938 /* For MEMs simplify_gen_subreg may generate an invalid new address
2939 because, e.g., the original address is considered mode-dependent
2940 by the target, which restricts simplify_subreg from invoking
2941 adjust_address_nv. Instead of preparing fallback support for an
2942 invalid address, we call adjust_address_nv directly. */
2943 if (MEM_P (cplx))
2945 emit_move_insn (adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0),
2947 val);
2948 return;
2951 /* If the sub-object is at least word sized, then we know that subregging
2952 will work. This special case is important, since store_bit_field
2953 wants to operate on integer modes, and there's rarely an OImode to
2954 correspond to TCmode. */
2955 if (ibitsize >= BITS_PER_WORD
2956 /* For hard regs we have exact predicates. Assume we can split
2957 the original object if it spans an even number of hard regs.
2958 This special case is important for SCmode on 64-bit platforms
2959 where the natural size of floating-point regs is 32-bit. */
2960 || (REG_P (cplx)
2961 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2962 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2964 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2965 imag_p ? GET_MODE_SIZE (imode) : 0);
2966 if (part)
2968 emit_move_insn (part, val);
2969 return;
2971 else
2972 /* simplify_gen_subreg may fail for sub-word MEMs. */
2973 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2976 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
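/* Sketch, added for illustration: to zero just the imaginary half of
   an SCmode value CPLX while leaving the real half alone, a caller in
   this file would write

     write_complex_part (cplx, CONST0_RTX (SFmode), true);

   which the cases above turn into either a plain SFmode move or a
   store_bit_field into the imaginary half.  */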
2979 /* Extract one of the components of the complex value CPLX. Extract the
2980 real part if IMAG_P is false, and the imaginary part if it's true. */
2982 static rtx
2983 read_complex_part (rtx cplx, bool imag_p)
2985 enum machine_mode cmode, imode;
2986 unsigned ibitsize;
2988 if (GET_CODE (cplx) == CONCAT)
2989 return XEXP (cplx, imag_p);
2991 cmode = GET_MODE (cplx);
2992 imode = GET_MODE_INNER (cmode);
2993 ibitsize = GET_MODE_BITSIZE (imode);
2995 /* Special case reads from complex constants that got spilled to memory. */
2996 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2998 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2999 if (decl && TREE_CODE (decl) == COMPLEX_CST)
3001 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3002 if (CONSTANT_CLASS_P (part))
3003 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3007 /* For MEMs simplify_gen_subreg may generate an invalid new address
3008 because, e.g., the original address is considered mode-dependent
3009 by the target, which restricts simplify_subreg from invoking
3010 adjust_address_nv. Instead of preparing fallback support for an
3011 invalid address, we call adjust_address_nv directly. */
3012 if (MEM_P (cplx))
3013 return adjust_address_nv (cplx, imode,
3014 imag_p ? GET_MODE_SIZE (imode) : 0);
3016 /* If the sub-object is at least word sized, then we know that subregging
3017 will work. This special case is important, since extract_bit_field
3018 wants to operate on integer modes, and there's rarely an OImode to
3019 correspond to TCmode. */
3020 if (ibitsize >= BITS_PER_WORD
3021 /* For hard regs we have exact predicates. Assume we can split
3022 the original object if it spans an even number of hard regs.
3023 This special case is important for SCmode on 64-bit platforms
3024 where the natural size of floating-point regs is 32-bit. */
3025 || (REG_P (cplx)
3026 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3027 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3029 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3030 imag_p ? GET_MODE_SIZE (imode) : 0);
3031 if (ret)
3032 return ret;
3033 else
3034 /* simplify_gen_subreg may fail for sub-word MEMs. */
3035 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3038 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3039 true, NULL_RTX, imode, imode);
3042 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
3043 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
3044 represented in NEW_MODE. If FORCE is true, this will never happen, as
3045 we'll force-create a SUBREG if needed. */
3047 static rtx
3048 emit_move_change_mode (enum machine_mode new_mode,
3049 enum machine_mode old_mode, rtx x, bool force)
3051 rtx ret;
3053 if (push_operand (x, GET_MODE (x)))
3055 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3056 MEM_COPY_ATTRIBUTES (ret, x);
3058 else if (MEM_P (x))
3060 /* We don't have to worry about changing the address since the
3061 size in bytes is supposed to be the same. */
3062 if (reload_in_progress)
3064 /* Copy the MEM to change the mode and move any
3065 substitutions from the old MEM to the new one. */
3066 ret = adjust_address_nv (x, new_mode, 0);
3067 copy_replacements (x, ret);
3069 else
3070 ret = adjust_address (x, new_mode, 0);
3072 else
3074 /* Note that we do want simplify_subreg's behavior of validating
3075 that the new mode is ok for a hard register. If we were to use
3076 simplify_gen_subreg, we would create the subreg, but would
3077 probably run into the target not being able to implement it. */
3078 /* Except, of course, when FORCE is true, when this is exactly what
3079 we want. Which is needed for CCmodes on some targets. */
3080 if (force)
3081 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3082 else
3083 ret = simplify_subreg (new_mode, x, old_mode, 0);
3086 return ret;
3089 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3090 an integer mode of the same size as MODE. Returns the instruction
3091 emitted, or NULL if such a move could not be generated. */
3093 static rtx
3094 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3096 enum machine_mode imode;
3097 enum insn_code code;
3099 /* There must exist a mode of the exact size we require. */
3100 imode = int_mode_for_mode (mode);
3101 if (imode == BLKmode)
3102 return NULL_RTX;
3104 /* The target must support moves in this mode. */
3105 code = optab_handler (mov_optab, imode);
3106 if (code == CODE_FOR_nothing)
3107 return NULL_RTX;
3109 x = emit_move_change_mode (imode, mode, x, force);
3110 if (x == NULL_RTX)
3111 return NULL_RTX;
3112 y = emit_move_change_mode (imode, mode, y, force);
3113 if (y == NULL_RTX)
3114 return NULL_RTX;
3115 return emit_insn (GEN_FCN (code) (x, y));
3118 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3119 Return an equivalent MEM that does not use an auto-increment. */
3121 static rtx
3122 emit_move_resolve_push (enum machine_mode mode, rtx x)
3124 enum rtx_code code = GET_CODE (XEXP (x, 0));
3125 HOST_WIDE_INT adjust;
3126 rtx temp;
3128 adjust = GET_MODE_SIZE (mode);
3129 #ifdef PUSH_ROUNDING
3130 adjust = PUSH_ROUNDING (adjust);
3131 #endif
3132 if (code == PRE_DEC || code == POST_DEC)
3133 adjust = -adjust;
3134 else if (code == PRE_MODIFY || code == POST_MODIFY)
3136 rtx expr = XEXP (XEXP (x, 0), 1);
3137 HOST_WIDE_INT val;
3139 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3140 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3141 val = INTVAL (XEXP (expr, 1));
3142 if (GET_CODE (expr) == MINUS)
3143 val = -val;
3144 gcc_assert (adjust == val || adjust == -val);
3145 adjust = val;
3148 /* Do not use anti_adjust_stack, since we don't want to update
3149 stack_pointer_delta. */
3150 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3151 gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3152 0, OPTAB_LIB_WIDEN);
3153 if (temp != stack_pointer_rtx)
3154 emit_move_insn (stack_pointer_rtx, temp);
3156 switch (code)
3158 case PRE_INC:
3159 case PRE_DEC:
3160 case PRE_MODIFY:
3161 temp = stack_pointer_rtx;
3162 break;
3163 case POST_INC:
3164 case POST_DEC:
3165 case POST_MODIFY:
3166 temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3167 break;
3168 default:
3169 gcc_unreachable ();
3172 return replace_equiv_address (x, temp);
3175 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3176 X is known to satisfy push_operand, and MODE is known to be complex.
3177 Returns the last instruction emitted. */
3179 rtx
3180 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3182 enum machine_mode submode = GET_MODE_INNER (mode);
3183 bool imag_first;
3185 #ifdef PUSH_ROUNDING
3186 unsigned int submodesize = GET_MODE_SIZE (submode);
3188 /* In case we output to the stack, but the size is smaller than what the
3189 machine can push exactly, we need to use move instructions. */
3190 if (PUSH_ROUNDING (submodesize) != submodesize)
3192 x = emit_move_resolve_push (mode, x);
3193 return emit_move_insn (x, y);
3195 #endif
3197 /* Note that the real part always precedes the imag part in memory
3198 regardless of machine's endianness. */
3199 switch (GET_CODE (XEXP (x, 0)))
3201 case PRE_DEC:
3202 case POST_DEC:
3203 imag_first = true;
3204 break;
3205 case PRE_INC:
3206 case POST_INC:
3207 imag_first = false;
3208 break;
3209 default:
3210 gcc_unreachable ();
3213 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3214 read_complex_part (y, imag_first));
3215 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3216 read_complex_part (y, !imag_first));
3219 /* A subroutine of emit_move_complex. Perform the move from Y to X
3220 via two moves of the parts. Returns the last instruction emitted. */
3222 rtx
3223 emit_move_complex_parts (rtx x, rtx y)
3225 /* Show the output dies here. This is necessary for SUBREGs
3226 of pseudos since we cannot track their lifetimes correctly;
3227 hard regs shouldn't appear here except as return values. */
3228 if (!reload_completed && !reload_in_progress
3229 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3230 emit_clobber (x);
3232 write_complex_part (x, read_complex_part (y, false), false);
3233 write_complex_part (x, read_complex_part (y, true), true);
3235 return get_last_insn ();
3238 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3239 MODE is known to be complex. Returns the last instruction emitted. */
3241 static rtx
3242 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3244 bool try_int;
3246 /* Need to take special care for pushes, to maintain proper ordering
3247 of the data, and possibly extra padding. */
3248 if (push_operand (x, mode))
3249 return emit_move_complex_push (mode, x, y);
3251 /* See if we can coerce the target into moving both values at once, except
3252 for floating point where we favor moving as parts if this is easy. */
3253 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3254 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3255 && !(REG_P (x)
3256 && HARD_REGISTER_P (x)
3257 && hard_regno_nregs[REGNO (x)][mode] == 1)
3258 && !(REG_P (y)
3259 && HARD_REGISTER_P (y)
3260 && hard_regno_nregs[REGNO (y)][mode] == 1))
3261 try_int = false;
3262 /* Not possible if the values are inherently not adjacent. */
3263 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3264 try_int = false;
3265 /* It is possible if both are registers (or subregs of registers). */
3266 else if (register_operand (x, mode) && register_operand (y, mode))
3267 try_int = true;
3268 /* If one of the operands is a memory, and alignment constraints
3269 are friendly enough, we may be able to do combined memory operations.
3270 We do not attempt this if Y is a constant because that combination is
3271 usually better with the by-parts thing below. */
3272 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3273 && (!STRICT_ALIGNMENT
3274 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3275 try_int = true;
3276 else
3277 try_int = false;
3279 if (try_int)
3281 rtx ret;
3283 /* For memory to memory moves, optimal behavior can be had with the
3284 existing block move logic. */
3285 if (MEM_P (x) && MEM_P (y))
3287 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3288 BLOCK_OP_NO_LIBCALL);
3289 return get_last_insn ();
3292 ret = emit_move_via_integer (mode, x, y, true);
3293 if (ret)
3294 return ret;
3297 return emit_move_complex_parts (x, y);
3300 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3301 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3303 static rtx
3304 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3306 rtx ret;
3308 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3309 if (mode != CCmode)
3311 enum insn_code code = optab_handler (mov_optab, CCmode);
3312 if (code != CODE_FOR_nothing)
3314 x = emit_move_change_mode (CCmode, mode, x, true);
3315 y = emit_move_change_mode (CCmode, mode, y, true);
3316 return emit_insn (GEN_FCN (code) (x, y));
3320 /* Otherwise, find the MODE_INT mode of the same width. */
3321 ret = emit_move_via_integer (mode, x, y, false);
3322 gcc_assert (ret != NULL);
3323 return ret;
3326 /* Return true if word I of OP lies entirely in the
3327 undefined bits of a paradoxical subreg. */
3329 static bool
3330 undefined_operand_subword_p (const_rtx op, int i)
3332 enum machine_mode innermode, innermostmode;
3333 int offset;
3334 if (GET_CODE (op) != SUBREG)
3335 return false;
3336 innermode = GET_MODE (op);
3337 innermostmode = GET_MODE (SUBREG_REG (op));
3338 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3339 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3340 memory, except for a paradoxical subreg where we define
3341 SUBREG_BYTE to be 0; undo this exception as in
3342 simplify_subreg. */
3343 if (SUBREG_BYTE (op) == 0
3344 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3346 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3347 if (WORDS_BIG_ENDIAN)
3348 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3349 if (BYTES_BIG_ENDIAN)
3350 offset += difference % UNITS_PER_WORD;
3352 if (offset >= GET_MODE_SIZE (innermostmode)
3353 || offset <= -GET_MODE_SIZE (word_mode))
3354 return true;
3355 return false;
3358 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3359 MODE is any multi-word or full-word mode that lacks a move_insn
3360 pattern. Note that you will get better code if you define such
3361 patterns, even if they must turn into multiple assembler instructions. */
3363 static rtx
3364 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3366 rtx last_insn = 0;
3367 rtx seq, inner;
3368 bool need_clobber;
3369 int i;
3371 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3373 /* If X is a push on the stack, do the push now and replace
3374 X with a reference to the stack pointer. */
3375 if (push_operand (x, mode))
3376 x = emit_move_resolve_push (mode, x);
3378 /* If we are in reload, see if either operand is a MEM whose address
3379 is scheduled for replacement. */
3380 if (reload_in_progress && MEM_P (x)
3381 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3382 x = replace_equiv_address_nv (x, inner);
3383 if (reload_in_progress && MEM_P (y)
3384 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3385 y = replace_equiv_address_nv (y, inner);
3387 start_sequence ();
3389 need_clobber = false;
3390 for (i = 0;
3391 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3392 i++)
3394 rtx xpart = operand_subword (x, i, 1, mode);
3395 rtx ypart;
3397 /* Do not generate code for a move if it would come entirely
3398 from the undefined bits of a paradoxical subreg. */
3399 if (undefined_operand_subword_p (y, i))
3400 continue;
3402 ypart = operand_subword (y, i, 1, mode);
3404 /* If we can't get a part of Y, put Y into memory if it is a
3405 constant. Otherwise, force it into a register. Then we must
3406 be able to get a part of Y. */
3407 if (ypart == 0 && CONSTANT_P (y))
3409 y = use_anchored_address (force_const_mem (mode, y));
3410 ypart = operand_subword (y, i, 1, mode);
3412 else if (ypart == 0)
3413 ypart = operand_subword_force (y, i, mode);
3415 gcc_assert (xpart && ypart);
3417 need_clobber |= (GET_CODE (xpart) == SUBREG);
3419 last_insn = emit_move_insn (xpart, ypart);
3422 seq = get_insns ();
3423 end_sequence ();
3425 /* Show the output dies here. This is necessary for SUBREGs
3426 of pseudos since we cannot track their lifetimes correctly;
3427 hard regs shouldn't appear here except as return values.
3428 We never want to emit such a clobber after reload. */
3429 if (x != y
3430 && ! (reload_in_progress || reload_completed)
3431 && need_clobber != 0)
3432 emit_clobber (x);
3434 emit_insn (seq);
3436 return last_insn;
3439 /* Low level part of emit_move_insn.
3440 Called just like emit_move_insn, but assumes X and Y
3441 are basically valid. */
3443 rtx
3444 emit_move_insn_1 (rtx x, rtx y)
3446 enum machine_mode mode = GET_MODE (x);
3447 enum insn_code code;
3449 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3451 code = optab_handler (mov_optab, mode);
3452 if (code != CODE_FOR_nothing)
3453 return emit_insn (GEN_FCN (code) (x, y));
3455 /* Expand complex moves by moving real part and imag part. */
3456 if (COMPLEX_MODE_P (mode))
3457 return emit_move_complex (mode, x, y);
3459 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3460 || ALL_FIXED_POINT_MODE_P (mode))
3462 rtx result = emit_move_via_integer (mode, x, y, true);
3464 /* If we can't find an integer mode, use multi words. */
3465 if (result)
3466 return result;
3467 else
3468 return emit_move_multi_word (mode, x, y);
3471 if (GET_MODE_CLASS (mode) == MODE_CC)
3472 return emit_move_ccmode (mode, x, y);
3474 /* Try using a move pattern for the corresponding integer mode. This is
3475 only safe when simplify_subreg can convert MODE constants into integer
3476 constants. At present, it can only do this reliably if the value
3477 fits within a HOST_WIDE_INT. */
3478 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3480 rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3482 if (ret)
3484 if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3485 return ret;
3489 return emit_move_multi_word (mode, x, y);
3492 /* Generate code to copy Y into X.
3493 Both Y and X must have the same mode, except that
3494 Y can be a constant with VOIDmode.
3495 This mode cannot be BLKmode; use emit_block_move for that.
3497 Return the last instruction emitted. */
3499 rtx
3500 emit_move_insn (rtx x, rtx y)
3502 enum machine_mode mode = GET_MODE (x);
3503 rtx y_cst = NULL_RTX;
3504 rtx last_insn, set;
3506 gcc_assert (mode != BLKmode
3507 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3509 if (CONSTANT_P (y))
3511 if (optimize
3512 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3513 && (last_insn = compress_float_constant (x, y)))
3514 return last_insn;
3516 y_cst = y;
3518 if (!targetm.legitimate_constant_p (mode, y))
3520 y = force_const_mem (mode, y);
3522 /* If the target's cannot_force_const_mem prevented the spill,
3523 assume that the target's move expanders will also take care
3524 of the non-legitimate constant. */
3525 if (!y)
3526 y = y_cst;
3527 else
3528 y = use_anchored_address (y);
3532 /* If X or Y are memory references, verify that their addresses are valid
3533 for the machine. */
3534 if (MEM_P (x)
3535 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3536 MEM_ADDR_SPACE (x))
3537 && ! push_operand (x, GET_MODE (x))))
3538 x = validize_mem (x);
3540 if (MEM_P (y)
3541 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3542 MEM_ADDR_SPACE (y)))
3543 y = validize_mem (y);
3545 gcc_assert (mode != BLKmode);
3547 last_insn = emit_move_insn_1 (x, y);
3549 if (y_cst && REG_P (x)
3550 && (set = single_set (last_insn)) != NULL_RTX
3551 && SET_DEST (set) == x
3552 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3553 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3555 return last_insn;
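/* Illustrative usage, not part of the original file: the canonical
   way for expander code to copy a value, including a VOIDmode
   constant, into an SImode pseudo:

     rtx tmp = gen_reg_rtx (SImode);
     emit_move_insn (tmp, const0_rtx);  */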
3558 /* If Y is representable exactly in a narrower mode, and the target can
3559 perform the extension directly from constant or memory, then emit the
3560 move as an extension. */
3562 static rtx
3563 compress_float_constant (rtx x, rtx y)
3565 enum machine_mode dstmode = GET_MODE (x);
3566 enum machine_mode orig_srcmode = GET_MODE (y);
3567 enum machine_mode srcmode;
3568 REAL_VALUE_TYPE r;
3569 int oldcost, newcost;
3570 bool speed = optimize_insn_for_speed_p ();
3572 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3574 if (targetm.legitimate_constant_p (dstmode, y))
3575 oldcost = set_src_cost (y, speed);
3576 else
3577 oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3579 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3580 srcmode != orig_srcmode;
3581 srcmode = GET_MODE_WIDER_MODE (srcmode))
3583 enum insn_code ic;
3584 rtx trunc_y, last_insn;
3586 /* Skip if the target can't extend this way. */
3587 ic = can_extend_p (dstmode, srcmode, 0);
3588 if (ic == CODE_FOR_nothing)
3589 continue;
3591 /* Skip if the narrowed value isn't exact. */
3592 if (! exact_real_truncate (srcmode, &r))
3593 continue;
3595 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3597 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3599 /* Skip if the target needs extra instructions to perform
3600 the extension. */
3601 if (!insn_operand_matches (ic, 1, trunc_y))
3602 continue;
3603 /* This is valid, but may not be cheaper than the original. */
3604 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3605 speed);
3606 if (oldcost < newcost)
3607 continue;
3609 else if (float_extend_from_mem[dstmode][srcmode])
3611 trunc_y = force_const_mem (srcmode, trunc_y);
3612 /* This is valid, but may not be cheaper than the original. */
3613 newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3614 speed);
3615 if (oldcost < newcost)
3616 continue;
3617 trunc_y = validize_mem (trunc_y);
3619 else
3620 continue;
3622 /* For CSE's benefit, force the compressed constant pool entry
3623 into a new pseudo. This constant may be used in different modes,
3624 and if not, combine will put things back together for us. */
3625 trunc_y = force_reg (srcmode, trunc_y);
3626 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3627 last_insn = get_last_insn ();
3629 if (REG_P (x))
3630 set_unique_reg_note (last_insn, REG_EQUAL, y);
3632 return last_insn;
3635 return NULL_RTX;
3638 /* Pushing data onto the stack. */
3640 /* Push a block of length SIZE (perhaps variable)
3641 and return an rtx to address the beginning of the block.
3642 The value may be virtual_outgoing_args_rtx.
3644 EXTRA is the number of bytes of padding to push in addition to SIZE.
3645 BELOW nonzero means this padding comes at low addresses;
3646 otherwise, the padding comes at high addresses. */
3648 rtx
3649 push_block (rtx size, int extra, int below)
3651 rtx temp;
3653 size = convert_modes (Pmode, ptr_mode, size, 1);
3654 if (CONSTANT_P (size))
3655 anti_adjust_stack (plus_constant (Pmode, size, extra));
3656 else if (REG_P (size) && extra == 0)
3657 anti_adjust_stack (size);
3658 else
3660 temp = copy_to_mode_reg (Pmode, size);
3661 if (extra != 0)
3662 temp = expand_binop (Pmode, add_optab, temp,
3663 gen_int_mode (extra, Pmode),
3664 temp, 0, OPTAB_LIB_WIDEN);
3665 anti_adjust_stack (temp);
3668 #ifndef STACK_GROWS_DOWNWARD
3669 if (0)
3670 #else
3671 if (1)
3672 #endif
3674 temp = virtual_outgoing_args_rtx;
3675 if (extra != 0 && below)
3676 temp = plus_constant (Pmode, temp, extra);
3678 else
3680 if (CONST_INT_P (size))
3681 temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3682 -INTVAL (size) - (below ? 0 : extra));
3683 else if (extra != 0 && !below)
3684 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 negate_rtx (Pmode, plus_constant (Pmode, size,
3686 extra)));
3687 else
3688 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3689 negate_rtx (Pmode, size));
3692 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3695 /* A utility routine that returns the base of an auto-inc memory, or NULL. */
3697 static rtx
3698 mem_autoinc_base (rtx mem)
3700 if (MEM_P (mem))
3702 rtx addr = XEXP (mem, 0);
3703 if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3704 return XEXP (addr, 0);
3706 return NULL;
3709 /* A utility routine used here, in reload, and in try_split. The insns
3710 after PREV up to and including LAST are known to adjust the stack,
3711 with a final value of END_ARGS_SIZE. Iterate backward from LAST
3712 placing notes as appropriate. PREV may be NULL, indicating the
3713 entire insn sequence prior to LAST should be scanned.
3715 The set of allowed stack pointer modifications is small:
3716 (1) One or more auto-inc style memory references (aka pushes),
3717 (2) One or more addition/subtraction with the SP as destination,
3718 (3) A single move insn with the SP as destination,
3719 (4) A call_pop insn,
3720 (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3722 Insns in the sequence that do not modify the SP are ignored,
3723 except for noreturn calls.
3725 The return value is the amount of adjustment that can be trivially
3726 verified, via immediate operand or auto-inc. If the adjustment
3727 cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN. */
3729 HOST_WIDE_INT
3730 find_args_size_adjust (rtx insn)
3732 rtx dest, set, pat;
3733 int i;
3735 pat = PATTERN (insn);
3736 set = NULL;
3738 /* Look for a call_pop pattern. */
3739 if (CALL_P (insn))
3741 /* We have to allow non-call_pop patterns for the case
3742 of emit_single_push_insn of a TLS address. */
3743 if (GET_CODE (pat) != PARALLEL)
3744 return 0;
3746 /* All call_pop have a stack pointer adjust in the parallel.
3747 The call itself is always first, and the stack adjust is
3748 usually last, so search from the end. */
3749 for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3751 set = XVECEXP (pat, 0, i);
3752 if (GET_CODE (set) != SET)
3753 continue;
3754 dest = SET_DEST (set);
3755 if (dest == stack_pointer_rtx)
3756 break;
3758 /* We'd better have found the stack pointer adjust. */
3759 if (i == 0)
3760 return 0;
3761 /* Fall through to process the extracted SET and DEST
3762 as if it were a standalone insn. */
3764 else if (GET_CODE (pat) == SET)
3765 set = pat;
3766 else if ((set = single_set (insn)) != NULL)
3767 ; /* Single set extracted; handled below. */
3768 else if (GET_CODE (pat) == PARALLEL)
3770 /* ??? Some older ports use a parallel with a stack adjust
3771 and a store for a PUSH_ROUNDING pattern, rather than a
3772 PRE/POST_MODIFY rtx. Don't force them to update yet... */
3773 /* ??? See h8300 and m68k, pushqi1. */
3774 for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3776 set = XVECEXP (pat, 0, i);
3777 if (GET_CODE (set) != SET)
3778 continue;
3779 dest = SET_DEST (set);
3780 if (dest == stack_pointer_rtx)
3781 break;
3783 /* We do not expect an auto-inc of the sp in the parallel. */
3784 gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3785 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3786 != stack_pointer_rtx);
3788 if (i < 0)
3789 return 0;
3791 else
3792 return 0;
3794 dest = SET_DEST (set);
3796 /* Look for direct modifications of the stack pointer. */
3797 if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3799 /* Look for a trivial adjustment, otherwise assume nothing. */
3800 /* Note that the SPU restore_stack_block pattern refers to
3801 the stack pointer in V4SImode. Consider that non-trivial. */
3802 if (SCALAR_INT_MODE_P (GET_MODE (dest))
3803 && GET_CODE (SET_SRC (set)) == PLUS
3804 && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3805 && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3806 return INTVAL (XEXP (SET_SRC (set), 1));
3807 /* ??? Reload can generate no-op moves, which will be cleaned
3808 up later. Recognize it and continue searching. */
3809 else if (rtx_equal_p (dest, SET_SRC (set)))
3810 return 0;
3811 else
3812 return HOST_WIDE_INT_MIN;
3814 else
3816 rtx mem, addr;
3818 /* Otherwise only think about autoinc patterns. */
3819 if (mem_autoinc_base (dest) == stack_pointer_rtx)
3821 mem = dest;
3822 gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3823 != stack_pointer_rtx);
3825 else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3826 mem = SET_SRC (set);
3827 else
3828 return 0;
3830 addr = XEXP (mem, 0);
3831 switch (GET_CODE (addr))
3833 case PRE_INC:
3834 case POST_INC:
3835 return GET_MODE_SIZE (GET_MODE (mem));
3836 case PRE_DEC:
3837 case POST_DEC:
3838 return -GET_MODE_SIZE (GET_MODE (mem));
3839 case PRE_MODIFY:
3840 case POST_MODIFY:
3841 addr = XEXP (addr, 1);
3842 gcc_assert (GET_CODE (addr) == PLUS);
3843 gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3844 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3845 return INTVAL (XEXP (addr, 1));
3846 default:
3847 gcc_unreachable ();
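/* Worked examples (illustrative only, assuming 4-byte SImode):

     (set (mem:SI (pre_dec (reg sp))) (reg r0))      -> -4
     (set (mem:SI (post_inc (reg sp))) (reg r0))     ->  4
     (set (reg sp) (plus (reg sp) (const_int -12)))  -> -12
     (set (reg sp) (reg fp))                         -> HOST_WIDE_INT_MIN

   The last form is a non-trivial stack pointer update whose net effect
   cannot be read off the pattern itself.  */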
3853 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3855 int args_size = end_args_size;
3856 bool saw_unknown = false;
3857 rtx insn;
3859 for (insn = last; insn != prev; insn = PREV_INSN (insn))
3861 HOST_WIDE_INT this_delta;
3863 if (!NONDEBUG_INSN_P (insn))
3864 continue;
3866 this_delta = find_args_size_adjust (insn);
3867 if (this_delta == 0)
3869 if (!CALL_P (insn)
3870 || ACCUMULATE_OUTGOING_ARGS
3871 || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3872 continue;
3875 gcc_assert (!saw_unknown);
3876 if (this_delta == HOST_WIDE_INT_MIN)
3877 saw_unknown = true;
3879 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3880 #ifdef STACK_GROWS_DOWNWARD
3881 this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3882 #endif
3883 args_size -= this_delta;
3886 return saw_unknown ? INT_MIN : args_size;
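/* Example (a sketch, assuming STACK_GROWS_DOWNWARD and two 4-byte
   pushes with END_ARGS_SIZE == 8): the backward walk annotates

     push arg1    ;; gets REG_ARGS_SIZE (const_int 4)
     push arg2    ;; gets REG_ARGS_SIZE (const_int 8)

   so each note records the outgoing-args size in effect just after
   its insn.  */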
3889 #ifdef PUSH_ROUNDING
3890 /* Emit a single push insn. */
3892 static void
3893 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3895 rtx dest_addr;
3896 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3897 rtx dest;
3898 enum insn_code icode;
3900 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3901 /* If there is a push pattern, use it. Otherwise try the old way of
3902 throwing a MEM representing the push operation to the move expander. */
3903 icode = optab_handler (push_optab, mode);
3904 if (icode != CODE_FOR_nothing)
3906 struct expand_operand ops[1];
3908 create_input_operand (&ops[0], x, mode);
3909 if (maybe_expand_insn (icode, 1, ops))
3910 return;
3912 if (GET_MODE_SIZE (mode) == rounded_size)
3913 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3914 /* If we are to pad downward, adjust the stack pointer first and
3915 then store X into the stack location using an offset. This is
3916 because emit_move_insn does not know how to pad; it does not have
3917 access to TYPE. */
3918 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3920 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3921 HOST_WIDE_INT offset;
3923 emit_move_insn (stack_pointer_rtx,
3924 expand_binop (Pmode,
3925 #ifdef STACK_GROWS_DOWNWARD
3926 sub_optab,
3927 #else
3928 add_optab,
3929 #endif
3930 stack_pointer_rtx,
3931 gen_int_mode (rounded_size, Pmode),
3932 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3934 offset = (HOST_WIDE_INT) padding_size;
3935 #ifdef STACK_GROWS_DOWNWARD
3936 if (STACK_PUSH_CODE == POST_DEC)
3937 /* We have already decremented the stack pointer, so get the
3938 previous value. */
3939 offset += (HOST_WIDE_INT) rounded_size;
3940 #else
3941 if (STACK_PUSH_CODE == POST_INC)
3942 /* We have already incremented the stack pointer, so get the
3943 previous value. */
3944 offset -= (HOST_WIDE_INT) rounded_size;
3945 #endif
3946 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3947 gen_int_mode (offset, Pmode));
3949 else
3951 #ifdef STACK_GROWS_DOWNWARD
3952 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3953 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3954 gen_int_mode (-(HOST_WIDE_INT) rounded_size,
3955 Pmode));
3956 #else
3957 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3958 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3959 gen_int_mode (rounded_size, Pmode));
3960 #endif
3961 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3964 dest = gen_rtx_MEM (mode, dest_addr);
3966 if (type != 0)
3968 set_mem_attributes (dest, type, 1);
3970 if (flag_optimize_sibling_calls)
3971 /* Function incoming arguments may overlap with sibling call
3972 outgoing arguments and we cannot allow reordering of reads
3973 from function arguments with stores to outgoing arguments
3974 of sibling calls. */
3975 set_mem_alias_set (dest, 0);
3977 emit_move_insn (dest, x);
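/* Worked example of the downward-padding path above (illustrative):
   with GET_MODE_SIZE (mode) == 5 and PUSH_ROUNDING rounding that to 8,
   padding_size is 3.  On a STACK_GROWS_DOWNWARD target with
   STACK_PUSH_CODE == PRE_DEC, the stack pointer is first lowered by 8
   and X is stored at sp + 3, leaving the three padding bytes at the
   lower addresses sp .. sp + 2, which is what `downward' padding
   means.  With POST_DEC the pointer has already moved past the slot,
   so rounded_size is folded back into the offset first.  */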
3980 /* Emit and annotate a single push insn. */
3982 static void
3983 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3985 int delta, old_delta = stack_pointer_delta;
3986 rtx prev = get_last_insn ();
3987 rtx last;
3989 emit_single_push_insn_1 (mode, x, type);
3991 last = get_last_insn ();
3993 /* Notice the common case where we emitted exactly one insn. */
3994 if (PREV_INSN (last) == prev)
3996 add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3997 return;
4000 delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4001 gcc_assert (delta == INT_MIN || delta == old_delta);
4003 #endif
4005 /* Generate code to push X onto the stack, assuming it has mode MODE and
4006 type TYPE.
4007 MODE is redundant except when X is a CONST_INT (since they don't
4008 carry mode info).
4009 SIZE is an rtx for the size of data to be copied (in bytes),
4010 needed only if X is BLKmode.
4012 ALIGN (in bits) is maximum alignment we can assume.
4014 If PARTIAL and REG are both nonzero, then copy that many of the first
4015 bytes of X into registers starting with REG, and push the rest of X.
4016 The amount of space pushed is decreased by PARTIAL bytes.
4017 REG must be a hard register in this case.
4018 If REG is zero but PARTIAL is not, take all other actions for an
4019 argument partially in registers, but do not actually load any
4020 registers.
4022 EXTRA is the amount in bytes of extra space to leave next to this arg.
4023 This is ignored if an argument block has already been allocated.
4025 On a machine that lacks real push insns, ARGS_ADDR is the address of
4026 the bottom of the argument block for this call. We use indexing off there
4027 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
4028 argument block has not been preallocated.
4030 ARGS_SO_FAR is the size of args previously pushed for this call.
4032 REG_PARM_STACK_SPACE is nonzero if functions require stack space
4033 for arguments passed in registers. If nonzero, it will be the number
4034 of bytes required. */
4036 void
4037 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4038 unsigned int align, int partial, rtx reg, int extra,
4039 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4040 rtx alignment_pad)
4042 rtx xinner;
4043 enum direction stack_direction
4044 #ifdef STACK_GROWS_DOWNWARD
4045 = downward;
4046 #else
4047 = upward;
4048 #endif
4050 /* Decide where to pad the argument: `downward' for below,
4051 `upward' for above, or `none' for don't pad it.
4052 Default is below for small data on big-endian machines; else above. */
4053 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4055 /* Invert direction if stack is post-decrement.
4056 FIXME: why? */
4057 if (STACK_PUSH_CODE == POST_DEC)
4058 if (where_pad != none)
4059 where_pad = (where_pad == downward ? upward : downward);
4061 xinner = x;
4063 if (mode == BLKmode
4064 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
4066 /* Copy a block into the stack, entirely or partially. */
4068 rtx temp;
4069 int used;
4070 int offset;
4071 int skip;
4073 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4074 used = partial - offset;
4076 if (mode != BLKmode)
4078 /* A value is to be stored in an insufficiently aligned
4079 stack slot; copy via a suitably aligned slot if
4080 necessary. */
4081 size = GEN_INT (GET_MODE_SIZE (mode));
4082 if (!MEM_P (xinner))
4084 temp = assign_temp (type, 1, 1);
4085 emit_move_insn (temp, xinner);
4086 xinner = temp;
4090 gcc_assert (size);
4092 /* USED is now the # of bytes we need not copy to the stack
4093 because registers will take care of them. */
4095 if (partial != 0)
4096 xinner = adjust_address (xinner, BLKmode, used);
4098 /* If the partial register-part of the arg counts in its stack size,
4099 skip the part of stack space corresponding to the registers.
4100 Otherwise, start copying to the beginning of the stack space,
4101 by setting SKIP to 0. */
4102 skip = (reg_parm_stack_space == 0) ? 0 : used;
4104 #ifdef PUSH_ROUNDING
4105 /* Do it with several push insns if that doesn't take lots of insns
4106 and if there is no difficulty with push insns that skip bytes
4107 on the stack for alignment purposes. */
4108 if (args_addr == 0
4109 && PUSH_ARGS
4110 && CONST_INT_P (size)
4111 && skip == 0
4112 && MEM_ALIGN (xinner) >= align
4113 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4114 /* Here we avoid the case of a structure whose weak alignment
4115 forces many pushes of small amounts of data,
4116 where the rounding done by such small pushes causes trouble. */
4117 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4118 || align >= BIGGEST_ALIGNMENT
4119 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4120 == (align / BITS_PER_UNIT)))
4121 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4123 /* Push padding now if padding above and stack grows down,
4124 or if padding below and stack grows up.
4125 But if space already allocated, this has already been done. */
4126 if (extra && args_addr == 0
4127 && where_pad != none && where_pad != stack_direction)
4128 anti_adjust_stack (GEN_INT (extra));
4130 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4132 else
4133 #endif /* PUSH_ROUNDING */
4135 rtx target;
4137 /* Otherwise make space on the stack and copy the data
4138 to the address of that space. */
4140 /* Deduct the bytes put into registers from the size we must copy. */
4141 if (partial != 0)
4143 if (CONST_INT_P (size))
4144 size = GEN_INT (INTVAL (size) - used);
4145 else
4146 size = expand_binop (GET_MODE (size), sub_optab, size,
4147 gen_int_mode (used, GET_MODE (size)),
4148 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4151 /* Get the address of the stack space.
4152 In this case, we do not deal with EXTRA separately.
4153 A single stack adjust will do. */
4154 if (! args_addr)
4156 temp = push_block (size, extra, where_pad == downward);
4157 extra = 0;
4159 else if (CONST_INT_P (args_so_far))
4160 temp = memory_address (BLKmode,
4161 plus_constant (Pmode, args_addr,
4162 skip + INTVAL (args_so_far)));
4163 else
4164 temp = memory_address (BLKmode,
4165 plus_constant (Pmode,
4166 gen_rtx_PLUS (Pmode,
4167 args_addr,
4168 args_so_far),
4169 skip));
4171 if (!ACCUMULATE_OUTGOING_ARGS)
4173 /* If the source is referenced relative to the stack pointer,
4174 copy it to another register to stabilize it. We do not need
4175 to do this if we know that we won't be changing sp. */
4177 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4178 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4179 temp = copy_to_reg (temp);
4182 target = gen_rtx_MEM (BLKmode, temp);
4184 /* We do *not* set_mem_attributes here, because incoming arguments
4185 may overlap with sibling call outgoing arguments and we cannot
4186 allow reordering of reads from function arguments with stores
4187 to outgoing arguments of sibling calls. We do, however, want
4188 to record the alignment of the stack slot. */
4189 /* ALIGN may well be better aligned than TYPE, e.g. due to
4190 PARM_BOUNDARY. Assume the caller isn't lying. */
4191 set_mem_align (target, align);
4193 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4196 else if (partial > 0)
4198 /* Scalar partly in registers. */
4200 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4201 int i;
4202 int not_stack;
4203 /* # bytes of start of argument
4204 that we must make space for but need not store. */
4205 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4206 int args_offset = INTVAL (args_so_far);
4207 int skip;
4209 /* Push padding now if padding above and stack grows down,
4210 or if padding below and stack grows up.
4211 But if space already allocated, this has already been done. */
4212 if (extra && args_addr == 0
4213 && where_pad != none && where_pad != stack_direction)
4214 anti_adjust_stack (GEN_INT (extra));
4216 /* If we make space by pushing it, we might as well push
4217 the real data. Otherwise, we can leave OFFSET nonzero
4218 and leave the space uninitialized. */
4219 if (args_addr == 0)
4220 offset = 0;
4222 /* Now NOT_STACK gets the number of words that we don't need to
4223 allocate on the stack. Convert OFFSET to words too. */
4224 not_stack = (partial - offset) / UNITS_PER_WORD;
4225 offset /= UNITS_PER_WORD;
4227 /* If the partial register-part of the arg counts in its stack size,
4228 skip the part of stack space corresponding to the registers.
4229 Otherwise, start copying to the beginning of the stack space,
4230 by setting SKIP to 0. */
4231 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4233 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4234 x = validize_mem (force_const_mem (mode, x));
4236 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4237 SUBREGs of such registers are not allowed. */
4238 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4239 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4240 x = copy_to_reg (x);
4242 /* Loop over all the words allocated on the stack for this arg. */
4243 /* We can do it by words, because any scalar bigger than a word
4244 has a size that is a multiple of a word. */
4245 #ifndef PUSH_ARGS_REVERSED
4246 for (i = not_stack; i < size; i++)
4247 #else
4248 for (i = size - 1; i >= not_stack; i--)
4249 #endif
4250 if (i >= not_stack + offset)
4251 emit_push_insn (operand_subword_force (x, i, mode),
4252 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4253 0, args_addr,
4254 GEN_INT (args_offset + ((i - not_stack + skip)
4255 * UNITS_PER_WORD)),
4256 reg_parm_stack_space, alignment_pad);
4258 else
4260 rtx addr;
4261 rtx dest;
4263 /* Push padding now if padding above and stack grows down,
4264 or if padding below and stack grows up.
4265 But if space already allocated, this has already been done. */
4266 if (extra && args_addr == 0
4267 && where_pad != none && where_pad != stack_direction)
4268 anti_adjust_stack (GEN_INT (extra));
4270 #ifdef PUSH_ROUNDING
4271 if (args_addr == 0 && PUSH_ARGS)
4272 emit_single_push_insn (mode, x, type);
4273 else
4274 #endif
4276 if (CONST_INT_P (args_so_far))
4277 addr
4278 = memory_address (mode,
4279 plus_constant (Pmode, args_addr,
4280 INTVAL (args_so_far)));
4281 else
4282 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4283 args_so_far));
4284 dest = gen_rtx_MEM (mode, addr);
4286 /* We do *not* set_mem_attributes here, because incoming arguments
4287 may overlap with sibling call outgoing arguments and we cannot
4288 allow reordering of reads from function arguments with stores
4289 to outgoing arguments of sibling calls. We do, however, want
4290 to record the alignment of the stack slot. */
4291 /* ALIGN may well be better aligned than TYPE, e.g. due to
4292 PARM_BOUNDARY. Assume the caller isn't lying. */
4293 set_mem_align (dest, align);
4295 emit_move_insn (dest, x);
4299 /* If part should go in registers, copy that part
4300 into the appropriate registers. Do this now, at the end,
4301 since mem-to-mem copies above may do function calls. */
4302 if (partial > 0 && reg != 0)
4304 /* Handle calls that pass values in multiple non-contiguous locations.
4305 The Irix 6 ABI has examples of this. */
4306 if (GET_CODE (reg) == PARALLEL)
4307 emit_group_load (reg, x, type, -1);
4308 else
4310 gcc_assert (partial % UNITS_PER_WORD == 0);
4311 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4315 if (extra && args_addr == 0 && where_pad == stack_direction)
4316 anti_adjust_stack (GEN_INT (extra));
4318 if (alignment_pad && args_addr == 0)
4319 anti_adjust_stack (alignment_pad);
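/* A sketch of the PARTIAL arithmetic used above (illustrative): with
   PARM_BOUNDARY == 32, PARM_BOUNDARY / BITS_PER_UNIT == 4, so
   PARTIAL == 6 gives OFFSET = 6 % 4 = 2 and USED = 4.  Four of the
   six register bytes are skipped in the stack copy, while the odd two
   are copied anyway so that the stacked part starts on a parameter
   boundary.  */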
4322 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4323 operations. */
4325 static rtx
4326 get_subtarget (rtx x)
4328 return (optimize
4329 || x == 0
4330 /* Only registers can be subtargets. */
4331 || !REG_P (x)
4332 /* Don't use hard regs to avoid extending their life. */
4333 || REGNO (x) < FIRST_PSEUDO_REGISTER
4334 ? 0 : x);
4337 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4338 FIELD is a bitfield. Returns true if the optimization was successful,
4339 and there's nothing else to do. */
4341 static bool
4342 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4343 unsigned HOST_WIDE_INT bitpos,
4344 unsigned HOST_WIDE_INT bitregion_start,
4345 unsigned HOST_WIDE_INT bitregion_end,
4346 enum machine_mode mode1, rtx str_rtx,
4347 tree to, tree src)
4349 enum machine_mode str_mode = GET_MODE (str_rtx);
4350 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4351 tree op0, op1;
4352 rtx value, result;
4353 optab binop;
4354 gimple srcstmt;
4355 enum tree_code code;
4357 if (mode1 != VOIDmode
4358 || bitsize >= BITS_PER_WORD
4359 || str_bitsize > BITS_PER_WORD
4360 || TREE_SIDE_EFFECTS (to)
4361 || TREE_THIS_VOLATILE (to))
4362 return false;
4364 STRIP_NOPS (src);
4365 if (TREE_CODE (src) != SSA_NAME)
4366 return false;
4367 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4368 return false;
4370 srcstmt = get_gimple_for_ssa_name (src);
4371 if (!srcstmt
4372 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4373 return false;
4375 code = gimple_assign_rhs_code (srcstmt);
4377 op0 = gimple_assign_rhs1 (srcstmt);
4379 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4380 to find its initialization. Hopefully the initialization will
4381 be from a bitfield load. */
4382 if (TREE_CODE (op0) == SSA_NAME)
4384 gimple op0stmt = get_gimple_for_ssa_name (op0);
4386 /* We want to eventually have OP0 be the same as TO, which
4387 should be a bitfield. */
4388 if (!op0stmt
4389 || !is_gimple_assign (op0stmt)
4390 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4391 return false;
4392 op0 = gimple_assign_rhs1 (op0stmt);
4395 op1 = gimple_assign_rhs2 (srcstmt);
4397 if (!operand_equal_p (to, op0, 0))
4398 return false;
4400 if (MEM_P (str_rtx))
4402 unsigned HOST_WIDE_INT offset1;
4404 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4405 str_mode = word_mode;
4406 str_mode = get_best_mode (bitsize, bitpos,
4407 bitregion_start, bitregion_end,
4408 MEM_ALIGN (str_rtx), str_mode, 0);
4409 if (str_mode == VOIDmode)
4410 return false;
4411 str_bitsize = GET_MODE_BITSIZE (str_mode);
4413 offset1 = bitpos;
4414 bitpos %= str_bitsize;
4415 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4416 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4418 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4419 return false;
4421 /* If the bit field covers the whole REG/MEM, store_field
4422 will likely generate better code. */
4423 if (bitsize >= str_bitsize)
4424 return false;
4426 /* We can't handle fields split across multiple entities. */
4427 if (bitpos + bitsize > str_bitsize)
4428 return false;
4430 if (BYTES_BIG_ENDIAN)
4431 bitpos = str_bitsize - bitpos - bitsize;
4433 switch (code)
4435 case PLUS_EXPR:
4436 case MINUS_EXPR:
4437 /* For now, just optimize the case of the topmost bitfield,
4438 where we don't need to do any masking, and also
4439 1-bit bitfields, where xor can be used.
4440 We might win by one instruction for the other bitfields
4441 too if insv/extv instructions aren't used, so that
4442 can be added later. */
4443 if (bitpos + bitsize != str_bitsize
4444 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4445 break;
4447 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4448 value = convert_modes (str_mode,
4449 TYPE_MODE (TREE_TYPE (op1)), value,
4450 TYPE_UNSIGNED (TREE_TYPE (op1)));
4452 /* We may be accessing data outside the field, which means
4453 we can alias adjacent data. */
4454 if (MEM_P (str_rtx))
4456 str_rtx = shallow_copy_rtx (str_rtx);
4457 set_mem_alias_set (str_rtx, 0);
4458 set_mem_expr (str_rtx, 0);
4461 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4462 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4464 value = expand_and (str_mode, value, const1_rtx, NULL);
4465 binop = xor_optab;
4467 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4468 result = expand_binop (str_mode, binop, str_rtx,
4469 value, str_rtx, 1, OPTAB_WIDEN);
4470 if (result != str_rtx)
4471 emit_move_insn (str_rtx, result);
4472 return true;
4474 case BIT_IOR_EXPR:
4475 case BIT_XOR_EXPR:
4476 if (TREE_CODE (op1) != INTEGER_CST)
4477 break;
4478 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4479 value = convert_modes (str_mode,
4480 TYPE_MODE (TREE_TYPE (op1)), value,
4481 TYPE_UNSIGNED (TREE_TYPE (op1)));
4483 /* We may be accessing data outside the field, which means
4484 we can alias adjacent data. */
4485 if (MEM_P (str_rtx))
4487 str_rtx = shallow_copy_rtx (str_rtx);
4488 set_mem_alias_set (str_rtx, 0);
4489 set_mem_expr (str_rtx, 0);
4492 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4493 if (bitpos + bitsize != str_bitsize)
4495 rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4496 str_mode);
4497 value = expand_and (str_mode, value, mask, NULL_RTX);
4499 value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4500 result = expand_binop (str_mode, binop, str_rtx,
4501 value, str_rtx, 1, OPTAB_WIDEN);
4502 if (result != str_rtx)
4503 emit_move_insn (str_rtx, result);
4504 return true;
4506 default:
4507 break;
4510 return false;
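/* Source-level illustration of the PLUS_EXPR case above (not from the
   original code): on a little-endian target, given

     struct S { unsigned lo : 28; unsigned hi : 4; } s;
     s.hi += 1;

   HI occupies the topmost bits of the 32-bit word, so no masking is
   needed and the increment becomes a single word-mode add of 1 << 28
   to the underlying word; carries simply fall off the top.  */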
4513 /* In the C++ memory model, consecutive bit fields in a structure are
4514 considered one memory location.
4516 Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4517 returns the bit range of consecutive bits in which this COMPONENT_REF
4518 belongs. The values are returned in *BITSTART and *BITEND. *BITPOS
4519 and *OFFSET may be adjusted in the process.
4521 If the access does not need to be restricted, 0 is returned in both
4522 *BITSTART and *BITEND. */
4524 static void
4525 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4526 unsigned HOST_WIDE_INT *bitend,
4527 tree exp,
4528 HOST_WIDE_INT *bitpos,
4529 tree *offset)
4531 HOST_WIDE_INT bitoffset;
4532 tree field, repr;
4534 gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4536 field = TREE_OPERAND (exp, 1);
4537 repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4538 /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4539 need to limit the range we can access. */
4540 if (!repr)
4542 *bitstart = *bitend = 0;
4543 return;
4546 /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4547 part of a larger bit field, then the representative does not serve any
4548 useful purpose. This can occur in Ada. */
4549 if (handled_component_p (TREE_OPERAND (exp, 0)))
4551 enum machine_mode rmode;
4552 HOST_WIDE_INT rbitsize, rbitpos;
4553 tree roffset;
4554 int unsignedp;
4555 int volatilep = 0;
4556 get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4557 &roffset, &rmode, &unsignedp, &volatilep, false);
4558 if ((rbitpos % BITS_PER_UNIT) != 0)
4560 *bitstart = *bitend = 0;
4561 return;
4565 /* Compute the adjustment to bitpos from the offset of the field
4566 relative to the representative. DECL_FIELD_OFFSET of field and
4567 repr are the same by construction if they are not constants,
4568 see finish_bitfield_layout. */
4569 if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4570 && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4571 bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4572 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4573 else
4574 bitoffset = 0;
4575 bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4576 - tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4578 /* If the adjustment is larger than bitpos, we would have a negative bit
4579 position for the lower bound and this may wreak havoc later. Adjust
4580 offset and bitpos to make the lower bound non-negative in that case. */
4581 if (bitoffset > *bitpos)
4583 HOST_WIDE_INT adjust = bitoffset - *bitpos;
4584 gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4586 *bitpos += adjust;
4587 if (*offset == NULL_TREE)
4588 *offset = size_int (-adjust / BITS_PER_UNIT);
4589 else
4590 *offset
4591 = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4592 *bitstart = 0;
4594 else
4595 *bitstart = *bitpos - bitoffset;
4597 *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
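/* Illustration (hypothetical layout, typical ILP32/LP64 targets):
   given

     struct S { int a : 7; int b : 9; int c; };

   A and B share one DECL_BIT_FIELD_REPRESENTATIVE covering bits
   0..15 of the struct, so a store to s.b yields *BITSTART == 0 and
   *BITEND == 15: under the C++ memory model the store may touch A's
   bits but must not touch C.  */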
4600 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4601 in memory and has non-BLKmode. DECL_RTL must not be a MEM; if
4602 DECL_RTL was not set yet, return NORTL. */
4604 static inline bool
4605 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4607 if (TREE_CODE (addr) != ADDR_EXPR)
4608 return false;
4610 tree base = TREE_OPERAND (addr, 0);
4612 if (!DECL_P (base)
4613 || TREE_ADDRESSABLE (base)
4614 || DECL_MODE (base) == BLKmode)
4615 return false;
4617 if (!DECL_RTL_SET_P (base))
4618 return nortl;
4620 return (!MEM_P (DECL_RTL (base)));
4623 /* Returns true if the MEM_REF REF refers to an object that does not
4624 reside in memory and has non-BLKmode. */
4626 static inline bool
4627 mem_ref_refers_to_non_mem_p (tree ref)
4629 tree base = TREE_OPERAND (ref, 0);
4630 return addr_expr_of_non_mem_decl_p_1 (base, false);
4633 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4634 is true, try generating a nontemporal store. */
4636 void
4637 expand_assignment (tree to, tree from, bool nontemporal)
4639 rtx to_rtx = 0;
4640 rtx result;
4641 enum machine_mode mode;
4642 unsigned int align;
4643 enum insn_code icode;
4645 /* Don't crash if the lhs of the assignment was erroneous. */
4646 if (TREE_CODE (to) == ERROR_MARK)
4648 expand_normal (from);
4649 return;
4652 /* Optimize away no-op moves without side-effects. */
4653 if (operand_equal_p (to, from, 0))
4654 return;
4656 /* Handle misaligned stores. */
4657 mode = TYPE_MODE (TREE_TYPE (to));
4658 if ((TREE_CODE (to) == MEM_REF
4659 || TREE_CODE (to) == TARGET_MEM_REF)
4660 && mode != BLKmode
4661 && !mem_ref_refers_to_non_mem_p (to)
4662 && ((align = get_object_alignment (to))
4663 < GET_MODE_ALIGNMENT (mode))
4664 && (((icode = optab_handler (movmisalign_optab, mode))
4665 != CODE_FOR_nothing)
4666 || SLOW_UNALIGNED_ACCESS (mode, align)))
4668 rtx reg, mem;
4670 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4671 reg = force_not_mem (reg);
4672 mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4674 if (icode != CODE_FOR_nothing)
4676 struct expand_operand ops[2];
4678 create_fixed_operand (&ops[0], mem);
4679 create_input_operand (&ops[1], reg, mode);
4680 /* The movmisalign<mode> pattern cannot fail, else the assignment
4681 would silently be omitted. */
4682 expand_insn (icode, 2, ops);
4684 else
4685 store_bit_field (mem, GET_MODE_BITSIZE (mode),
4686 0, 0, 0, mode, reg);
4687 return;
4690 /* Assignment of a structure component needs special treatment
4691 if the structure component's rtx is not simply a MEM.
4692 Assignment of an array element at a constant index, and assignment of
4693 an array element in an unaligned packed structure field, has the same
4694 problem. Same for (partially) storing into a non-memory object. */
4695 if (handled_component_p (to)
4696 || (TREE_CODE (to) == MEM_REF
4697 && mem_ref_refers_to_non_mem_p (to))
4698 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4700 enum machine_mode mode1;
4701 HOST_WIDE_INT bitsize, bitpos;
4702 unsigned HOST_WIDE_INT bitregion_start = 0;
4703 unsigned HOST_WIDE_INT bitregion_end = 0;
4704 tree offset;
4705 int unsignedp;
4706 int volatilep = 0;
4707 tree tem;
4709 push_temp_slots ();
4710 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4711 &unsignedp, &volatilep, true);
4713 /* Make sure bitpos is not negative, it can wreak havoc later. */
4714 if (bitpos < 0)
4716 gcc_assert (offset == NULL_TREE);
4717 offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4718 ? 3 : exact_log2 (BITS_PER_UNIT)));
4719 bitpos &= BITS_PER_UNIT - 1;
4722 if (TREE_CODE (to) == COMPONENT_REF
4723 && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4724 get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4726 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4728 /* If the bitfield is volatile, we want to access it in the
4729 field's mode, not the computed mode.
4730 If a MEM has VOIDmode (external with incomplete type),
4731 use BLKmode for it instead. */
4732 if (MEM_P (to_rtx))
4734 if (volatilep && flag_strict_volatile_bitfields > 0)
4735 to_rtx = adjust_address (to_rtx, mode1, 0);
4736 else if (GET_MODE (to_rtx) == VOIDmode)
4737 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4740 if (offset != 0)
4742 enum machine_mode address_mode;
4743 rtx offset_rtx;
4745 if (!MEM_P (to_rtx))
4747 /* We can get constant negative offsets into arrays with broken
4748 user code. Translate this to a trap instead of ICEing. */
4749 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4750 expand_builtin_trap ();
4751 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4754 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4755 address_mode = get_address_mode (to_rtx);
4756 if (GET_MODE (offset_rtx) != address_mode)
4757 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4759 /* A constant address in TO_RTX can have VOIDmode; we must not
4760 call force_reg in that case, so avoid it. */
4761 if (MEM_P (to_rtx)
4762 && GET_MODE (to_rtx) == BLKmode
4763 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4764 && bitsize > 0
4765 && (bitpos % bitsize) == 0
4766 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4767 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4769 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4770 bitpos = 0;
4773 to_rtx = offset_address (to_rtx, offset_rtx,
4774 highest_pow2_factor_for_target (to,
4775 offset));
4778 /* No action is needed if the target is not a memory and the field
4779 lies completely outside that target. This can occur if the source
4780 code contains an out-of-bounds access to a small array. */
4781 if (!MEM_P (to_rtx)
4782 && GET_MODE (to_rtx) != BLKmode
4783 && (unsigned HOST_WIDE_INT) bitpos
4784 >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4786 expand_normal (from);
4787 result = NULL;
4789 /* Handle expand_expr of a complex value returning a CONCAT. */
4790 else if (GET_CODE (to_rtx) == CONCAT)
4792 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4793 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4794 && bitpos == 0
4795 && bitsize == mode_bitsize)
4796 result = store_expr (from, to_rtx, false, nontemporal);
4797 else if (bitsize == mode_bitsize / 2
4798 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4799 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4800 nontemporal);
4801 else if (bitpos + bitsize <= mode_bitsize / 2)
4802 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4803 bitregion_start, bitregion_end,
4804 mode1, from,
4805 get_alias_set (to), nontemporal);
4806 else if (bitpos >= mode_bitsize / 2)
4807 result = store_field (XEXP (to_rtx, 1), bitsize,
4808 bitpos - mode_bitsize / 2,
4809 bitregion_start, bitregion_end,
4810 mode1, from,
4811 get_alias_set (to), nontemporal);
4812 else if (bitpos == 0 && bitsize == mode_bitsize)
4814 rtx from_rtx;
4815 result = expand_normal (from);
4816 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4817 TYPE_MODE (TREE_TYPE (from)), 0);
4818 emit_move_insn (XEXP (to_rtx, 0),
4819 read_complex_part (from_rtx, false));
4820 emit_move_insn (XEXP (to_rtx, 1),
4821 read_complex_part (from_rtx, true));
4823 else
4825 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4826 GET_MODE_SIZE (GET_MODE (to_rtx)));
4827 write_complex_part (temp, XEXP (to_rtx, 0), false);
4828 write_complex_part (temp, XEXP (to_rtx, 1), true);
4829 result = store_field (temp, bitsize, bitpos,
4830 bitregion_start, bitregion_end,
4831 mode1, from,
4832 get_alias_set (to), nontemporal);
4833 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4834 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4837 else
4839 if (MEM_P (to_rtx))
4841 /* If the field is at offset zero, we could have been given the
4842 DECL_RTL of the parent struct. Don't munge it. */
4843 to_rtx = shallow_copy_rtx (to_rtx);
4844 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4845 if (volatilep)
4846 MEM_VOLATILE_P (to_rtx) = 1;
4849 if (optimize_bitfield_assignment_op (bitsize, bitpos,
4850 bitregion_start, bitregion_end,
4851 mode1,
4852 to_rtx, to, from))
4853 result = NULL;
4854 else
4855 result = store_field (to_rtx, bitsize, bitpos,
4856 bitregion_start, bitregion_end,
4857 mode1, from,
4858 get_alias_set (to), nontemporal);
4861 if (result)
4862 preserve_temp_slots (result);
4863 pop_temp_slots ();
4864 return;
4867 /* If the rhs is a function call and its value is not an aggregate,
4868 call the function before we start to compute the lhs.
4869 This is needed for correct code for cases such as
4870 val = setjmp (buf) on machines where reference to val
4871 requires loading up part of an address in a separate insn.
4873 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4874 since it might be a promoted variable where the zero- or sign-extension
4875 needs to be done. Handling this in the normal way is safe because no
4876 computation is done before the call. The same is true for SSA names. */
4877 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4878 && COMPLETE_TYPE_P (TREE_TYPE (from))
4879 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4880 && ! (((TREE_CODE (to) == VAR_DECL
4881 || TREE_CODE (to) == PARM_DECL
4882 || TREE_CODE (to) == RESULT_DECL)
4883 && REG_P (DECL_RTL (to)))
4884 || TREE_CODE (to) == SSA_NAME))
4886 rtx value;
4888 push_temp_slots ();
4889 value = expand_normal (from);
4890 if (to_rtx == 0)
4891 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4893 /* Handle calls that return values in multiple non-contiguous locations.
4894 The Irix 6 ABI has examples of this. */
4895 if (GET_CODE (to_rtx) == PARALLEL)
4897 if (GET_CODE (value) == PARALLEL)
4898 emit_group_move (to_rtx, value);
4899 else
4900 emit_group_load (to_rtx, value, TREE_TYPE (from),
4901 int_size_in_bytes (TREE_TYPE (from)));
4903 else if (GET_CODE (value) == PARALLEL)
4904 emit_group_store (to_rtx, value, TREE_TYPE (from),
4905 int_size_in_bytes (TREE_TYPE (from)));
4906 else if (GET_MODE (to_rtx) == BLKmode)
4908 /* Handle calls that return BLKmode values in registers. */
4909 if (REG_P (value))
4910 copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4911 else
4912 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4914 else
4916 if (POINTER_TYPE_P (TREE_TYPE (to)))
4917 value = convert_memory_address_addr_space
4918 (GET_MODE (to_rtx), value,
4919 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4921 emit_move_insn (to_rtx, value);
4923 preserve_temp_slots (to_rtx);
4924 pop_temp_slots ();
4925 return;
4928 /* Ordinary treatment. Expand TO to get a REG or MEM rtx. */
4929 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4931 /* Don't move directly into a return register. */
4932 if (TREE_CODE (to) == RESULT_DECL
4933 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4935 rtx temp;
4937 push_temp_slots ();
4939 /* If the source is itself a return value, it still is in a pseudo at
4940 this point so we can move it back to the return register directly. */
4941 if (REG_P (to_rtx)
4942 && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4943 && TREE_CODE (from) != CALL_EXPR)
4944 temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4945 else
4946 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4948 /* Handle calls that return values in multiple non-contiguous locations.
4949 The Irix 6 ABI has examples of this. */
4950 if (GET_CODE (to_rtx) == PARALLEL)
4952 if (GET_CODE (temp) == PARALLEL)
4953 emit_group_move (to_rtx, temp);
4954 else
4955 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4956 int_size_in_bytes (TREE_TYPE (from)));
4958 else if (temp)
4959 emit_move_insn (to_rtx, temp);
4961 preserve_temp_slots (to_rtx);
4962 pop_temp_slots ();
4963 return;
4966 /* In case we are returning the contents of an object which overlaps
4967 the place the value is being stored, use a safe function when copying
4968 a value through a pointer into a structure value return block. */
4969 if (TREE_CODE (to) == RESULT_DECL
4970 && TREE_CODE (from) == INDIRECT_REF
4971 && ADDR_SPACE_GENERIC_P
4972 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4973 && refs_may_alias_p (to, from)
4974 && cfun->returns_struct
4975 && !cfun->returns_pcc_struct)
4977 rtx from_rtx, size;
4979 push_temp_slots ();
4980 size = expr_size (from);
4981 from_rtx = expand_normal (from);
4983 emit_library_call (memmove_libfunc, LCT_NORMAL,
4984 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4985 XEXP (from_rtx, 0), Pmode,
4986 convert_to_mode (TYPE_MODE (sizetype),
4987 size, TYPE_UNSIGNED (sizetype)),
4988 TYPE_MODE (sizetype));
4990 preserve_temp_slots (to_rtx);
4991 pop_temp_slots ();
4992 return;
4995 /* Compute FROM and store the value in the rtx we got. */
4997 push_temp_slots ();
4998 result = store_expr (from, to_rtx, 0, nontemporal);
4999 preserve_temp_slots (result);
5000 pop_temp_slots ();
5001 return;
5004 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
5005 succeeded, false otherwise. */
5007 bool
5008 emit_storent_insn (rtx to, rtx from)
5010 struct expand_operand ops[2];
5011 enum machine_mode mode = GET_MODE (to);
5012 enum insn_code code = optab_handler (storent_optab, mode);
5014 if (code == CODE_FOR_nothing)
5015 return false;
5017 create_fixed_operand (&ops[0], to);
5018 create_input_operand (&ops[1], from, mode);
5019 return maybe_expand_insn (code, 2, ops);
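/* A sketch of the intended use (see store_expr below): when the tree
   level marks an assignment nontemporal, the caller tries

     if (nontemporal && emit_storent_insn (target, temp))
       ;  /* done */

   and falls back to an ordinary move when the target provides no
   storent pattern for the mode.  */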
5022 /* Generate code for computing expression EXP,
5023 and storing the value into TARGET.
5025 If the mode is BLKmode then we may return TARGET itself.
5026 It turns out that in BLKmode it doesn't cause a problem,
5027 because C has no operators that could combine two different
5028 assignments into the same BLKmode object with different values
5029 with no sequence point. Will other languages need this to
5030 be more thorough?
5032 If CALL_PARAM_P is nonzero, this is a store into a call param on the
5033 stack, and block moves may need to be treated specially.
5035 If NONTEMPORAL is true, try using a nontemporal store instruction. */
5038 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5040 rtx temp;
5041 rtx alt_rtl = NULL_RTX;
5042 location_t loc = curr_insn_location ();
5044 if (VOID_TYPE_P (TREE_TYPE (exp)))
5046 /* C++ can generate ?: expressions with a throw expression in one
5047 branch and an rvalue in the other. Here, we resolve attempts to
5048 store the throw expression's nonexistent result. */
5049 gcc_assert (!call_param_p);
5050 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5051 return NULL_RTX;
5053 if (TREE_CODE (exp) == COMPOUND_EXPR)
5055 /* Perform first part of compound expression, then assign from second
5056 part. */
5057 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5058 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5059 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5060 nontemporal);
5062 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5064 /* For a conditional expression, get a safe form of the target. Then
5065 test the condition, doing the appropriate assignment on either
5066 side. This avoids the creation of unnecessary temporaries.
5067 For non-BLKmode, it is more efficient not to do this. */
5069 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5071 do_pending_stack_adjust ();
5072 NO_DEFER_POP;
5073 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5074 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5075 nontemporal);
5076 emit_jump_insn (gen_jump (lab2));
5077 emit_barrier ();
5078 emit_label (lab1);
5079 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5080 nontemporal);
5081 emit_label (lab2);
5082 OK_DEFER_POP;
5084 return NULL_RTX;
5086 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5087 /* If this is a scalar in a register that is stored in a wider mode
5088 than the declared mode, compute the result into its declared mode
5089 and then convert to the wider mode. Our value is the computed
5090 expression. */
5092 rtx inner_target = 0;
5094 /* We can do the conversion inside EXP, which will often result
5095 in some optimizations. Do the conversion in two steps: first
5096 change the signedness, if needed, then do the extension. But don't
5097 do this if the type of EXP is a subtype of something else
5098 since then the conversion might involve more than just
5099 converting modes. */
5100 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5101 && TREE_TYPE (TREE_TYPE (exp)) == 0
5102 && GET_MODE_PRECISION (GET_MODE (target))
5103 == TYPE_PRECISION (TREE_TYPE (exp)))
5105 if (TYPE_UNSIGNED (TREE_TYPE (exp))
5106 != SUBREG_PROMOTED_UNSIGNED_P (target))
5108 /* Some types, e.g. Fortran's logical*4, won't have a signed
5109 version, so use the mode instead. */
5110 tree ntype
5111 = (signed_or_unsigned_type_for
5112 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5113 if (ntype == NULL)
5114 ntype = lang_hooks.types.type_for_mode
5115 (TYPE_MODE (TREE_TYPE (exp)),
5116 SUBREG_PROMOTED_UNSIGNED_P (target));
5118 exp = fold_convert_loc (loc, ntype, exp);
5121 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5122 (GET_MODE (SUBREG_REG (target)),
5123 SUBREG_PROMOTED_UNSIGNED_P (target)),
5124 exp);
5126 inner_target = SUBREG_REG (target);
5129 temp = expand_expr (exp, inner_target, VOIDmode,
5130 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5132 /* If TEMP is a VOIDmode constant, use convert_modes to make
5133 sure that we properly convert it. */
5134 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5136 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5137 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5138 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5139 GET_MODE (target), temp,
5140 SUBREG_PROMOTED_UNSIGNED_P (target));
5143 convert_move (SUBREG_REG (target), temp,
5144 SUBREG_PROMOTED_UNSIGNED_P (target));
5146 return NULL_RTX;
5148 else if ((TREE_CODE (exp) == STRING_CST
5149 || (TREE_CODE (exp) == MEM_REF
5150 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5151 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5152 == STRING_CST
5153 && integer_zerop (TREE_OPERAND (exp, 1))))
5154 && !nontemporal && !call_param_p
5155 && MEM_P (target))
5157 /* Optimize initialization of an array with a STRING_CST. */
5158 HOST_WIDE_INT exp_len, str_copy_len;
5159 rtx dest_mem;
5160 tree str = TREE_CODE (exp) == STRING_CST
5161 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5163 exp_len = int_expr_size (exp);
5164 if (exp_len <= 0)
5165 goto normal_expr;
5167 if (TREE_STRING_LENGTH (str) <= 0)
5168 goto normal_expr;
5170 str_copy_len = strlen (TREE_STRING_POINTER (str));
5171 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5172 goto normal_expr;
5174 str_copy_len = TREE_STRING_LENGTH (str);
5175 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5176 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5178 str_copy_len += STORE_MAX_PIECES - 1;
5179 str_copy_len &= ~(STORE_MAX_PIECES - 1);
5181 str_copy_len = MIN (str_copy_len, exp_len);
5182 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5183 CONST_CAST (char *, TREE_STRING_POINTER (str)),
5184 MEM_ALIGN (target), false))
5185 goto normal_expr;
5187 dest_mem = target;
5189 dest_mem = store_by_pieces (dest_mem,
5190 str_copy_len, builtin_strncpy_read_str,
5191 CONST_CAST (char *,
5192 TREE_STRING_POINTER (str)),
5193 MEM_ALIGN (target), false,
5194 exp_len > str_copy_len ? 1 : 0);
5195 if (exp_len > str_copy_len)
5196 clear_storage (adjust_address (dest_mem, BLKmode, 0),
5197 GEN_INT (exp_len - str_copy_len),
5198 BLOCK_OP_NORMAL);
5199 return NULL_RTX;
5201 else
5203 rtx tmp_target;
5205 normal_expr:
5206 /* If we want to use a nontemporal store, force the value to
5207 a register first. */
5208 tmp_target = nontemporal ? NULL_RTX : target;
5209 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5210 (call_param_p
5211 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
5212 &alt_rtl);
5215 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5216 the same as that of TARGET, adjust the constant. This is needed, for
5217 example, in case it is a CONST_DOUBLE and we want only a word-sized
5218 value. */
5219 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5220 && TREE_CODE (exp) != ERROR_MARK
5221 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5222 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5223 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5225 /* If value was not generated in the target, store it there.
5226 Convert the value to TARGET's type first if necessary and emit the
5227 pending incrementations that have been queued when expanding EXP.
5228 Note that we cannot emit the whole queue blindly because this will
5229 effectively disable the POST_INC optimization later.
5231 If TEMP and TARGET compare equal according to rtx_equal_p, but
5232 one or both of them are volatile memory refs, we have to distinguish
5233 two cases:
5234 - expand_expr has used TARGET. In this case, we must not generate
5235 another copy. This can be detected by TARGET being equal according
5236 to ==.
5237 - expand_expr has not used TARGET - that means that the source just
5238 happens to have the same RTX form. Since temp will have been created
5239 by expand_expr, it will compare unequal according to ==.
5240 We must generate a copy in this case, to reach the correct number
5241 of volatile memory references. */
5243 if ((! rtx_equal_p (temp, target)
5244 || (temp != target && (side_effects_p (temp)
5245 || side_effects_p (target))))
5246 && TREE_CODE (exp) != ERROR_MARK
5247 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5248 but TARGET is not a valid memory reference, TEMP will differ
5249 from TARGET although it is really the same location. */
5250 && !(alt_rtl
5251 && rtx_equal_p (alt_rtl, target)
5252 && !side_effects_p (alt_rtl)
5253 && !side_effects_p (target))
5254 /* If there's nothing to copy, don't bother. Don't call
5255 expr_size unless necessary, because some front-ends' (C++)
5256 expr_size hook must not be given objects that are not
5257 supposed to be bit-copied or bit-initialized. */
5258 && expr_size (exp) != const0_rtx)
5260 if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5262 if (GET_MODE (target) == BLKmode)
5264 /* Handle calls that return BLKmode values in registers. */
5265 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5266 copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5267 else
5268 store_bit_field (target,
5269 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5270 0, 0, 0, GET_MODE (temp), temp);
5272 else
5273 convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5276 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5278 /* Handle copying a string constant into an array. The string
5279 constant may be shorter than the array. So copy just the string's
5280 actual length, and clear the rest. First get the size of the data
5281 type of the string, which is actually the size of the target. */
5282 rtx size = expr_size (exp);
5284 if (CONST_INT_P (size)
5285 && INTVAL (size) < TREE_STRING_LENGTH (exp))
5286 emit_block_move (target, temp, size,
5287 (call_param_p
5288 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5289 else
5291 enum machine_mode pointer_mode
5292 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5293 enum machine_mode address_mode = get_address_mode (target);
5295 /* Compute the size of the data to copy from the string. */
5296 tree copy_size
5297 = size_binop_loc (loc, MIN_EXPR,
5298 make_tree (sizetype, size),
5299 size_int (TREE_STRING_LENGTH (exp)));
5300 rtx copy_size_rtx
5301 = expand_expr (copy_size, NULL_RTX, VOIDmode,
5302 (call_param_p
5303 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
5304 rtx label = 0;
5306 /* Copy that much. */
5307 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5308 TYPE_UNSIGNED (sizetype));
5309 emit_block_move (target, temp, copy_size_rtx,
5310 (call_param_p
5311 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5313 /* Figure out how much is left in TARGET that we have to clear.
5314 Do all calculations in pointer_mode. */
5315 if (CONST_INT_P (copy_size_rtx))
5317 size = plus_constant (address_mode, size,
5318 -INTVAL (copy_size_rtx));
5319 target = adjust_address (target, BLKmode,
5320 INTVAL (copy_size_rtx));
5322 else
5324 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5325 copy_size_rtx, NULL_RTX, 0,
5326 OPTAB_LIB_WIDEN);
5328 if (GET_MODE (copy_size_rtx) != address_mode)
5329 copy_size_rtx = convert_to_mode (address_mode,
5330 copy_size_rtx,
5331 TYPE_UNSIGNED (sizetype));
5333 target = offset_address (target, copy_size_rtx,
5334 highest_pow2_factor (copy_size));
5335 label = gen_label_rtx ();
5336 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5337 GET_MODE (size), 0, label);
5340 if (size != const0_rtx)
5341 clear_storage (target, size, BLOCK_OP_NORMAL);
5343 if (label)
5344 emit_label (label);
5347 /* Handle calls that return values in multiple non-contiguous locations.
5348 The Irix 6 ABI has examples of this. */
5349 else if (GET_CODE (target) == PARALLEL)
5351 if (GET_CODE (temp) == PARALLEL)
5352 emit_group_move (target, temp);
5353 else
5354 emit_group_load (target, temp, TREE_TYPE (exp),
5355 int_size_in_bytes (TREE_TYPE (exp)));
5357 else if (GET_CODE (temp) == PARALLEL)
5358 emit_group_store (target, temp, TREE_TYPE (exp),
5359 int_size_in_bytes (TREE_TYPE (exp)));
5360 else if (GET_MODE (temp) == BLKmode)
5361 emit_block_move (target, temp, expr_size (exp),
5362 (call_param_p
5363 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5364 /* If we emit a nontemporal store, there is nothing else to do. */
5365 else if (nontemporal && emit_storent_insn (target, temp))
5367 else
5369 temp = force_operand (temp, target);
5370 if (temp != target)
5371 emit_move_insn (target, temp);
5375 return NULL_RTX;
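/* Illustration of the STRING_CST fast path above (not from the
   original source): for

     char buf[8] = "hi";

   EXP_LEN is 8 and the string contributes 3 bytes including the
   terminating NUL; store_by_pieces copies the (possibly rounded-up)
   prefix and the trailing clear_storage zeroes whatever part of the
   8 bytes remains.  */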
5378 /* Return true if field F of structure TYPE is a flexible array. */
5380 static bool
5381 flexible_array_member_p (const_tree f, const_tree type)
5383 const_tree tf;
5385 tf = TREE_TYPE (f);
5386 return (DECL_CHAIN (f) == NULL
5387 && TREE_CODE (tf) == ARRAY_TYPE
5388 && TYPE_DOMAIN (tf)
5389 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5390 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5391 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5392 && int_size_in_bytes (type) >= 0);
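/* For example (illustrative):

     struct S { int n; int data[]; };    data is a flexible array
     struct T { int n; int data[4]; };   data is not

   F qualifies only when it is the last field, its type is an array
   with a zero lower bound and no upper bound, and the enclosing TYPE
   still has a known constant size.  */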
5395 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5396 must have in order for it to completely initialize a value of type TYPE.
5397 Return -1 if the number isn't known.
5399 If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE. */
5401 static HOST_WIDE_INT
5402 count_type_elements (const_tree type, bool for_ctor_p)
5404 switch (TREE_CODE (type))
5406 case ARRAY_TYPE:
5408 tree nelts;
5410 nelts = array_type_nelts (type);
5411 if (nelts && host_integerp (nelts, 1))
5413 unsigned HOST_WIDE_INT n;
5415 n = tree_low_cst (nelts, 1) + 1;
5416 if (n == 0 || for_ctor_p)
5417 return n;
5418 else
5419 return n * count_type_elements (TREE_TYPE (type), false);
5421 return for_ctor_p ? -1 : 1;
5424 case RECORD_TYPE:
5426 unsigned HOST_WIDE_INT n;
5427 tree f;
5429 n = 0;
5430 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5431 if (TREE_CODE (f) == FIELD_DECL)
5433 if (!for_ctor_p)
5434 n += count_type_elements (TREE_TYPE (f), false);
5435 else if (!flexible_array_member_p (f, type))
5436 /* Don't count flexible arrays, which are not supposed
5437 to be initialized. */
5438 n += 1;
5441 return n;
5444 case UNION_TYPE:
5445 case QUAL_UNION_TYPE:
5447 tree f;
5448 HOST_WIDE_INT n, m;
5450 gcc_assert (!for_ctor_p);
5451 /* Estimate the number of scalars in each field and pick the
5452 maximum. Other estimates would do instead; the idea is simply
5453 to make sure that the estimate is not sensitive to the ordering
5454 of the fields. */
5455 n = 1;
5456 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5457 if (TREE_CODE (f) == FIELD_DECL)
5459 m = count_type_elements (TREE_TYPE (f), false);
5460 /* If the field doesn't span the whole union, add an extra
5461 scalar for the rest. */
5462 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5463 TYPE_SIZE (type)) != 1)
5464 m++;
5465 if (n < m)
5466 n = m;
5468 return n;
5471 case COMPLEX_TYPE:
5472 return 2;
5474 case VECTOR_TYPE:
5475 return TYPE_VECTOR_SUBPARTS (type);
5477 case INTEGER_TYPE:
5478 case REAL_TYPE:
5479 case FIXED_POINT_TYPE:
5480 case ENUMERAL_TYPE:
5481 case BOOLEAN_TYPE:
5482 case POINTER_TYPE:
5483 case OFFSET_TYPE:
5484 case REFERENCE_TYPE:
5485 case NULLPTR_TYPE:
5486 return 1;
5488 case ERROR_MARK:
5489 return 0;
5491 case VOID_TYPE:
5492 case METHOD_TYPE:
5493 case FUNCTION_TYPE:
5494 case LANG_TYPE:
5495 default:
5496 gcc_unreachable ();
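/* Examples (illustrative): with FOR_CTOR_P false,

     struct { int a; int b[3]; }   counts 1 + 3 = 4 scalars;
     union { char c; double d; }   counts 2, since C does not span
                                   the whole union and gets an extra
                                   scalar for the rest.

   With FOR_CTOR_P true the struct needs 2 top-level elements (B is a
   single element there), and an array of unknown length yields -1.  */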
5500 /* Helper for categorize_ctor_elements. Identical interface. */
5502 static bool
5503 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5504 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5506 unsigned HOST_WIDE_INT idx;
5507 HOST_WIDE_INT nz_elts, init_elts, num_fields;
5508 tree value, purpose, elt_type;
5510 /* Whether CTOR is a valid constant initializer, in accordance with what
5511 initializer_constant_valid_p does. If inferred from the constructor
5512 elements, true until proven otherwise. */
5513 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5514 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5516 nz_elts = 0;
5517 init_elts = 0;
5518 num_fields = 0;
5519 elt_type = NULL_TREE;
5521 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5523 HOST_WIDE_INT mult = 1;
5525 if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5527 tree lo_index = TREE_OPERAND (purpose, 0);
5528 tree hi_index = TREE_OPERAND (purpose, 1);
5530 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5531 mult = (tree_low_cst (hi_index, 1)
5532 - tree_low_cst (lo_index, 1) + 1);
5534 num_fields += mult;
5535 elt_type = TREE_TYPE (value);
5537 switch (TREE_CODE (value))
5539 case CONSTRUCTOR:
5541 HOST_WIDE_INT nz = 0, ic = 0;
5543 bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5544 p_complete);
5546 nz_elts += mult * nz;
5547 init_elts += mult * ic;
5549 if (const_from_elts_p && const_p)
5550 const_p = const_elt_p;
5552 break;
5554 case INTEGER_CST:
5555 case REAL_CST:
5556 case FIXED_CST:
5557 if (!initializer_zerop (value))
5558 nz_elts += mult;
5559 init_elts += mult;
5560 break;
5562 case STRING_CST:
5563 nz_elts += mult * TREE_STRING_LENGTH (value);
5564 init_elts += mult * TREE_STRING_LENGTH (value);
5565 break;
5567 case COMPLEX_CST:
5568 if (!initializer_zerop (TREE_REALPART (value)))
5569 nz_elts += mult;
5570 if (!initializer_zerop (TREE_IMAGPART (value)))
5571 nz_elts += mult;
5572 init_elts += mult;
5573 break;
5575 case VECTOR_CST:
5577 unsigned i;
5578 for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5580 tree v = VECTOR_CST_ELT (value, i);
5581 if (!initializer_zerop (v))
5582 nz_elts += mult;
5583 init_elts += mult;
5586 break;
5588 default:
5590 HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5591 nz_elts += mult * tc;
5592 init_elts += mult * tc;
5594 if (const_from_elts_p && const_p)
5595 const_p = initializer_constant_valid_p (value, elt_type)
5596 != NULL_TREE;
5598 break;
5602 if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5603 num_fields, elt_type))
5604 *p_complete = false;
5606 *p_nz_elts += nz_elts;
5607 *p_init_elts += init_elts;
5609 return const_p;
5612 /* Examine CTOR to discover:
5613 * how many scalar fields are set to nonzero values,
5614 and place it in *P_NZ_ELTS;
5615 * how many scalar fields in total are in CTOR,
5616 and place it in *P_INIT_ELTS.
5617 * whether the constructor is complete -- in the sense that every
5618 meaningful byte is explicitly given a value --
5619 and place it in *P_COMPLETE.
5621 Return whether or not CTOR is a valid static constant initializer, the same
5622 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5624 bool
5625 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5626 HOST_WIDE_INT *p_init_elts, bool *p_complete)
5628 *p_nz_elts = 0;
5629 *p_init_elts = 0;
5630 *p_complete = true;
5632 return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
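/* Worked example (a sketch, not generated output): given
       struct { int a[3]; int b; } s = { { 1, 0, 2 }, 0 };
   the recursion above yields *P_INIT_ELTS == 4 and *P_NZ_ELTS == 2,
   and *P_COMPLETE stays true since all four scalars are covered.  */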
5635 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5636 of which had type LAST_TYPE. Each element was itself a complete
5637 initializer, in the sense that every meaningful byte was explicitly
5638 given a value. Return true if the same is true for the constructor
5639 as a whole. */
5641 bool
5642 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5643 const_tree last_type)
5645 if (TREE_CODE (type) == UNION_TYPE
5646 || TREE_CODE (type) == QUAL_UNION_TYPE)
5648 if (num_elts == 0)
5649 return false;
5651 gcc_assert (num_elts == 1 && last_type);
5653 /* ??? We could look at each element of the union, and find the
5654 largest element. Which would avoid comparing the size of the
5655 initialized element against any tail padding in the union.
5656 Doesn't seem worth the effort... */
5657 return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5660 return count_type_elements (type, true) == num_elts;
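/* E.g. (a sketch, not generated output): for
       union { char c; int i; } u = { .c = 1 };
   the single element covers only 1 of the union's (typically) 4 bytes,
   so the size comparison above reports the constructor incomplete.  */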
5663 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5665 static int
5666 mostly_zeros_p (const_tree exp)
5668 if (TREE_CODE (exp) == CONSTRUCTOR)
5670 HOST_WIDE_INT nz_elts, init_elts;
5671 bool complete_p;
5673 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5674 return !complete_p || nz_elts < init_elts / 4;
5677 return initializer_zerop (exp);
5680 /* Return 1 if EXP contains all zeros. */
5682 static int
5683 all_zeros_p (const_tree exp)
5685 if (TREE_CODE (exp) == CONSTRUCTOR)
5687 HOST_WIDE_INT nz_elts, init_elts;
5688 bool complete_p;
5690 categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5691 return nz_elts == 0;
5694 return initializer_zerop (exp);
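/* A minimal standalone model (hypothetical helper, plain C) of the
   3/4 heuristic above, ignoring the completeness check: clearing the
   target first pays off once the nonzero entries fall below a quarter
   of the initialized ones.  */

#include <stdbool.h>
#include <stddef.h>

static bool
mostly_zeros (const int *v, size_t n)
{
  size_t nz = 0;
  for (size_t i = 0; i < n; i++)
    nz += (v[i] != 0);
  return nz < n / 4;	/* Same test as nz_elts < init_elts / 4.  */
}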
5697 /* Helper function for store_constructor.
5698 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5699 CLEARED is as for store_constructor.
5700 ALIAS_SET is the alias set to use for any stores.
5702 This provides a recursive shortcut back to store_constructor when it isn't
5703 necessary to go through store_field. This is so that we can pass through
5704 the cleared field to let store_constructor know that we may not have to
5705 clear a substructure if the outer structure has already been cleared. */
5707 static void
5708 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5709 HOST_WIDE_INT bitpos, enum machine_mode mode,
5710 tree exp, int cleared, alias_set_type alias_set)
5712 if (TREE_CODE (exp) == CONSTRUCTOR
5713 /* We can only call store_constructor recursively if the size and
5714 bit position are on a byte boundary. */
5715 && bitpos % BITS_PER_UNIT == 0
5716 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5717 /* If we have a nonzero bitpos for a register target, then we just
5718 let store_field do the bitfield handling. This is unlikely to
5719 generate unnecessary clear instructions anyways. */
5720 && (bitpos == 0 || MEM_P (target)))
5722 if (MEM_P (target))
5723 target
5724 = adjust_address (target,
5725 GET_MODE (target) == BLKmode
5726 || 0 != (bitpos
5727 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5728 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5731 /* Update the alias set, if required. */
5732 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5733 && MEM_ALIAS_SET (target) != 0)
5735 target = copy_rtx (target);
5736 set_mem_alias_set (target, alias_set);
5739 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5741 else
5742 store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5746 /* Returns the number of FIELD_DECLs in TYPE. */
5748 static int
5749 fields_length (const_tree type)
5751 tree t = TYPE_FIELDS (type);
5752 int count = 0;
5754 for (; t; t = DECL_CHAIN (t))
5755 if (TREE_CODE (t) == FIELD_DECL)
5756 ++count;
5758 return count;
5762 /* Store the value of constructor EXP into the rtx TARGET.
5763 TARGET is either a REG or a MEM; we know it cannot conflict, since
5764 safe_from_p has been called.
5765 CLEARED is true if TARGET is known to have been zero'd.
5766 SIZE is the number of bytes of TARGET we are allowed to modify: this
5767 may not be the same as the size of EXP if we are assigning to a field
5768 which has been packed to exclude padding bits. */
5770 static void
5771 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5773 tree type = TREE_TYPE (exp);
5774 #ifdef WORD_REGISTER_OPERATIONS
5775 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5776 #endif
5778 switch (TREE_CODE (type))
5780 case RECORD_TYPE:
5781 case UNION_TYPE:
5782 case QUAL_UNION_TYPE:
5784 unsigned HOST_WIDE_INT idx;
5785 tree field, value;
5787 /* If size is zero or the target is already cleared, do nothing. */
5788 if (size == 0 || cleared)
5789 cleared = 1;
5790 /* We either clear the aggregate or indicate the value is dead. */
5791 else if ((TREE_CODE (type) == UNION_TYPE
5792 || TREE_CODE (type) == QUAL_UNION_TYPE)
5793 && ! CONSTRUCTOR_ELTS (exp))
5794 /* If the constructor is empty, clear the union. */
5796 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5797 cleared = 1;
5800 /* If we are building a static constructor into a register,
5801 set the initial value as zero so we can fold the value into
5802 a constant. But if more than one register is involved,
5803 this probably loses. */
5804 else if (REG_P (target) && TREE_STATIC (exp)
5805 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5807 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5808 cleared = 1;
5811 /* If the constructor has fewer fields than the structure or
5812 if we are initializing the structure to mostly zeros, clear
5813 the whole structure first. Don't do this if TARGET is a
5814 register whose mode size isn't equal to SIZE since
5815 clear_storage can't handle this case. */
5816 else if (size > 0
5817 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5818 != fields_length (type))
5819 || mostly_zeros_p (exp))
5820 && (!REG_P (target)
5821 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5822 == size)))
5824 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5825 cleared = 1;
5828 if (REG_P (target) && !cleared)
5829 emit_clobber (target);
5831 /* Store each element of the constructor into the
5832 corresponding field of TARGET. */
5833 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5835 enum machine_mode mode;
5836 HOST_WIDE_INT bitsize;
5837 HOST_WIDE_INT bitpos = 0;
5838 tree offset;
5839 rtx to_rtx = target;
5841 /* Just ignore missing fields. We cleared the whole
5842 structure, above, if any fields are missing. */
5843 if (field == 0)
5844 continue;
5846 if (cleared && initializer_zerop (value))
5847 continue;
5849 if (host_integerp (DECL_SIZE (field), 1))
5850 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5851 else
5852 bitsize = -1;
5854 mode = DECL_MODE (field);
5855 if (DECL_BIT_FIELD (field))
5856 mode = VOIDmode;
5858 offset = DECL_FIELD_OFFSET (field);
5859 if (host_integerp (offset, 0)
5860 && host_integerp (bit_position (field), 0))
5862 bitpos = int_bit_position (field);
5863 offset = 0;
5865 else
5866 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5868 if (offset)
5870 enum machine_mode address_mode;
5871 rtx offset_rtx;
5873 offset
5874 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5875 make_tree (TREE_TYPE (exp),
5876 target));
5878 offset_rtx = expand_normal (offset);
5879 gcc_assert (MEM_P (to_rtx));
5881 address_mode = get_address_mode (to_rtx);
5882 if (GET_MODE (offset_rtx) != address_mode)
5883 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5885 to_rtx = offset_address (to_rtx, offset_rtx,
5886 highest_pow2_factor (offset));
5889 #ifdef WORD_REGISTER_OPERATIONS
5890 /* If this initializes a field that is smaller than a
5891 word, at the start of a word, try to widen it to a full
5892 word. This special case allows us to output C++ member
5893 function initializations in a form that the optimizers
5894 can understand. */
5895 if (REG_P (target)
5896 && bitsize < BITS_PER_WORD
5897 && bitpos % BITS_PER_WORD == 0
5898 && GET_MODE_CLASS (mode) == MODE_INT
5899 && TREE_CODE (value) == INTEGER_CST
5900 && exp_size >= 0
5901 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5903 tree type = TREE_TYPE (value);
5905 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5907 type = lang_hooks.types.type_for_mode
5908 (word_mode, TYPE_UNSIGNED (type));
5909 value = fold_convert (type, value);
5912 if (BYTES_BIG_ENDIAN)
5913 value
5914 = fold_build2 (LSHIFT_EXPR, type, value,
5915 build_int_cst (type,
5916 BITS_PER_WORD - bitsize));
5917 bitsize = BITS_PER_WORD;
5918 mode = word_mode;
5920 #endif
5922 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5923 && DECL_NONADDRESSABLE_P (field))
5925 to_rtx = copy_rtx (to_rtx);
5926 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5929 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5930 value, cleared,
5931 get_alias_set (TREE_TYPE (field)));
5933 break;
5935 case ARRAY_TYPE:
5937 tree value, index;
5938 unsigned HOST_WIDE_INT i;
5939 int need_to_clear;
5940 tree domain;
5941 tree elttype = TREE_TYPE (type);
5942 int const_bounds_p;
5943 HOST_WIDE_INT minelt = 0;
5944 HOST_WIDE_INT maxelt = 0;
5946 domain = TYPE_DOMAIN (type);
5947 const_bounds_p = (TYPE_MIN_VALUE (domain)
5948 && TYPE_MAX_VALUE (domain)
5949 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5950 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5952 /* If we have constant bounds for the range of the type, get them. */
5953 if (const_bounds_p)
5955 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5956 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5959 /* If the constructor has fewer elements than the array, clear
5960 the whole array first. Similarly if this is a static
5961 constructor of a non-BLKmode object. */
5962 if (cleared)
5963 need_to_clear = 0;
5964 else if (REG_P (target) && TREE_STATIC (exp))
5965 need_to_clear = 1;
5966 else
5968 unsigned HOST_WIDE_INT idx;
5969 tree index, value;
5970 HOST_WIDE_INT count = 0, zero_count = 0;
5971 need_to_clear = ! const_bounds_p;
5973 /* This loop is a more accurate version of the loop in
5974 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5975 is also needed to check for missing elements. */
5976 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5978 HOST_WIDE_INT this_node_count;
5980 if (need_to_clear)
5981 break;
5983 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5985 tree lo_index = TREE_OPERAND (index, 0);
5986 tree hi_index = TREE_OPERAND (index, 1);
5988 if (! host_integerp (lo_index, 1)
5989 || ! host_integerp (hi_index, 1))
5991 need_to_clear = 1;
5992 break;
5995 this_node_count = (tree_low_cst (hi_index, 1)
5996 - tree_low_cst (lo_index, 1) + 1);
5998 else
5999 this_node_count = 1;
6001 count += this_node_count;
6002 if (mostly_zeros_p (value))
6003 zero_count += this_node_count;
6006 /* Clear the entire array first if there are any missing
6007 elements, or if the incidence of zero elements is >=
6008 75%. */
6009 if (! need_to_clear
6010 && (count < maxelt - minelt + 1
6011 || 4 * zero_count >= 3 * count))
6012 need_to_clear = 1;
6015 if (need_to_clear && size > 0)
6017 if (REG_P (target))
6018 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6019 else
6020 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6021 cleared = 1;
6024 if (!cleared && REG_P (target))
6025 /* Inform later passes that the old value is dead. */
6026 emit_clobber (target);
6028 /* Store each element of the constructor into the
6029 corresponding element of TARGET, determined by counting the
6030 elements. */
6031 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6033 enum machine_mode mode;
6034 HOST_WIDE_INT bitsize;
6035 HOST_WIDE_INT bitpos;
6036 rtx xtarget = target;
6038 if (cleared && initializer_zerop (value))
6039 continue;
6041 mode = TYPE_MODE (elttype);
6042 if (mode == BLKmode)
6043 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6044 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6045 : -1);
6046 else
6047 bitsize = GET_MODE_BITSIZE (mode);
6049 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6051 tree lo_index = TREE_OPERAND (index, 0);
6052 tree hi_index = TREE_OPERAND (index, 1);
6053 rtx index_r, pos_rtx;
6054 HOST_WIDE_INT lo, hi, count;
6055 tree position;
6057 /* If the range is constant and "small", unroll the loop. */
6058 if (const_bounds_p
6059 && host_integerp (lo_index, 0)
6060 && host_integerp (hi_index, 0)
6061 && (lo = tree_low_cst (lo_index, 0),
6062 hi = tree_low_cst (hi_index, 0),
6063 count = hi - lo + 1,
6064 (!MEM_P (target)
6065 || count <= 2
6066 || (host_integerp (TYPE_SIZE (elttype), 1)
6067 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6068 <= 40 * 8)))))
6070 lo -= minelt; hi -= minelt;
6071 for (; lo <= hi; lo++)
6073 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6075 if (MEM_P (target)
6076 && !MEM_KEEP_ALIAS_SET_P (target)
6077 && TREE_CODE (type) == ARRAY_TYPE
6078 && TYPE_NONALIASED_COMPONENT (type))
6080 target = copy_rtx (target);
6081 MEM_KEEP_ALIAS_SET_P (target) = 1;
6084 store_constructor_field
6085 (target, bitsize, bitpos, mode, value, cleared,
6086 get_alias_set (elttype));
6089 else
6091 rtx loop_start = gen_label_rtx ();
6092 rtx loop_end = gen_label_rtx ();
6093 tree exit_cond;
6095 expand_normal (hi_index);
6097 index = build_decl (EXPR_LOCATION (exp),
6098 VAR_DECL, NULL_TREE, domain);
6099 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6100 SET_DECL_RTL (index, index_r);
6101 store_expr (lo_index, index_r, 0, false);
6103 /* Build the head of the loop. */
6104 do_pending_stack_adjust ();
6105 emit_label (loop_start);
6107 /* Assign value to element index. */
6108 position =
6109 fold_convert (ssizetype,
6110 fold_build2 (MINUS_EXPR,
6111 TREE_TYPE (index),
6112 index,
6113 TYPE_MIN_VALUE (domain)));
6115 position =
6116 size_binop (MULT_EXPR, position,
6117 fold_convert (ssizetype,
6118 TYPE_SIZE_UNIT (elttype)));
6120 pos_rtx = expand_normal (position);
6121 xtarget = offset_address (target, pos_rtx,
6122 highest_pow2_factor (position));
6123 xtarget = adjust_address (xtarget, mode, 0);
6124 if (TREE_CODE (value) == CONSTRUCTOR)
6125 store_constructor (value, xtarget, cleared,
6126 bitsize / BITS_PER_UNIT);
6127 else
6128 store_expr (value, xtarget, 0, false);
6130 /* Generate a conditional jump to exit the loop. */
6131 exit_cond = build2 (LT_EXPR, integer_type_node,
6132 index, hi_index);
6133 jumpif (exit_cond, loop_end, -1);
6135 /* Update the loop counter, and jump to the head of
6136 the loop. */
6137 expand_assignment (index,
6138 build2 (PLUS_EXPR, TREE_TYPE (index),
6139 index, integer_one_node),
6140 false);
6142 emit_jump (loop_start);
6144 /* Build the end of the loop. */
6145 emit_label (loop_end);
6148 else if ((index != 0 && ! host_integerp (index, 0))
6149 || ! host_integerp (TYPE_SIZE (elttype), 1))
6151 tree position;
6153 if (index == 0)
6154 index = ssize_int (1);
6156 if (minelt)
6157 index = fold_convert (ssizetype,
6158 fold_build2 (MINUS_EXPR,
6159 TREE_TYPE (index),
6160 index,
6161 TYPE_MIN_VALUE (domain)));
6163 position =
6164 size_binop (MULT_EXPR, index,
6165 fold_convert (ssizetype,
6166 TYPE_SIZE_UNIT (elttype)));
6167 xtarget = offset_address (target,
6168 expand_normal (position),
6169 highest_pow2_factor (position));
6170 xtarget = adjust_address (xtarget, mode, 0);
6171 store_expr (value, xtarget, 0, false);
6173 else
6175 if (index != 0)
6176 bitpos = ((tree_low_cst (index, 0) - minelt)
6177 * tree_low_cst (TYPE_SIZE (elttype), 1));
6178 else
6179 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6181 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6182 && TREE_CODE (type) == ARRAY_TYPE
6183 && TYPE_NONALIASED_COMPONENT (type))
6185 target = copy_rtx (target);
6186 MEM_KEEP_ALIAS_SET_P (target) = 1;
6188 store_constructor_field (target, bitsize, bitpos, mode, value,
6189 cleared, get_alias_set (elttype));
6192 break;
6195 case VECTOR_TYPE:
6197 unsigned HOST_WIDE_INT idx;
6198 constructor_elt *ce;
6199 int i;
6200 int need_to_clear;
6201 int icode = CODE_FOR_nothing;
6202 tree elttype = TREE_TYPE (type);
6203 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6204 enum machine_mode eltmode = TYPE_MODE (elttype);
6205 HOST_WIDE_INT bitsize;
6206 HOST_WIDE_INT bitpos;
6207 rtvec vector = NULL;
6208 unsigned n_elts;
6209 alias_set_type alias;
6211 gcc_assert (eltmode != BLKmode);
6213 n_elts = TYPE_VECTOR_SUBPARTS (type);
6214 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6216 enum machine_mode mode = GET_MODE (target);
6218 icode = (int) optab_handler (vec_init_optab, mode);
6219 if (icode != CODE_FOR_nothing)
6221 unsigned int i;
6223 vector = rtvec_alloc (n_elts);
6224 for (i = 0; i < n_elts; i++)
6225 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6229 /* If the constructor has fewer elements than the vector,
6230 clear the whole vector first. Similarly if this is a static
6231 constructor of a non-BLKmode object. */
6232 if (cleared)
6233 need_to_clear = 0;
6234 else if (REG_P (target) && TREE_STATIC (exp))
6235 need_to_clear = 1;
6236 else
6238 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6239 tree value;
6241 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6243 int n_elts_here = tree_low_cst
6244 (int_const_binop (TRUNC_DIV_EXPR,
6245 TYPE_SIZE (TREE_TYPE (value)),
6246 TYPE_SIZE (elttype)), 1);
6248 count += n_elts_here;
6249 if (mostly_zeros_p (value))
6250 zero_count += n_elts_here;
6253 /* Clear the entire vector first if there are any missing elements,
6254 or if the incidence of zero elements is >= 75%. */
6255 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6258 if (need_to_clear && size > 0 && !vector)
6260 if (REG_P (target))
6261 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6262 else
6263 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6264 cleared = 1;
6267 /* Inform later passes that the old value is dead. */
6268 if (!cleared && !vector && REG_P (target))
6269 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6271 if (MEM_P (target))
6272 alias = MEM_ALIAS_SET (target);
6273 else
6274 alias = get_alias_set (elttype);
6276 /* Store each element of the constructor into the corresponding
6277 element of TARGET, determined by counting the elements. */
6278 for (idx = 0, i = 0;
6279 vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6280 idx++, i += bitsize / elt_size)
6282 HOST_WIDE_INT eltpos;
6283 tree value = ce->value;
6285 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6286 if (cleared && initializer_zerop (value))
6287 continue;
6289 if (ce->index)
6290 eltpos = tree_low_cst (ce->index, 1);
6291 else
6292 eltpos = i;
6294 if (vector)
6296 /* Vector CONSTRUCTORs should only be built from smaller
6297 vectors in the case of BLKmode vectors. */
6298 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6299 RTVEC_ELT (vector, eltpos)
6300 = expand_normal (value);
6302 else
6304 enum machine_mode value_mode =
6305 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6306 ? TYPE_MODE (TREE_TYPE (value))
6307 : eltmode;
6308 bitpos = eltpos * elt_size;
6309 store_constructor_field (target, bitsize, bitpos, value_mode,
6310 value, cleared, alias);
6314 if (vector)
6315 emit_insn (GEN_FCN (icode)
6316 (target,
6317 gen_rtx_PARALLEL (GET_MODE (target), vector)));
6318 break;
6321 default:
6322 gcc_unreachable ();
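/* Worked example of the clearing strategy above (a sketch): for
       int v[8] = { [0] = 1 };
   the element walk finds count == 1 < 8 elements, so the whole array is
   cleared with one block operation and only v[0] is stored afterwards;
   a fully listed initializer with few zeros instead stores every
   element and emits no clear.  */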
6326 /* Store the value of EXP (an expression tree)
6327 into a subfield of TARGET which has mode MODE and occupies
6328 BITSIZE bits, starting BITPOS bits from the start of TARGET.
6329 If MODE is VOIDmode, it means that we are storing into a bit-field.
6331 BITREGION_START is bitpos of the first bitfield in this region.
6332 BITREGION_END is the bitpos of the ending bitfield in this region.
6333 These two fields are 0 if the C++ memory model does not apply,
6334 or we are not interested in keeping track of bitfield regions.
6336 Always return const0_rtx unless we have something particular to
6337 return.
6339 ALIAS_SET is the alias set for the destination. This value will
6340 (in general) be different from that for TARGET, since TARGET is a
6341 reference to the containing structure.
6343 If NONTEMPORAL is true, try generating a nontemporal store. */
6345 static rtx
6346 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6347 unsigned HOST_WIDE_INT bitregion_start,
6348 unsigned HOST_WIDE_INT bitregion_end,
6349 enum machine_mode mode, tree exp,
6350 alias_set_type alias_set, bool nontemporal)
6352 if (TREE_CODE (exp) == ERROR_MARK)
6353 return const0_rtx;
6355 /* If we have nothing to store, do nothing unless the expression has
6356 side-effects. */
6357 if (bitsize == 0)
6358 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6360 if (GET_CODE (target) == CONCAT)
6362 /* We're storing into a struct containing a single __complex. */
6364 gcc_assert (!bitpos);
6365 return store_expr (exp, target, 0, nontemporal);
6368 /* If the structure is in a register or if the component
6369 is a bit field, we cannot use addressing to access it.
6370 Use bit-field techniques or SUBREG to store in it. */
6372 if (mode == VOIDmode
6373 || (mode != BLKmode && ! direct_store[(int) mode]
6374 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6375 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6376 || REG_P (target)
6377 || GET_CODE (target) == SUBREG
6378 /* If the field isn't aligned enough to store as an ordinary memref,
6379 store it as a bit field. */
6380 || (mode != BLKmode
6381 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6382 || bitpos % GET_MODE_ALIGNMENT (mode))
6383 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6384 || (bitpos % BITS_PER_UNIT != 0)))
6385 || (bitsize >= 0 && mode != BLKmode
6386 && GET_MODE_BITSIZE (mode) > bitsize)
6387 /* If the RHS and field are a constant size and the size of the
6388 RHS isn't the same size as the bitfield, we must use bitfield
6389 operations. */
6390 || (bitsize >= 0
6391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6392 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6393 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6394 decl we must use bitfield operations. */
6395 || (bitsize >= 0
6396 && TREE_CODE (exp) == MEM_REF
6397 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6398 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6399 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6400 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6402 rtx temp;
6403 gimple nop_def;
6405 /* If EXP is a NOP_EXPR of precision less than its mode, then that
6406 implies a mask operation. If the precision is the same size as
6407 the field we're storing into, that mask is redundant. This is
6408 particularly common with bit field assignments generated by the
6409 C front end. */
6410 nop_def = get_def_for_expr (exp, NOP_EXPR);
6411 if (nop_def)
6413 tree type = TREE_TYPE (exp);
6414 if (INTEGRAL_TYPE_P (type)
6415 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6416 && bitsize == TYPE_PRECISION (type))
6418 tree op = gimple_assign_rhs1 (nop_def);
6419 type = TREE_TYPE (op);
6420 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6421 exp = op;
6425 temp = expand_normal (exp);
6427 /* If BITSIZE is narrower than the size of the type of EXP
6428 we will be narrowing TEMP. Normally, what's wanted are the
6429 low-order bits. However, if EXP's type is a record and this is a
6430 big-endian machine, we want the upper BITSIZE bits. */
6431 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6432 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6433 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6434 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6435 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6436 NULL_RTX, 1);
6438 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
6439 if (mode != VOIDmode && mode != BLKmode
6440 && mode != TYPE_MODE (TREE_TYPE (exp)))
6441 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6443 /* If the modes of TEMP and TARGET are both BLKmode, both
6444 must be in memory and BITPOS must be aligned on a byte
6445 boundary. If so, we simply do a block copy. Likewise
6446 for a BLKmode-like TARGET. */
6447 if (GET_MODE (temp) == BLKmode
6448 && (GET_MODE (target) == BLKmode
6449 || (MEM_P (target)
6450 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6451 && (bitpos % BITS_PER_UNIT) == 0
6452 && (bitsize % BITS_PER_UNIT) == 0)))
6454 gcc_assert (MEM_P (target) && MEM_P (temp)
6455 && (bitpos % BITS_PER_UNIT) == 0);
6457 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6458 emit_block_move (target, temp,
6459 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6460 / BITS_PER_UNIT),
6461 BLOCK_OP_NORMAL);
6463 return const0_rtx;
6466 /* Handle calls that return values in multiple non-contiguous locations.
6467 The Irix 6 ABI has examples of this. */
6468 if (GET_CODE (temp) == PARALLEL)
6470 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6471 rtx temp_target;
6472 if (mode == BLKmode)
6473 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6474 temp_target = gen_reg_rtx (mode);
6475 emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6476 temp = temp_target;
6478 else if (mode == BLKmode)
6480 /* Handle calls that return BLKmode values in registers. */
6481 if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6483 rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6484 copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6485 temp = temp_target;
6487 else
6489 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6490 rtx temp_target;
6491 mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6492 temp_target = gen_reg_rtx (mode);
6493 temp_target
6494 = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6495 temp_target, mode, mode);
6496 temp = temp_target;
6500 /* Store the value in the bitfield. */
6501 store_bit_field (target, bitsize, bitpos,
6502 bitregion_start, bitregion_end,
6503 mode, temp);
6505 return const0_rtx;
6507 else
6509 /* Now build a reference to just the desired component. */
6510 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6512 if (to_rtx == target)
6513 to_rtx = copy_rtx (to_rtx);
6515 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6516 set_mem_alias_set (to_rtx, alias_set);
6518 return store_expr (exp, to_rtx, 0, nontemporal);
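/* A minimal standalone sketch (plain C, not store_bit_field itself,
   and assuming bitpos + bitsize <= 32 on a little-endian target) of
   the mask-and-shift a bit-field store reduces to: replace BITSIZE
   bits at BITPOS within *WORD by VAL and leave the rest untouched.  */

#include <stdint.h>

static void
store_bits (uint32_t *word, unsigned bitpos, unsigned bitsize, uint32_t val)
{
  uint32_t field = bitsize >= 32 ? ~0u : ((1u << bitsize) - 1u);
  uint32_t mask = field << bitpos;
  *word = (*word & ~mask) | ((val << bitpos) & mask);
}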
6522 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6523 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6524 codes and find the ultimate containing object, which we return.
6526 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6527 bit position, and *PUNSIGNEDP to the signedness of the field.
6528 If the position of the field is variable, we store a tree
6529 giving the variable offset (in units) in *POFFSET.
6530 This offset is in addition to the bit position.
6531 If the position is not variable, we store 0 in *POFFSET.
6533 If any of the extraction expressions is volatile,
6534 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6536 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6537 Otherwise, it is a mode that can be used to access the field.
6539 If the field describes a variable-sized object, *PMODE is set to
6540 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6541 this case, but the address of the object can be found.
6543 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6544 look through nodes that serve as markers of a greater alignment than
6545 the one that can be deduced from the expression. These nodes make it
6546 possible for front-ends to prevent temporaries from being created by
6547 the middle-end on alignment considerations. For that purpose, the
6548 normal operating mode at high-level is to always pass FALSE so that
6549 the ultimate containing object is really returned; moreover, the
6550 associated predicate handled_component_p will always return TRUE
6551 on these nodes, thus indicating that they are essentially handled
6552 by get_inner_reference. TRUE should only be passed when the caller
6553 is scanning the expression in order to build another representation
6554 and specifically knows how to handle these nodes; as such, this is
6555 the normal operating mode in the RTL expanders. */
6557 tree
6558 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6559 HOST_WIDE_INT *pbitpos, tree *poffset,
6560 enum machine_mode *pmode, int *punsignedp,
6561 int *pvolatilep, bool keep_aligning)
6563 tree size_tree = 0;
6564 enum machine_mode mode = VOIDmode;
6565 bool blkmode_bitfield = false;
6566 tree offset = size_zero_node;
6567 double_int bit_offset = double_int_zero;
6569 /* First get the mode, signedness, and size. We do this from just the
6570 outermost expression. */
6571 *pbitsize = -1;
6572 if (TREE_CODE (exp) == COMPONENT_REF)
6574 tree field = TREE_OPERAND (exp, 1);
6575 size_tree = DECL_SIZE (field);
6576 if (flag_strict_volatile_bitfields > 0
6577 && TREE_THIS_VOLATILE (exp)
6578 && DECL_BIT_FIELD_TYPE (field)
6579 && DECL_MODE (field) != BLKmode)
6580 /* Volatile bitfields should be accessed in the mode of the
6581 field's type, not the mode computed based on the bit
6582 size. */
6583 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6584 else if (!DECL_BIT_FIELD (field))
6585 mode = DECL_MODE (field);
6586 else if (DECL_MODE (field) == BLKmode)
6587 blkmode_bitfield = true;
6589 *punsignedp = DECL_UNSIGNED (field);
6591 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6593 size_tree = TREE_OPERAND (exp, 1);
6594 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6595 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6597 /* For vector types, with the correct size of access, use the mode of
6598 the inner type. */
6599 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6600 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6601 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6602 mode = TYPE_MODE (TREE_TYPE (exp));
6604 else
6606 mode = TYPE_MODE (TREE_TYPE (exp));
6607 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6609 if (mode == BLKmode)
6610 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6611 else
6612 *pbitsize = GET_MODE_BITSIZE (mode);
6615 if (size_tree != 0)
6617 if (! host_integerp (size_tree, 1))
6618 mode = BLKmode, *pbitsize = -1;
6619 else
6620 *pbitsize = tree_low_cst (size_tree, 1);
6623 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6624 and find the ultimate containing object. */
6625 while (1)
6627 switch (TREE_CODE (exp))
6629 case BIT_FIELD_REF:
6630 bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6631 break;
6633 case COMPONENT_REF:
6635 tree field = TREE_OPERAND (exp, 1);
6636 tree this_offset = component_ref_field_offset (exp);
6638 /* If this field hasn't been filled in yet, don't go past it.
6639 This should only happen when folding expressions made during
6640 type construction. */
6641 if (this_offset == 0)
6642 break;
6644 offset = size_binop (PLUS_EXPR, offset, this_offset);
6645 bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6647 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6649 break;
6651 case ARRAY_REF:
6652 case ARRAY_RANGE_REF:
6654 tree index = TREE_OPERAND (exp, 1);
6655 tree low_bound = array_ref_low_bound (exp);
6656 tree unit_size = array_ref_element_size (exp);
6658 /* We assume all arrays have sizes that are a multiple of a byte.
6659 First subtract the lower bound, if any, in the type of the
6660 index, then convert to sizetype and multiply by the size of
6661 the array element. */
6662 if (! integer_zerop (low_bound))
6663 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6664 index, low_bound);
6666 offset = size_binop (PLUS_EXPR, offset,
6667 size_binop (MULT_EXPR,
6668 fold_convert (sizetype, index),
6669 unit_size));
6671 break;
6673 case REALPART_EXPR:
6674 break;
6676 case IMAGPART_EXPR:
6677 bit_offset += double_int::from_uhwi (*pbitsize);
6678 break;
6680 case VIEW_CONVERT_EXPR:
6681 if (keep_aligning && STRICT_ALIGNMENT
6682 && (TYPE_ALIGN (TREE_TYPE (exp))
6683 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6684 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6685 < BIGGEST_ALIGNMENT)
6686 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6687 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6688 goto done;
6689 break;
6691 case MEM_REF:
6692 /* Hand back the decl for MEM[&decl, off]. */
6693 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6695 tree off = TREE_OPERAND (exp, 1);
6696 if (!integer_zerop (off))
6698 double_int boff, coff = mem_ref_offset (exp);
6699 boff = coff.lshift (BITS_PER_UNIT == 8
6700 ? 3 : exact_log2 (BITS_PER_UNIT));
6701 bit_offset += boff;
6703 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6705 goto done;
6707 default:
6708 goto done;
6711 /* If any reference in the chain is volatile, the effect is volatile. */
6712 if (TREE_THIS_VOLATILE (exp))
6713 *pvolatilep = 1;
6715 exp = TREE_OPERAND (exp, 0);
6717 done:
6719 /* If OFFSET is constant, see if we can return the whole thing as a
6720 constant bit position. Make sure to handle overflow during
6721 this conversion. */
6722 if (TREE_CODE (offset) == INTEGER_CST)
6724 double_int tem = tree_to_double_int (offset);
6725 tem = tem.sext (TYPE_PRECISION (sizetype));
6726 tem = tem.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
6727 tem += bit_offset;
6728 if (tem.fits_shwi ())
6730 *pbitpos = tem.to_shwi ();
6731 *poffset = offset = NULL_TREE;
6735 /* Otherwise, split it up. */
6736 if (offset)
6738 /* Avoid returning a negative bitpos as this may wreak havoc later. */
6739 if (bit_offset.is_negative ())
6741 double_int mask
6742 = double_int::mask (BITS_PER_UNIT == 8
6743 ? 3 : exact_log2 (BITS_PER_UNIT));
6744 double_int tem = bit_offset.and_not (mask);
6745 /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6746 Subtract it from BIT_OFFSET and add it (scaled) to OFFSET. */
6747 bit_offset -= tem;
6748 tem = tem.arshift (BITS_PER_UNIT == 8
6749 ? 3 : exact_log2 (BITS_PER_UNIT),
6750 HOST_BITS_PER_DOUBLE_INT);
6751 offset = size_binop (PLUS_EXPR, offset,
6752 double_int_to_tree (sizetype, tem));
6755 *pbitpos = bit_offset.to_shwi ();
6756 *poffset = offset;
6759 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6760 if (mode == VOIDmode
6761 && blkmode_bitfield
6762 && (*pbitpos % BITS_PER_UNIT) == 0
6763 && (*pbitsize % BITS_PER_UNIT) == 0)
6764 *pmode = BLKmode;
6765 else
6766 *pmode = mode;
6768 return exp;
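/* Standalone sketch of the splitting done above (assumes 8-bit units
   and an arithmetic right shift for negative values): a possibly
   negative bit offset divides into a byte part rounded towards -Inf
   plus a nonnegative bit remainder, which is what the and_not/arshift
   pair computes.  */

#include <stdio.h>

int
main (void)
{
  long long bit_offset = -13;
  long long bytes = bit_offset >> 3;	/* -2: rounded towards -Inf.  */
  long long bits = bit_offset - bytes * 8;	/* 3: always in [0, 7].  */
  printf ("%lld bytes + %lld bits\n", bytes, bits);
  return 0;
}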
6771 /* Return a tree of sizetype representing the size, in bytes, of the element
6772 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6774 tree
6775 array_ref_element_size (tree exp)
6777 tree aligned_size = TREE_OPERAND (exp, 3);
6778 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6779 location_t loc = EXPR_LOCATION (exp);
6781 /* If a size was specified in the ARRAY_REF, it's the size measured
6782 in alignment units of the element type. So multiply by that value. */
6783 if (aligned_size)
6785 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6786 sizetype from another type of the same width and signedness. */
6787 if (TREE_TYPE (aligned_size) != sizetype)
6788 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6789 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6790 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6793 /* Otherwise, take the size from that of the element type. Substitute
6794 any PLACEHOLDER_EXPR that we have. */
6795 else
6796 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6799 /* Return a tree representing the lower bound of the array mentioned in
6800 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6802 tree
6803 array_ref_low_bound (tree exp)
6805 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6807 /* If a lower bound is specified in EXP, use it. */
6808 if (TREE_OPERAND (exp, 2))
6809 return TREE_OPERAND (exp, 2);
6811 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6812 substituting for a PLACEHOLDER_EXPR as needed. */
6813 if (domain_type && TYPE_MIN_VALUE (domain_type))
6814 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6816 /* Otherwise, return a zero of the appropriate type. */
6817 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6820 /* Returns true if REF is an array reference to an array at the end of
6821 a structure. If this is the case, the array may be allocated larger
6822 than its upper bound implies. */
6824 bool
6825 array_at_struct_end_p (tree ref)
6827 if (TREE_CODE (ref) != ARRAY_REF
6828 && TREE_CODE (ref) != ARRAY_RANGE_REF)
6829 return false;
6831 while (handled_component_p (ref))
6833 /* If the reference chain contains a component reference to a
6834 non-union type, and another field follows it, the reference
6835 is not at the end of a structure. */
6836 if (TREE_CODE (ref) == COMPONENT_REF
6837 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6839 tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6840 while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6841 nextf = DECL_CHAIN (nextf);
6842 if (nextf)
6843 return false;
6846 ref = TREE_OPERAND (ref, 0);
6849 /* If the reference is based on a declared entity, the size of the array
6850 is constrained by its given domain. */
6851 if (DECL_P (ref))
6852 return false;
6854 return true;
6857 /* Return a tree representing the upper bound of the array mentioned in
6858 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6860 tree
6861 array_ref_up_bound (tree exp)
6863 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6865 /* If there is a domain type and it has an upper bound, use it, substituting
6866 for a PLACEHOLDER_EXPR as needed. */
6867 if (domain_type && TYPE_MAX_VALUE (domain_type))
6868 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6870 /* Otherwise fail. */
6871 return NULL_TREE;
6874 /* Return a tree representing the offset, in bytes, of the field referenced
6875 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6877 tree
6878 component_ref_field_offset (tree exp)
6880 tree aligned_offset = TREE_OPERAND (exp, 2);
6881 tree field = TREE_OPERAND (exp, 1);
6882 location_t loc = EXPR_LOCATION (exp);
6884 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6885 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6886 value. */
6887 if (aligned_offset)
6889 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6890 sizetype from another type of the same width and signedness. */
6891 if (TREE_TYPE (aligned_offset) != sizetype)
6892 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6893 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6894 size_int (DECL_OFFSET_ALIGN (field)
6895 / BITS_PER_UNIT));
6898 /* Otherwise, take the offset from that of the field. Substitute
6899 any PLACEHOLDER_EXPR that we have. */
6900 else
6901 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6904 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6906 static unsigned HOST_WIDE_INT
6907 target_align (const_tree target)
6909 /* We might have a chain of nested references with intermediate misaligning
6910 bitfield components, so we need to recurse to find out. */
6912 unsigned HOST_WIDE_INT this_align, outer_align;
6914 switch (TREE_CODE (target))
6916 case BIT_FIELD_REF:
6917 return 1;
6919 case COMPONENT_REF:
6920 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6921 outer_align = target_align (TREE_OPERAND (target, 0));
6922 return MIN (this_align, outer_align);
6924 case ARRAY_REF:
6925 case ARRAY_RANGE_REF:
6926 this_align = TYPE_ALIGN (TREE_TYPE (target));
6927 outer_align = target_align (TREE_OPERAND (target, 0));
6928 return MIN (this_align, outer_align);
6930 CASE_CONVERT:
6931 case NON_LVALUE_EXPR:
6932 case VIEW_CONVERT_EXPR:
6933 this_align = TYPE_ALIGN (TREE_TYPE (target));
6934 outer_align = target_align (TREE_OPERAND (target, 0));
6935 return MAX (this_align, outer_align);
6937 default:
6938 return TYPE_ALIGN (TREE_TYPE (target));
6943 /* Given an rtx VALUE that may contain additions and multiplications, return
6944 an equivalent value that just refers to a register, memory, or constant.
6945 This is done by generating instructions to perform the arithmetic and
6946 returning a pseudo-register containing the value.
6948 The returned value may be a REG, SUBREG, MEM or constant. */
6950 rtx
6951 force_operand (rtx value, rtx target)
6953 rtx op1, op2;
6954 /* Use subtarget as the target for operand 0 of a binary operation. */
6955 rtx subtarget = get_subtarget (target);
6956 enum rtx_code code = GET_CODE (value);
6958 /* Check for subreg applied to an expression produced by loop optimizer. */
6959 if (code == SUBREG
6960 && !REG_P (SUBREG_REG (value))
6961 && !MEM_P (SUBREG_REG (value)))
6963 value
6964 = simplify_gen_subreg (GET_MODE (value),
6965 force_reg (GET_MODE (SUBREG_REG (value)),
6966 force_operand (SUBREG_REG (value),
6967 NULL_RTX)),
6968 GET_MODE (SUBREG_REG (value)),
6969 SUBREG_BYTE (value));
6970 code = GET_CODE (value);
6973 /* Check for a PIC address load. */
6974 if ((code == PLUS || code == MINUS)
6975 && XEXP (value, 0) == pic_offset_table_rtx
6976 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6977 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6978 || GET_CODE (XEXP (value, 1)) == CONST))
6980 if (!subtarget)
6981 subtarget = gen_reg_rtx (GET_MODE (value));
6982 emit_move_insn (subtarget, value);
6983 return subtarget;
6986 if (ARITHMETIC_P (value))
6988 op2 = XEXP (value, 1);
6989 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6990 subtarget = 0;
6991 if (code == MINUS && CONST_INT_P (op2))
6993 code = PLUS;
6994 op2 = negate_rtx (GET_MODE (value), op2);
6997 /* Check for an addition with OP2 a constant integer and our first
6998 operand a PLUS of a virtual register and something else. In that
6999 case, we want to emit the sum of the virtual register and the
7000 constant first and then add the other value. This allows virtual
7001 register instantiation to simply modify the constant rather than
7002 creating another one around this addition. */
7003 if (code == PLUS && CONST_INT_P (op2)
7004 && GET_CODE (XEXP (value, 0)) == PLUS
7005 && REG_P (XEXP (XEXP (value, 0), 0))
7006 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7007 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7009 rtx temp = expand_simple_binop (GET_MODE (value), code,
7010 XEXP (XEXP (value, 0), 0), op2,
7011 subtarget, 0, OPTAB_LIB_WIDEN);
7012 return expand_simple_binop (GET_MODE (value), code, temp,
7013 force_operand (XEXP (XEXP (value,
7014 0), 1), 0),
7015 target, 0, OPTAB_LIB_WIDEN);
7018 op1 = force_operand (XEXP (value, 0), subtarget);
7019 op2 = force_operand (op2, NULL_RTX);
7020 switch (code)
7022 case MULT:
7023 return expand_mult (GET_MODE (value), op1, op2, target, 1);
7024 case DIV:
7025 if (!INTEGRAL_MODE_P (GET_MODE (value)))
7026 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7027 target, 1, OPTAB_LIB_WIDEN);
7028 else
7029 return expand_divmod (0,
7030 FLOAT_MODE_P (GET_MODE (value))
7031 ? RDIV_EXPR : TRUNC_DIV_EXPR,
7032 GET_MODE (value), op1, op2, target, 0);
7033 case MOD:
7034 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7035 target, 0);
7036 case UDIV:
7037 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7038 target, 1);
7039 case UMOD:
7040 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7041 target, 1);
7042 case ASHIFTRT:
7043 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7044 target, 0, OPTAB_LIB_WIDEN);
7045 default:
7046 return expand_simple_binop (GET_MODE (value), code, op1, op2,
7047 target, 1, OPTAB_LIB_WIDEN);
7050 if (UNARY_P (value))
7052 if (!target)
7053 target = gen_reg_rtx (GET_MODE (value));
7054 op1 = force_operand (XEXP (value, 0), NULL_RTX);
7055 switch (code)
7057 case ZERO_EXTEND:
7058 case SIGN_EXTEND:
7059 case TRUNCATE:
7060 case FLOAT_EXTEND:
7061 case FLOAT_TRUNCATE:
7062 convert_move (target, op1, code == ZERO_EXTEND);
7063 return target;
7065 case FIX:
7066 case UNSIGNED_FIX:
7067 expand_fix (target, op1, code == UNSIGNED_FIX);
7068 return target;
7070 case FLOAT:
7071 case UNSIGNED_FLOAT:
7072 expand_float (target, op1, code == UNSIGNED_FLOAT);
7073 return target;
7075 default:
7076 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7080 #ifdef INSN_SCHEDULING
7081 /* On machines that have insn scheduling, we want all memory references to be
7082 explicit, so we need to deal with paradoxical SUBREGs of MEMs here. */
7083 if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7084 value
7085 = simplify_gen_subreg (GET_MODE (value),
7086 force_reg (GET_MODE (SUBREG_REG (value)),
7087 force_operand (SUBREG_REG (value),
7088 NULL_RTX)),
7089 GET_MODE (SUBREG_REG (value)),
7090 SUBREG_BYTE (value));
7091 #endif
7093 return value;
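/* E.g. (a sketch in RTL notation, mirroring the comment above): for
       (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 8))
   the sum of the virtual register and the constant is emitted first, so
   instantiation can fold the 8 into the frame offset before R is added.  */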
7096 /* Subroutine of expand_expr: return nonzero iff there is no way that
7097 EXP can reference X, which is being modified. TOP_P is nonzero if this
7098 call is going to be used to determine whether we need a temporary
7099 for EXP, as opposed to a recursive call to this function.
7101 It is always safe for this routine to return zero since it merely
7102 searches for optimization opportunities. */
7104 static int
7105 safe_from_p (const_rtx x, tree exp, int top_p)
7107 rtx exp_rtl = 0;
7108 int i, nops;
7110 if (x == 0
7111 /* If EXP has varying size, we MUST use a target since we currently
7112 have no way of allocating temporaries of variable size
7113 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7114 So we assume here that something at a higher level has prevented a
7115 clash. This is somewhat bogus, but the best we can do. Only
7116 do this when X is BLKmode and when we are at the top level. */
7117 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7118 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7119 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7120 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7121 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7122 != INTEGER_CST)
7123 && GET_MODE (x) == BLKmode)
7124 /* If X is in the outgoing argument area, it is always safe. */
7125 || (MEM_P (x)
7126 && (XEXP (x, 0) == virtual_outgoing_args_rtx
7127 || (GET_CODE (XEXP (x, 0)) == PLUS
7128 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7129 return 1;
7131 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7132 find the underlying pseudo. */
7133 if (GET_CODE (x) == SUBREG)
7135 x = SUBREG_REG (x);
7136 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7137 return 0;
7140 /* Now look at our tree code and possibly recurse. */
7141 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7143 case tcc_declaration:
7144 exp_rtl = DECL_RTL_IF_SET (exp);
7145 break;
7147 case tcc_constant:
7148 return 1;
7150 case tcc_exceptional:
7151 if (TREE_CODE (exp) == TREE_LIST)
7153 while (1)
7155 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7156 return 0;
7157 exp = TREE_CHAIN (exp);
7158 if (!exp)
7159 return 1;
7160 if (TREE_CODE (exp) != TREE_LIST)
7161 return safe_from_p (x, exp, 0);
7164 else if (TREE_CODE (exp) == CONSTRUCTOR)
7166 constructor_elt *ce;
7167 unsigned HOST_WIDE_INT idx;
7169 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7170 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7171 || !safe_from_p (x, ce->value, 0))
7172 return 0;
7173 return 1;
7175 else if (TREE_CODE (exp) == ERROR_MARK)
7176 return 1; /* An already-visited SAVE_EXPR? */
7177 else
7178 return 0;
7180 case tcc_statement:
7181 /* The only case we look at here is the DECL_INITIAL inside a
7182 DECL_EXPR. */
7183 return (TREE_CODE (exp) != DECL_EXPR
7184 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7185 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7186 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7188 case tcc_binary:
7189 case tcc_comparison:
7190 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7191 return 0;
7192 /* Fall through. */
7194 case tcc_unary:
7195 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7197 case tcc_expression:
7198 case tcc_reference:
7199 case tcc_vl_exp:
7200 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
7201 the expression. If it is set, we conflict iff we are that rtx or
7202 both are in memory. Otherwise, we check all operands of the
7203 expression recursively. */
7205 switch (TREE_CODE (exp))
7207 case ADDR_EXPR:
7208 /* If the operand is static or we are static, we can't conflict.
7209 Likewise if we don't conflict with the operand at all. */
7210 if (staticp (TREE_OPERAND (exp, 0))
7211 || TREE_STATIC (exp)
7212 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7213 return 1;
7215 /* Otherwise, the only way this can conflict is if we are taking
7216 the address of a DECL whose address is part of X, which is
7217 very rare. */
7218 exp = TREE_OPERAND (exp, 0);
7219 if (DECL_P (exp))
7221 if (!DECL_RTL_SET_P (exp)
7222 || !MEM_P (DECL_RTL (exp)))
7223 return 0;
7224 else
7225 exp_rtl = XEXP (DECL_RTL (exp), 0);
7227 break;
7229 case MEM_REF:
7230 if (MEM_P (x)
7231 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7232 get_alias_set (exp)))
7233 return 0;
7234 break;
7236 case CALL_EXPR:
7237 /* Assume that the call will clobber all hard registers and
7238 all of memory. */
7239 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7240 || MEM_P (x))
7241 return 0;
7242 break;
7244 case WITH_CLEANUP_EXPR:
7245 case CLEANUP_POINT_EXPR:
7246 /* Lowered by gimplify.c. */
7247 gcc_unreachable ();
7249 case SAVE_EXPR:
7250 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7252 default:
7253 break;
7256 /* If we have an rtx, we do not need to scan our operands. */
7257 if (exp_rtl)
7258 break;
7260 nops = TREE_OPERAND_LENGTH (exp);
7261 for (i = 0; i < nops; i++)
7262 if (TREE_OPERAND (exp, i) != 0
7263 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7264 return 0;
7266 break;
7268 case tcc_type:
7269 /* Should never get a type here. */
7270 gcc_unreachable ();
7273 /* If we have an rtl, find any enclosed object. Then see if we conflict
7274 with it. */
7275 if (exp_rtl)
7277 if (GET_CODE (exp_rtl) == SUBREG)
7279 exp_rtl = SUBREG_REG (exp_rtl);
7280 if (REG_P (exp_rtl)
7281 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7282 return 0;
7285 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
7286 are memory and they conflict. */
7287 return ! (rtx_equal_p (x, exp_rtl)
7288 || (MEM_P (x) && MEM_P (exp_rtl)
7289 && true_dependence (exp_rtl, VOIDmode, x)));
7292 /* If we reach here, it is safe. */
7293 return 1;
7297 /* Return the highest power of two that EXP is known to be a multiple of.
7298 This is used in updating alignment of MEMs in array references. */
7300 unsigned HOST_WIDE_INT
7301 highest_pow2_factor (const_tree exp)
7303 unsigned HOST_WIDE_INT ret;
7304 int trailing_zeros = tree_ctz (exp);
7305 if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7306 return BIGGEST_ALIGNMENT;
7307 ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7308 if (ret > BIGGEST_ALIGNMENT)
7309 return BIGGEST_ALIGNMENT;
7310 return ret;
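/* A standalone equivalent (hypothetical helper, plain C) of the
   computation above for a 64-bit value: the highest power of two
   dividing X is its lowest set bit, capped at the alignment limit;
   zero is divisible by anything, so it simply gets the cap.  */

#include <stdint.h>

static uint64_t
highest_pow2_divisor (uint64_t x, uint64_t cap)
{
  if (x == 0)
    return cap;
  uint64_t p = x & -x;	/* Isolate the lowest set bit.  */
  return p > cap ? cap : p;
}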
7313 /* Similar, except that the alignment requirements of TARGET are
7314 taken into account. Assume it is at least as aligned as its
7315 type, unless it is a COMPONENT_REF in which case the layout of
7316 the structure gives the alignment. */
7318 static unsigned HOST_WIDE_INT
7319 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7321 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7322 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7324 return MAX (factor, talign);
7327 #ifdef HAVE_conditional_move
7328 /* Convert the tree comparison code TCODE to the rtl one where the
7329 signedness is UNSIGNEDP. */
7331 static enum rtx_code
7332 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7334 enum rtx_code code;
7335 switch (tcode)
7337 case EQ_EXPR:
7338 code = EQ;
7339 break;
7340 case NE_EXPR:
7341 code = NE;
7342 break;
7343 case LT_EXPR:
7344 code = unsignedp ? LTU : LT;
7345 break;
7346 case LE_EXPR:
7347 code = unsignedp ? LEU : LE;
7348 break;
7349 case GT_EXPR:
7350 code = unsignedp ? GTU : GT;
7351 break;
7352 case GE_EXPR:
7353 code = unsignedp ? GEU : GE;
7354 break;
7355 case UNORDERED_EXPR:
7356 code = UNORDERED;
7357 break;
7358 case ORDERED_EXPR:
7359 code = ORDERED;
7360 break;
7361 case UNLT_EXPR:
7362 code = UNLT;
7363 break;
7364 case UNLE_EXPR:
7365 code = UNLE;
7366 break;
7367 case UNGT_EXPR:
7368 code = UNGT;
7369 break;
7370 case UNGE_EXPR:
7371 code = UNGE;
7372 break;
7373 case UNEQ_EXPR:
7374 code = UNEQ;
7375 break;
7376 case LTGT_EXPR:
7377 code = LTGT;
7378 break;
7380 default:
7381 gcc_unreachable ();
7383 return code;
7385 #endif
7387 /* Subroutine of expand_expr. Expand the two operands of a binary
7388 expression EXP0 and EXP1 placing the results in OP0 and OP1.
7389 The value may be stored in TARGET if TARGET is nonzero. The
7390 MODIFIER argument is as documented by expand_expr. */
7392 static void
7393 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7394 enum expand_modifier modifier)
7396 if (! safe_from_p (target, exp1, 1))
7397 target = 0;
7398 if (operand_equal_p (exp0, exp1, 0))
7400 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7401 *op1 = copy_rtx (*op0);
7403 else
7405 /* If we need to preserve evaluation order, copy exp0 into its own
7406 temporary variable so that it can't be clobbered by exp1. */
7407 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7408 exp0 = save_expr (exp0);
7409 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7410 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
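/* For instance (a sketch): when exp1 is a call f (&i) and exp0 reads i,
   the save_expr above copies exp0 into its own temporary before the
   call can run, so front ends that demand left-to-right evaluation
   (flag_evaluation_order) see the old value of i in exp0.  */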
7415 /* Return a MEM that contains constant EXP. DEFER is as for
7416 output_constant_def and MODIFIER is as for expand_expr. */
7418 static rtx
7419 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7421 rtx mem;
7423 mem = output_constant_def (exp, defer);
7424 if (modifier != EXPAND_INITIALIZER)
7425 mem = use_anchored_address (mem);
7426 return mem;
7429 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
7430 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7432 static rtx
7433 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7434 enum expand_modifier modifier, addr_space_t as)
7436 rtx result, subtarget;
7437 tree inner, offset;
7438 HOST_WIDE_INT bitsize, bitpos;
7439 int volatilep, unsignedp;
7440 enum machine_mode mode1;
7442 /* If we are taking the address of a constant and are at the top level,
7443 we have to use output_constant_def since we can't call force_const_mem
7444 at top level. */
7445 /* ??? This should be considered a front-end bug. We should not be
7446 generating ADDR_EXPR of something that isn't an LVALUE. The only
7447 exception here is STRING_CST. */
7448 if (CONSTANT_CLASS_P (exp))
7450 result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7451 if (modifier < EXPAND_SUM)
7452 result = force_operand (result, target);
7453 return result;
7456 /* Everything must be something allowed by is_gimple_addressable. */
7457 switch (TREE_CODE (exp))
7459 case INDIRECT_REF:
7460 /* This case will happen via recursion for &a->b. */
7461 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7463 case MEM_REF:
7465 tree tem = TREE_OPERAND (exp, 0);
7466 if (!integer_zerop (TREE_OPERAND (exp, 1)))
7467 tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7468 return expand_expr (tem, target, tmode, modifier);
7471 case CONST_DECL:
7472 /* Expand the initializer like constants above. */
7473 result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7474 0, modifier), 0);
7475 if (modifier < EXPAND_SUM)
7476 result = force_operand (result, target);
7477 return result;
7479 case REALPART_EXPR:
7480 /* The real part of the complex number is always first, therefore
7481 the address is the same as the address of the parent object. */
7482 offset = 0;
7483 bitpos = 0;
7484 inner = TREE_OPERAND (exp, 0);
7485 break;
7487 case IMAGPART_EXPR:
7488 /* The imaginary part of the complex number is always second.
7489 The expression is therefore always offset by the size of the
7490 scalar type. */
7491 offset = 0;
7492 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7493 inner = TREE_OPERAND (exp, 0);
7494 break;
7496 case COMPOUND_LITERAL_EXPR:
7497 /* Allow COMPOUND_LITERAL_EXPR in initializers: if e.g.
7498 rtl_for_decl_init is called on DECL_INITIAL with
7499 COMPOUND_LITERAL_EXPRs in it, they aren't gimplified. */
7500 if (modifier == EXPAND_INITIALIZER
7501 && COMPOUND_LITERAL_EXPR_DECL (exp))
7502 return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7503 target, tmode, modifier, as);
7504 /* FALLTHRU */
7505 default:
7506 /* If the object is a DECL, then expand it for its rtl. Don't bypass
7507 expand_expr, as that can have various side effects; LABEL_DECLs for
7508 example, may not have their DECL_RTL set yet. Expand the rtl of
7509 CONSTRUCTORs too, which should yield a memory reference for the
7510 constructor's contents. Assume language specific tree nodes can
7511 be expanded in some interesting way. */
7512 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7513 if (DECL_P (exp)
7514 || TREE_CODE (exp) == CONSTRUCTOR
7515 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7517 result = expand_expr (exp, target, tmode,
7518 modifier == EXPAND_INITIALIZER
7519 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7521 /* If the DECL isn't in memory, then the DECL wasn't properly
7522 marked TREE_ADDRESSABLE, which will be either a front-end
7523 or a tree optimizer bug. */
7525 if (TREE_ADDRESSABLE (exp)
7526 && ! MEM_P (result)
7527 && ! targetm.calls.allocate_stack_slots_for_args ())
7529 error ("local frame unavailable (naked function?)");
7530 return result;
7532 else
7533 gcc_assert (MEM_P (result));
7534 result = XEXP (result, 0);
7536 /* ??? Is this needed anymore? */
7537 if (DECL_P (exp))
7538 TREE_USED (exp) = 1;
7540 if (modifier != EXPAND_INITIALIZER
7541 && modifier != EXPAND_CONST_ADDRESS
7542 && modifier != EXPAND_SUM)
7543 result = force_operand (result, target);
7544 return result;
7547 /* Pass FALSE as the last argument to get_inner_reference although
7548 we are expanding to RTL. The rationale is that we know how to
7549 handle "aligning nodes" here: we can just bypass them because
7550 they won't change the final object whose address will be returned
7551 (they actually exist only for that purpose). */
7552 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7553 &mode1, &unsignedp, &volatilep, false);
7554 break;
7557 /* We must have made progress. */
7558 gcc_assert (inner != exp);
7560 subtarget = offset || bitpos ? NULL_RTX : target;
7561 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7562 inner alignment, force the inner to be sufficiently aligned. */
7563 if (CONSTANT_CLASS_P (inner)
7564 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7566 inner = copy_node (inner);
7567 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7568 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7569 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7571 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7573 if (offset)
7575 rtx tmp;
7577 if (modifier != EXPAND_NORMAL)
7578 result = force_operand (result, NULL);
7579 tmp = expand_expr (offset, NULL_RTX, tmode,
7580 modifier == EXPAND_INITIALIZER
7581 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7583 result = convert_memory_address_addr_space (tmode, result, as);
7584 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7586 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7587 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7588 else
7590 subtarget = bitpos ? NULL_RTX : target;
7591 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7592 1, OPTAB_LIB_WIDEN);
7596 if (bitpos)
7598 /* Someone beforehand should have rejected taking the address
7599 of such an object. */
7600 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7602 result = convert_memory_address_addr_space (tmode, result, as);
7603 result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7604 if (modifier < EXPAND_SUM)
7605 result = force_operand (result, target);
7608 return result;
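/* Illustrative example (added sketch, assuming double is DFmode): for
   &__imag__ c with C of type _Complex double, the IMAGPART_EXPR case
   above sets BITPOS to GET_MODE_BITSIZE (DFmode) == 64, and the final
   plus_constant step adds 8 bytes to the address of C.  */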
7611 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7612 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7614 static rtx
7615 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7616 enum expand_modifier modifier)
7618 addr_space_t as = ADDR_SPACE_GENERIC;
7619 enum machine_mode address_mode = Pmode;
7620 enum machine_mode pointer_mode = ptr_mode;
7621 enum machine_mode rmode;
7622 rtx result;
7624 /* Target mode of VOIDmode says "whatever's natural". */
7625 if (tmode == VOIDmode)
7626 tmode = TYPE_MODE (TREE_TYPE (exp));
7628 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7630 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7631 address_mode = targetm.addr_space.address_mode (as);
7632 pointer_mode = targetm.addr_space.pointer_mode (as);
7635 /* We can get called with some Weird Things if the user does silliness
7636 like "(short) &a". In that case, convert_memory_address won't do
7637 the right thing, so ignore the given target mode. */
7638 if (tmode != address_mode && tmode != pointer_mode)
7639 tmode = address_mode;
7641 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7642 tmode, modifier, as);
7644 /* Despite what expand_expr claims about ignoring TMODE when it is not
7645 strictly convenient, things break if we don't honor it. Note
7646 that combined with the above, we only do this for pointer modes. */
7647 rmode = GET_MODE (result);
7648 if (rmode == VOIDmode)
7649 rmode = tmode;
7650 if (rmode != tmode)
7651 result = convert_memory_address_addr_space (tmode, result, as);
7653 return result;
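/* Illustrative example (added sketch): for silliness such as

     short s = (short) &x;

   TMODE arrives as HImode, which is neither the address mode nor the
   pointer mode, so the code above computes the address in ADDRESS_MODE
   and leaves the narrowing to the enclosing conversion.  */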
7656 /* Generate code for computing CONSTRUCTOR EXP.
7657 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7658 is TRUE, NULL is returned instead of creating a temporary
7659 variable in memory, and the caller needs to handle it differently. */
7661 static rtx
7662 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7663 bool avoid_temp_mem)
7665 tree type = TREE_TYPE (exp);
7666 enum machine_mode mode = TYPE_MODE (type);
7668 /* Try to avoid creating a temporary at all. This is possible
7669 if all of the initializer is zero.
7670 FIXME: try to handle all [0..255] initializers we can handle
7671 with memset. */
7672 if (TREE_STATIC (exp)
7673 && !TREE_ADDRESSABLE (exp)
7674 && target != 0 && mode == BLKmode
7675 && all_zeros_p (exp))
7677 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7678 return target;
7681 /* All elts simple constants => refer to a constant in memory. But
7682 if this is a non-BLKmode mode, let it store a field at a time
7683 since that should make a CONST_INT or CONST_DOUBLE when we
7684 fold. Likewise, if we have a target we can use, it is best to
7685 store directly into the target unless the type is large enough
7686 that memcpy will be used. If we are making an initializer and
7687 all operands are constant, put it in memory as well.
7689 FIXME: Avoid trying to fill vector constructors piece-meal.
7690 Output them with output_constant_def below unless we're sure
7691 they're zeros. This should go away when vector initializers
7692 are treated like VECTOR_CST instead of arrays. */
7693 if ((TREE_STATIC (exp)
7694 && ((mode == BLKmode
7695 && ! (target != 0 && safe_from_p (target, exp, 1)))
7696 || TREE_ADDRESSABLE (exp)
7697 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7698 && (! MOVE_BY_PIECES_P
7699 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7700 TYPE_ALIGN (type)))
7701 && ! mostly_zeros_p (exp))))
7702 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7703 && TREE_CONSTANT (exp)))
7705 rtx constructor;
7707 if (avoid_temp_mem)
7708 return NULL_RTX;
7710 constructor = expand_expr_constant (exp, 1, modifier);
7712 if (modifier != EXPAND_CONST_ADDRESS
7713 && modifier != EXPAND_INITIALIZER
7714 && modifier != EXPAND_SUM)
7715 constructor = validize_mem (constructor);
7717 return constructor;
7720 /* Handle calls that pass values in multiple non-contiguous
7721 locations. The Irix 6 ABI has examples of this. */
7722 if (target == 0 || ! safe_from_p (target, exp, 1)
7723 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7725 if (avoid_temp_mem)
7726 return NULL_RTX;
7728 target
7729 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7730 | (TREE_READONLY (exp)
7731 * TYPE_QUAL_CONST))),
7732 TREE_ADDRESSABLE (exp), 1);
7735 store_constructor (exp, target, 0, int_expr_size (exp));
7736 return target;
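/* Illustrative example (added sketch): a statically-zero initializer
   such as

     struct S { int a[32]; } s = { 0 };

   expanded in BLKmode with a usable target takes the all_zeros_p
   shortcut above and becomes one clear_storage call (typically a
   memset or an inline clearing loop) instead of per-field stores.  */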
7740 /* expand_expr: generate code for computing expression EXP.
7741 An rtx for the computed value is returned. The value is never null.
7742 In the case of a void EXP, const0_rtx is returned.
7744 The value may be stored in TARGET if TARGET is nonzero.
7745 TARGET is just a suggestion; callers must assume that
7746 the rtx returned may not be the same as TARGET.
7748 If TARGET is CONST0_RTX, it means that the value will be ignored.
7750 If TMODE is not VOIDmode, it suggests generating the
7751 result in mode TMODE. But this is done only when convenient.
7752 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7753 TMODE is just a suggestion; callers must assume that
7754 the rtx returned may not have mode TMODE.
7756 Note that TARGET may have neither TMODE nor MODE. In that case, it
7757 probably will not be used.
7759 If MODIFIER is EXPAND_SUM then when EXP is an addition
7760 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7761 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7762 products as above, or REG or MEM, or constant.
7763 Ordinarily in such cases we would output mul or add instructions
7764 and then return a pseudo reg containing the sum.
7766 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7767 it also marks a label as absolutely required (it can't be dead).
7768 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7769 This is used for outputting expressions used in initializers.
7771 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7772 with a constant address even if that address is not normally legitimate.
7773 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7775 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7776 a call parameter. Such targets require special care as we haven't yet
7777 marked TARGET so that it's safe from being trashed by libcalls. We
7778 don't want to use TARGET for anything but the final result;
7779 intermediate values must go elsewhere. Additionally, calls to
7780 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7782 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7783 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7784 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7785 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7786 recursively. */
7788 rtx
7789 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7790 enum expand_modifier modifier, rtx *alt_rtl)
7792 rtx ret;
7794 /* Handle ERROR_MARK before anybody tries to access its type. */
7795 if (TREE_CODE (exp) == ERROR_MARK
7796 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7798 ret = CONST0_RTX (tmode);
7799 return ret ? ret : const0_rtx;
7802 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7803 return ret;
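/* Illustrative example (added sketch): the EXPAND_SUM modifier
   described above is what keeps address arithmetic symbolic.
   Expanding &arr[i] with EXPAND_SUM may yield

     (plus (mult (reg i) (const_int 4)) (symbol_ref arr))

   which the caller can fold into an addressing mode, whereas
   EXPAND_NORMAL would force the sum into a pseudo register first.  */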
7806 /* Try to expand the conditional expression which is represented by
7807 TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves. If it succeeds,
7808 return the rtl reg which represents the result. Otherwise return
7809 NULL_RTX. */
7811 static rtx
7812 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7813 tree treeop1 ATTRIBUTE_UNUSED,
7814 tree treeop2 ATTRIBUTE_UNUSED)
7816 #ifdef HAVE_conditional_move
7817 rtx insn;
7818 rtx op00, op01, op1, op2;
7819 enum rtx_code comparison_code;
7820 enum machine_mode comparison_mode;
7821 gimple srcstmt;
7822 rtx temp;
7823 tree type = TREE_TYPE (treeop1);
7824 int unsignedp = TYPE_UNSIGNED (type);
7825 enum machine_mode mode = TYPE_MODE (type);
7826 enum machine_mode orig_mode = mode;
7828 /* If we cannot do a conditional move on the mode, try doing it
7829 with the promoted mode. */
7830 if (!can_conditionally_move_p (mode))
7832 mode = promote_mode (type, mode, &unsignedp);
7833 if (!can_conditionally_move_p (mode))
7834 return NULL_RTX;
7835 temp = assign_temp (type, 0, 0); /* Use promoted mode for temp. */
7837 else
7838 temp = assign_temp (type, 0, 1);
7840 start_sequence ();
7841 expand_operands (treeop1, treeop2,
7842 temp, &op1, &op2, EXPAND_NORMAL);
7844 if (TREE_CODE (treeop0) == SSA_NAME
7845 && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7847 tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7848 enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7849 op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7850 op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7851 comparison_mode = TYPE_MODE (type);
7852 unsignedp = TYPE_UNSIGNED (type);
7853 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7855 else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7857 tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7858 enum tree_code cmpcode = TREE_CODE (treeop0);
7859 op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7860 op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7861 unsignedp = TYPE_UNSIGNED (type);
7862 comparison_mode = TYPE_MODE (type);
7863 comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7865 else
7867 op00 = expand_normal (treeop0);
7868 op01 = const0_rtx;
7869 comparison_code = NE;
7870 comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7873 if (GET_MODE (op1) != mode)
7874 op1 = gen_lowpart (mode, op1);
7876 if (GET_MODE (op2) != mode)
7877 op2 = gen_lowpart (mode, op2);
7879 /* Try to emit the conditional move. */
7880 insn = emit_conditional_move (temp, comparison_code,
7881 op00, op01, comparison_mode,
7882 op1, op2, mode,
7883 unsignedp);
7885 /* If we could do the conditional move, emit the sequence,
7886 and return. */
7887 if (insn)
7889 rtx seq = get_insns ();
7890 end_sequence ();
7891 emit_insn (seq);
7892 return convert_modes (orig_mode, mode, temp, 0);
7895 /* Otherwise discard the sequence and fall back to code with
7896 branches. */
7897 end_sequence ();
7898 #endif
7899 return NULL_RTX;
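/* Illustrative example (added sketch): on a target with conditional
   moves, a statement such as x = a < b ? c : d reaches the function
   above with TREEOP0 being the comparison, and expands to a compare
   followed by a conditional-move insn instead of a branch diamond.
   A NULL_RTX return sends the COND_EXPR code in expand_expr_real_2
   down the branching fallback instead.  */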
7902 rtx
7903 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7904 enum expand_modifier modifier)
7906 rtx op0, op1, op2, temp;
7907 tree type;
7908 int unsignedp;
7909 enum machine_mode mode;
7910 enum tree_code code = ops->code;
7911 optab this_optab;
7912 rtx subtarget, original_target;
7913 int ignore;
7914 bool reduce_bit_field;
7915 location_t loc = ops->location;
7916 tree treeop0, treeop1, treeop2;
7917 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7918 ? reduce_to_bit_field_precision ((expr), \
7919 target, \
7920 type) \
7921 : (expr))
7923 type = ops->type;
7924 mode = TYPE_MODE (type);
7925 unsignedp = TYPE_UNSIGNED (type);
7927 treeop0 = ops->op0;
7928 treeop1 = ops->op1;
7929 treeop2 = ops->op2;
7931 /* We should be called only on simple (binary or unary) expressions,
7932 exactly those that are valid in gimple expressions that aren't
7933 GIMPLE_SINGLE_RHS (or invalid). */
7934 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7935 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7936 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7938 ignore = (target == const0_rtx
7939 || ((CONVERT_EXPR_CODE_P (code)
7940 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7941 && TREE_CODE (type) == VOID_TYPE));
7943 /* We should be called only if we need the result. */
7944 gcc_assert (!ignore);
7946 /* An operation in what may be a bit-field type needs the
7947 result to be reduced to the precision of the bit-field type,
7948 which is narrower than that of the type's mode. */
7949 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7950 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7952 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7953 target = 0;
7955 /* Use subtarget as the target for operand 0 of a binary operation. */
7956 subtarget = get_subtarget (target);
7957 original_target = target;
7959 switch (code)
7961 case NON_LVALUE_EXPR:
7962 case PAREN_EXPR:
7963 CASE_CONVERT:
7964 if (treeop0 == error_mark_node)
7965 return const0_rtx;
7967 if (TREE_CODE (type) == UNION_TYPE)
7969 tree valtype = TREE_TYPE (treeop0);
7971 /* If both input and output are BLKmode, this conversion isn't doing
7972 anything except possibly changing memory attributes. */
7973 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7975 rtx result = expand_expr (treeop0, target, tmode,
7976 modifier);
7978 result = copy_rtx (result);
7979 set_mem_attributes (result, type, 0);
7980 return result;
7983 if (target == 0)
7985 if (TYPE_MODE (type) != BLKmode)
7986 target = gen_reg_rtx (TYPE_MODE (type));
7987 else
7988 target = assign_temp (type, 1, 1);
7991 if (MEM_P (target))
7992 /* Store data into beginning of memory target. */
7993 store_expr (treeop0,
7994 adjust_address (target, TYPE_MODE (valtype), 0),
7995 modifier == EXPAND_STACK_PARM,
7996 false);
7998 else
8000 gcc_assert (REG_P (target));
8002 /* Store this field into a union of the proper type. */
8003 store_field (target,
8004 MIN ((int_size_in_bytes (TREE_TYPE
8005 (treeop0))
8006 * BITS_PER_UNIT),
8007 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8008 0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8011 /* Return the entire union. */
8012 return target;
8015 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8017 op0 = expand_expr (treeop0, target, VOIDmode,
8018 modifier);
8020 /* If the signedness of the conversion differs and OP0 is
8021 a promoted SUBREG, clear that indication since we now
8022 have to do the proper extension. */
8023 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8024 && GET_CODE (op0) == SUBREG)
8025 SUBREG_PROMOTED_VAR_P (op0) = 0;
8027 return REDUCE_BIT_FIELD (op0);
8030 op0 = expand_expr (treeop0, NULL_RTX, mode,
8031 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8032 if (GET_MODE (op0) == mode)
8033 ;
8035 /* If OP0 is a constant, just convert it into the proper mode. */
8036 else if (CONSTANT_P (op0))
8038 tree inner_type = TREE_TYPE (treeop0);
8039 enum machine_mode inner_mode = GET_MODE (op0);
8041 if (inner_mode == VOIDmode)
8042 inner_mode = TYPE_MODE (inner_type);
8044 if (modifier == EXPAND_INITIALIZER)
8045 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8046 subreg_lowpart_offset (mode,
8047 inner_mode));
8048 else
8049 op0 = convert_modes (mode, inner_mode, op0,
8050 TYPE_UNSIGNED (inner_type));
8053 else if (modifier == EXPAND_INITIALIZER)
8054 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8056 else if (target == 0)
8057 op0 = convert_to_mode (mode, op0,
8058 TYPE_UNSIGNED (TREE_TYPE
8059 (treeop0)));
8060 else
8062 convert_move (target, op0,
8063 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8064 op0 = target;
8067 return REDUCE_BIT_FIELD (op0);
8069 case ADDR_SPACE_CONVERT_EXPR:
8071 tree treeop0_type = TREE_TYPE (treeop0);
8072 addr_space_t as_to;
8073 addr_space_t as_from;
8075 gcc_assert (POINTER_TYPE_P (type));
8076 gcc_assert (POINTER_TYPE_P (treeop0_type));
8078 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8079 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8081 /* Conversions between pointers to the same address space should
8082 have been implemented via CONVERT_EXPR / NOP_EXPR. */
8083 gcc_assert (as_to != as_from);
8085 /* Ask target code to handle conversion between pointers
8086 to overlapping address spaces. */
8087 if (targetm.addr_space.subset_p (as_to, as_from)
8088 || targetm.addr_space.subset_p (as_from, as_to))
8090 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8091 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8092 gcc_assert (op0);
8093 return op0;
8096 /* For disjoint address spaces, converting anything but
8097 a null pointer invokes undefined behaviour. We simply
8098 always return a null pointer here. */
8099 return CONST0_RTX (mode);
8102 case POINTER_PLUS_EXPR:
8103 /* Even though the sizetype mode and the pointer's mode can be different,
8104 expand is able to handle this correctly and get the correct result out
8105 of the PLUS_EXPR code. */
8106 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8107 if sizetype precision is smaller than pointer precision. */
8108 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8109 treeop1 = fold_convert_loc (loc, type,
8110 fold_convert_loc (loc, ssizetype,
8111 treeop1));
8112 /* If sizetype precision is larger than pointer precision, truncate the
8113 offset to have matching modes. */
8114 else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8115 treeop1 = fold_convert_loc (loc, type, treeop1);
8117 case PLUS_EXPR:
8118 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8119 something else, make sure we add the register to the constant and
8120 then to the other thing. This case can occur during strength
8121 reduction and doing it this way will produce better code if the
8122 frame pointer or argument pointer is eliminated.
8124 fold-const.c will ensure that the constant is always in the inner
8125 PLUS_EXPR, so the only case we need to do anything about is if
8126 sp, ap, or fp is our second argument, in which case we must swap
8127 the innermost first argument and our second argument. */
8129 if (TREE_CODE (treeop0) == PLUS_EXPR
8130 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8131 && TREE_CODE (treeop1) == VAR_DECL
8132 && (DECL_RTL (treeop1) == frame_pointer_rtx
8133 || DECL_RTL (treeop1) == stack_pointer_rtx
8134 || DECL_RTL (treeop1) == arg_pointer_rtx))
8136 gcc_unreachable ();
8139 /* If the result is to be ptr_mode and we are adding an integer to
8140 something, we might be forming a constant. So try to use
8141 plus_constant. If it produces a sum and we can't accept it,
8142 use force_operand. This allows P = &ARR[const] to generate
8143 efficient code on machines where a SYMBOL_REF is not a valid
8144 address.
8146 If this is an EXPAND_SUM call, always return the sum. */
8147 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8148 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8150 if (modifier == EXPAND_STACK_PARM)
8151 target = 0;
8152 if (TREE_CODE (treeop0) == INTEGER_CST
8153 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8154 && TREE_CONSTANT (treeop1))
8156 rtx constant_part;
8158 op1 = expand_expr (treeop1, subtarget, VOIDmode,
8159 EXPAND_SUM);
8160 /* Use immed_double_const to ensure that the constant is
8161 truncated according to the mode of OP1, then sign extended
8162 to a HOST_WIDE_INT. Using the constant directly can result
8163 in non-canonical RTL in a 64x32 cross compile. */
8164 constant_part
8165 = immed_double_const (TREE_INT_CST_LOW (treeop0),
8166 (HOST_WIDE_INT) 0,
8167 TYPE_MODE (TREE_TYPE (treeop1)));
8168 op1 = plus_constant (mode, op1, INTVAL (constant_part));
8169 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8170 op1 = force_operand (op1, target);
8171 return REDUCE_BIT_FIELD (op1);
8174 else if (TREE_CODE (treeop1) == INTEGER_CST
8175 && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8176 && TREE_CONSTANT (treeop0))
8178 rtx constant_part;
8180 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8181 (modifier == EXPAND_INITIALIZER
8182 ? EXPAND_INITIALIZER : EXPAND_SUM));
8183 if (! CONSTANT_P (op0))
8185 op1 = expand_expr (treeop1, NULL_RTX,
8186 VOIDmode, modifier);
8187 /* Return a PLUS if modifier says it's OK. */
8188 if (modifier == EXPAND_SUM
8189 || modifier == EXPAND_INITIALIZER)
8190 return simplify_gen_binary (PLUS, mode, op0, op1);
8191 goto binop2;
8193 /* Use immed_double_const to ensure that the constant is
8194 truncated according to the mode of OP1, then sign extended
8195 to a HOST_WIDE_INT. Using the constant directly can result
8196 in non-canonical RTL in a 64x32 cross compile. */
8197 constant_part
8198 = immed_double_const (TREE_INT_CST_LOW (treeop1),
8199 (HOST_WIDE_INT) 0,
8200 TYPE_MODE (TREE_TYPE (treeop0)));
8201 op0 = plus_constant (mode, op0, INTVAL (constant_part));
8202 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8203 op0 = force_operand (op0, target);
8204 return REDUCE_BIT_FIELD (op0);
8208 /* Use TER to expand pointer addition of a negated value
8209 as pointer subtraction. */
8210 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8211 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8212 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8213 && TREE_CODE (treeop1) == SSA_NAME
8214 && TYPE_MODE (TREE_TYPE (treeop0))
8215 == TYPE_MODE (TREE_TYPE (treeop1)))
8217 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8218 if (def)
8220 treeop1 = gimple_assign_rhs1 (def);
8221 code = MINUS_EXPR;
8222 goto do_minus;
8226 /* No sense saving up arithmetic to be done
8227 if it's all in the wrong mode to form part of an address.
8228 And force_operand won't know whether to sign-extend or
8229 zero-extend. */
8230 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8231 || mode != ptr_mode)
8233 expand_operands (treeop0, treeop1,
8234 subtarget, &op0, &op1, EXPAND_NORMAL);
8235 if (op0 == const0_rtx)
8236 return op1;
8237 if (op1 == const0_rtx)
8238 return op0;
8239 goto binop2;
8242 expand_operands (treeop0, treeop1,
8243 subtarget, &op0, &op1, modifier);
8244 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8246 case MINUS_EXPR:
8247 do_minus:
8248 /* For initializers, we are allowed to return a MINUS of two
8249 symbolic constants. Here we handle all cases when both operands
8250 are constant. */
8251 /* Handle difference of two symbolic constants,
8252 for the sake of an initializer. */
8253 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8254 && really_constant_p (treeop0)
8255 && really_constant_p (treeop1))
8257 expand_operands (treeop0, treeop1,
8258 NULL_RTX, &op0, &op1, modifier);
8260 /* If the last operand is a CONST_INT, use plus_constant of
8261 the negated constant. Else make the MINUS. */
8262 if (CONST_INT_P (op1))
8263 return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8264 -INTVAL (op1)));
8265 else
8266 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8269 /* No sense saving up arithmetic to be done
8270 if it's all in the wrong mode to form part of an address.
8271 And force_operand won't know whether to sign-extend or
8272 zero-extend. */
8273 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8274 || mode != ptr_mode)
8275 goto binop;
8277 expand_operands (treeop0, treeop1,
8278 subtarget, &op0, &op1, modifier);
8280 /* Convert A - const to A + (-const). */
8281 if (CONST_INT_P (op1))
8283 op1 = negate_rtx (mode, op1);
8284 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8287 goto binop2;
8289 case WIDEN_MULT_PLUS_EXPR:
8290 case WIDEN_MULT_MINUS_EXPR:
8291 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8292 op2 = expand_normal (treeop2);
8293 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8294 target, unsignedp);
8295 return target;
8297 case WIDEN_MULT_EXPR:
8298 /* If first operand is constant, swap them.
8299 Thus the following special case checks need only
8300 check the second operand. */
8301 if (TREE_CODE (treeop0) == INTEGER_CST)
8303 tree t1 = treeop0;
8304 treeop0 = treeop1;
8305 treeop1 = t1;
8308 /* First, check if we have a multiplication of one signed and one
8309 unsigned operand. */
8310 if (TREE_CODE (treeop1) != INTEGER_CST
8311 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8312 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8314 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8315 this_optab = usmul_widen_optab;
8316 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8317 != CODE_FOR_nothing)
8319 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8320 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8321 EXPAND_NORMAL);
8322 else
8323 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8324 EXPAND_NORMAL);
8325 /* op0 and op1 might still be constant, despite the above
8326 != INTEGER_CST check. Handle it. */
8327 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8329 op0 = convert_modes (innermode, mode, op0, true);
8330 op1 = convert_modes (innermode, mode, op1, false);
8331 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8332 target, unsignedp));
8334 goto binop3;
8337 /* Check for a multiplication with matching signedness. */
8338 else if ((TREE_CODE (treeop1) == INTEGER_CST
8339 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8340 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8341 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8343 tree op0type = TREE_TYPE (treeop0);
8344 enum machine_mode innermode = TYPE_MODE (op0type);
8345 bool zextend_p = TYPE_UNSIGNED (op0type);
8346 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8347 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8349 if (TREE_CODE (treeop0) != INTEGER_CST)
8351 if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8352 != CODE_FOR_nothing)
8354 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8355 EXPAND_NORMAL);
8356 /* op0 and op1 might still be constant, despite the above
8357 != INTEGER_CST check. Handle it. */
8358 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8360 widen_mult_const:
8361 op0 = convert_modes (innermode, mode, op0, zextend_p);
8362 op1
8363 = convert_modes (innermode, mode, op1,
8364 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8365 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8366 target,
8367 unsignedp));
8369 temp = expand_widening_mult (mode, op0, op1, target,
8370 unsignedp, this_optab);
8371 return REDUCE_BIT_FIELD (temp);
8373 if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8374 != CODE_FOR_nothing
8375 && innermode == word_mode)
8377 rtx htem, hipart;
8378 op0 = expand_normal (treeop0);
8379 if (TREE_CODE (treeop1) == INTEGER_CST)
8380 op1 = convert_modes (innermode, mode,
8381 expand_normal (treeop1),
8382 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8383 else
8384 op1 = expand_normal (treeop1);
8385 /* op0 and op1 might still be constant, despite the above
8386 != INTEGER_CST check. Handle it. */
8387 if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8388 goto widen_mult_const;
8389 temp = expand_binop (mode, other_optab, op0, op1, target,
8390 unsignedp, OPTAB_LIB_WIDEN);
8391 hipart = gen_highpart (innermode, temp);
8392 htem = expand_mult_highpart_adjust (innermode, hipart,
8393 op0, op1, hipart,
8394 zextend_p);
8395 if (htem != hipart)
8396 emit_move_insn (hipart, htem);
8397 return REDUCE_BIT_FIELD (temp);
8401 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8402 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8403 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8404 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
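/* Illustrative example (added sketch, assuming 32-bit int and 64-bit
   long long): WIDEN_MULT_EXPR covers sources such as

     long long r = (long long) a * (long long) b;

   where the casts are recognized as pure widening.  Matching
   signedness tries [su]mul_widen_optab, mixed signedness tries
   usmul_widen_optab, and if only the opposite widening multiply exists
   at word_mode, its high part is corrected with
   expand_mult_highpart_adjust.  */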
8406 case FMA_EXPR:
8408 optab opt = fma_optab;
8409 gimple def0, def2;
8411 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8412 call. */
8413 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8415 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8416 tree call_expr;
8418 gcc_assert (fn != NULL_TREE);
8419 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8420 return expand_builtin (call_expr, target, subtarget, mode, false);
8423 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8424 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8426 op0 = op2 = NULL;
8428 if (def0 && def2
8429 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8431 opt = fnms_optab;
8432 op0 = expand_normal (gimple_assign_rhs1 (def0));
8433 op2 = expand_normal (gimple_assign_rhs1 (def2));
8435 else if (def0
8436 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8438 opt = fnma_optab;
8439 op0 = expand_normal (gimple_assign_rhs1 (def0));
8441 else if (def2
8442 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8444 opt = fms_optab;
8445 op2 = expand_normal (gimple_assign_rhs1 (def2));
8448 if (op0 == NULL)
8449 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8450 if (op2 == NULL)
8451 op2 = expand_normal (treeop2);
8452 op1 = expand_normal (treeop1);
8454 return expand_ternary_op (TYPE_MODE (type), opt,
8455 op0, op1, op2, target, 0);
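/* Illustrative example (added sketch): the def0/def2 tests above fold
   negations into the fused multiply-add, so r = -a * b + c maps to
   fnma_optab, r = a * b - c to fms_optab, and r = -a * b - c to
   fnms_optab, provided the target implements the corresponding insn;
   otherwise plain fma_optab or a __builtin_fma call is used.  */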
8458 case MULT_EXPR:
8459 /* If this is a fixed-point operation, then we cannot use the code
8460 below because "expand_mult" doesn't support sat/no-sat fixed-point
8461 multiplications. */
8462 if (ALL_FIXED_POINT_MODE_P (mode))
8463 goto binop;
8465 /* If first operand is constant, swap them.
8466 Thus the following special case checks need only
8467 check the second operand. */
8468 if (TREE_CODE (treeop0) == INTEGER_CST)
8470 tree t1 = treeop0;
8471 treeop0 = treeop1;
8472 treeop1 = t1;
8475 /* Attempt to return something suitable for generating an
8476 indexed address, for machines that support that. */
8478 if (modifier == EXPAND_SUM && mode == ptr_mode
8479 && host_integerp (treeop1, 0))
8481 tree exp1 = treeop1;
8483 op0 = expand_expr (treeop0, subtarget, VOIDmode,
8484 EXPAND_SUM);
8486 if (!REG_P (op0))
8487 op0 = force_operand (op0, NULL_RTX);
8488 if (!REG_P (op0))
8489 op0 = copy_to_mode_reg (mode, op0);
8491 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8492 gen_int_mode (tree_low_cst (exp1, 0),
8493 TYPE_MODE (TREE_TYPE (exp1)))));
8496 if (modifier == EXPAND_STACK_PARM)
8497 target = 0;
8499 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8500 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8502 case TRUNC_DIV_EXPR:
8503 case FLOOR_DIV_EXPR:
8504 case CEIL_DIV_EXPR:
8505 case ROUND_DIV_EXPR:
8506 case EXACT_DIV_EXPR:
8507 /* If this is a fixed-point operation, then we cannot use the code
8508 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8509 divisions. */
8510 if (ALL_FIXED_POINT_MODE_P (mode))
8511 goto binop;
8513 if (modifier == EXPAND_STACK_PARM)
8514 target = 0;
8515 /* Possible optimization: compute the dividend with EXPAND_SUM
8516 then if the divisor is constant can optimize the case
8517 where some terms of the dividend have coeffs divisible by it. */
8518 expand_operands (treeop0, treeop1,
8519 subtarget, &op0, &op1, EXPAND_NORMAL);
8520 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8522 case RDIV_EXPR:
8523 goto binop;
8525 case MULT_HIGHPART_EXPR:
8526 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8527 temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8528 gcc_assert (temp);
8529 return temp;
8531 case TRUNC_MOD_EXPR:
8532 case FLOOR_MOD_EXPR:
8533 case CEIL_MOD_EXPR:
8534 case ROUND_MOD_EXPR:
8535 if (modifier == EXPAND_STACK_PARM)
8536 target = 0;
8537 expand_operands (treeop0, treeop1,
8538 subtarget, &op0, &op1, EXPAND_NORMAL);
8539 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8541 case FIXED_CONVERT_EXPR:
8542 op0 = expand_normal (treeop0);
8543 if (target == 0 || modifier == EXPAND_STACK_PARM)
8544 target = gen_reg_rtx (mode);
8546 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8547 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8548 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8549 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8550 else
8551 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8552 return target;
8554 case FIX_TRUNC_EXPR:
8555 op0 = expand_normal (treeop0);
8556 if (target == 0 || modifier == EXPAND_STACK_PARM)
8557 target = gen_reg_rtx (mode);
8558 expand_fix (target, op0, unsignedp);
8559 return target;
8561 case FLOAT_EXPR:
8562 op0 = expand_normal (treeop0);
8563 if (target == 0 || modifier == EXPAND_STACK_PARM)
8564 target = gen_reg_rtx (mode);
8565 /* expand_float can't figure out what to do if FROM has VOIDmode.
8566 So give it the correct mode. With -O, cse will optimize this. */
8567 if (GET_MODE (op0) == VOIDmode)
8568 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8569 op0);
8570 expand_float (target, op0,
8571 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8572 return target;
8574 case NEGATE_EXPR:
8575 op0 = expand_expr (treeop0, subtarget,
8576 VOIDmode, EXPAND_NORMAL);
8577 if (modifier == EXPAND_STACK_PARM)
8578 target = 0;
8579 temp = expand_unop (mode,
8580 optab_for_tree_code (NEGATE_EXPR, type,
8581 optab_default),
8582 op0, target, 0);
8583 gcc_assert (temp);
8584 return REDUCE_BIT_FIELD (temp);
8586 case ABS_EXPR:
8587 op0 = expand_expr (treeop0, subtarget,
8588 VOIDmode, EXPAND_NORMAL);
8589 if (modifier == EXPAND_STACK_PARM)
8590 target = 0;
8592 /* ABS_EXPR is not valid for complex arguments. */
8593 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8594 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8596 /* Unsigned abs is simply the operand. Testing here means we don't
8597 risk generating incorrect code below. */
8598 if (TYPE_UNSIGNED (type))
8599 return op0;
8601 return expand_abs (mode, op0, target, unsignedp,
8602 safe_from_p (target, treeop0, 1));
8604 case MAX_EXPR:
8605 case MIN_EXPR:
8606 target = original_target;
8607 if (target == 0
8608 || modifier == EXPAND_STACK_PARM
8609 || (MEM_P (target) && MEM_VOLATILE_P (target))
8610 || GET_MODE (target) != mode
8611 || (REG_P (target)
8612 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8613 target = gen_reg_rtx (mode);
8614 expand_operands (treeop0, treeop1,
8615 target, &op0, &op1, EXPAND_NORMAL);
8617 /* First try to do it with a special MIN or MAX instruction.
8618 If that does not win, use a conditional jump to select the proper
8619 value. */
8620 this_optab = optab_for_tree_code (code, type, optab_default);
8621 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8622 OPTAB_WIDEN);
8623 if (temp != 0)
8624 return temp;
8626 /* At this point, a MEM target is no longer useful; we will get better
8627 code without it. */
8629 if (! REG_P (target))
8630 target = gen_reg_rtx (mode);
8632 /* If op1 was placed in target, swap op0 and op1. */
8633 if (target != op0 && target == op1)
8635 temp = op0;
8636 op0 = op1;
8637 op1 = temp;
8640 /* We generate better code and avoid problems with op1 mentioning
8641 target by forcing op1 into a pseudo if it isn't a constant. */
8642 if (! CONSTANT_P (op1))
8643 op1 = force_reg (mode, op1);
8646 enum rtx_code comparison_code;
8647 rtx cmpop1 = op1;
8649 if (code == MAX_EXPR)
8650 comparison_code = unsignedp ? GEU : GE;
8651 else
8652 comparison_code = unsignedp ? LEU : LE;
8654 /* Canonicalize to comparisons against 0. */
8655 if (op1 == const1_rtx)
8657 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8658 or (a != 0 ? a : 1) for unsigned.
8659 For MIN we are safe converting (a <= 1 ? a : 1)
8660 into (a <= 0 ? a : 1) */
8661 cmpop1 = const0_rtx;
8662 if (code == MAX_EXPR)
8663 comparison_code = unsignedp ? NE : GT;
8665 if (op1 == constm1_rtx && !unsignedp)
8667 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8668 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8669 cmpop1 = const0_rtx;
8670 if (code == MIN_EXPR)
8671 comparison_code = LT;
8673 #ifdef HAVE_conditional_move
8674 /* Use a conditional move if possible. */
8675 if (can_conditionally_move_p (mode))
8677 rtx insn;
8679 /* ??? Same problem as in expmed.c: emit_conditional_move
8680 forces a stack adjustment via compare_from_rtx, and we
8681 lose the stack adjustment if the sequence we are about
8682 to create is discarded. */
8683 do_pending_stack_adjust ();
8685 start_sequence ();
8687 /* Try to emit the conditional move. */
8688 insn = emit_conditional_move (target, comparison_code,
8689 op0, cmpop1, mode,
8690 op0, op1, mode,
8691 unsignedp);
8693 /* If we could do the conditional move, emit the sequence,
8694 and return. */
8695 if (insn)
8697 rtx seq = get_insns ();
8698 end_sequence ();
8699 emit_insn (seq);
8700 return target;
8703 /* Otherwise discard the sequence and fall back to code with
8704 branches. */
8705 end_sequence ();
8707 #endif
8708 if (target != op0)
8709 emit_move_insn (target, op0);
8711 temp = gen_label_rtx ();
8712 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8713 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8714 -1);
8716 emit_move_insn (target, op1);
8717 emit_label (temp);
8718 return target;
8720 case BIT_NOT_EXPR:
8721 op0 = expand_expr (treeop0, subtarget,
8722 VOIDmode, EXPAND_NORMAL);
8723 if (modifier == EXPAND_STACK_PARM)
8724 target = 0;
8725 /* In case we have to reduce the result to bitfield precision
8726 for unsigned bitfield expand this as XOR with a proper constant
8727 instead. */
8728 if (reduce_bit_field && TYPE_UNSIGNED (type))
8729 temp = expand_binop (mode, xor_optab, op0,
8730 immed_double_int_const
8731 (double_int::mask (TYPE_PRECISION (type)), mode),
8732 target, 1, OPTAB_LIB_WIDEN);
8733 else
8734 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8735 gcc_assert (temp);
8736 return temp;
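/* Illustrative example (added sketch): the XOR rewrite above keeps
   unsigned bit-field complements truncated; for a 3-bit unsigned
   field, ~x expands as x ^ 7, so the result already fits the
   precision and needs no separate reduction.  */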
8738 /* ??? Can optimize bitwise operations with one arg constant.
8739 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8740 and (a bitwise1 b) bitwise2 b (etc)
8741 but that is probably not worthwhile. */
8743 case BIT_AND_EXPR:
8744 case BIT_IOR_EXPR:
8745 case BIT_XOR_EXPR:
8746 goto binop;
8748 case LROTATE_EXPR:
8749 case RROTATE_EXPR:
8750 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8751 || (GET_MODE_PRECISION (TYPE_MODE (type))
8752 == TYPE_PRECISION (type)));
8753 /* fall through */
8755 case LSHIFT_EXPR:
8756 case RSHIFT_EXPR:
8757 /* If this is a fixed-point operation, then we cannot use the code
8758 below because "expand_shift" doesn't support sat/no-sat fixed-point
8759 shifts. */
8760 if (ALL_FIXED_POINT_MODE_P (mode))
8761 goto binop;
8763 if (! safe_from_p (subtarget, treeop1, 1))
8764 subtarget = 0;
8765 if (modifier == EXPAND_STACK_PARM)
8766 target = 0;
8767 op0 = expand_expr (treeop0, subtarget,
8768 VOIDmode, EXPAND_NORMAL);
8769 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8770 unsignedp);
8771 if (code == LSHIFT_EXPR)
8772 temp = REDUCE_BIT_FIELD (temp);
8773 return temp;
8775 /* Could determine the answer when only additive constants differ. Also,
8776 the addition of one can be handled by changing the condition. */
8777 case LT_EXPR:
8778 case LE_EXPR:
8779 case GT_EXPR:
8780 case GE_EXPR:
8781 case EQ_EXPR:
8782 case NE_EXPR:
8783 case UNORDERED_EXPR:
8784 case ORDERED_EXPR:
8785 case UNLT_EXPR:
8786 case UNLE_EXPR:
8787 case UNGT_EXPR:
8788 case UNGE_EXPR:
8789 case UNEQ_EXPR:
8790 case LTGT_EXPR:
8791 temp = do_store_flag (ops,
8792 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8793 tmode != VOIDmode ? tmode : mode);
8794 if (temp)
8795 return temp;
8797 /* Use a compare and a jump for BLKmode comparisons, or for function
8798 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8800 if ((target == 0
8801 || modifier == EXPAND_STACK_PARM
8802 || ! safe_from_p (target, treeop0, 1)
8803 || ! safe_from_p (target, treeop1, 1)
8804 /* Make sure we don't have a hard reg (such as the function's return
8805 value) live across basic blocks, if not optimizing. */
8806 || (!optimize && REG_P (target)
8807 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8808 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8810 emit_move_insn (target, const0_rtx);
8812 op1 = gen_label_rtx ();
8813 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8815 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8816 emit_move_insn (target, constm1_rtx);
8817 else
8818 emit_move_insn (target, const1_rtx);
8820 emit_label (op1);
8821 return target;
8823 case COMPLEX_EXPR:
8824 /* Get the rtx code of the operands. */
8825 op0 = expand_normal (treeop0);
8826 op1 = expand_normal (treeop1);
8828 if (!target)
8829 target = gen_reg_rtx (TYPE_MODE (type));
8830 else
8831 /* If target overlaps with op1, then either we need to force
8832 op1 into a pseudo (if target also overlaps with op0),
8833 or write the complex parts in reverse order. */
8834 switch (GET_CODE (target))
8836 case CONCAT:
8837 if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8839 if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8841 complex_expr_force_op1:
8842 temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8843 emit_move_insn (temp, op1);
8844 op1 = temp;
8845 break;
8847 complex_expr_swap_order:
8848 /* Move the imaginary (op1) and real (op0) parts to their
8849 location. */
8850 write_complex_part (target, op1, true);
8851 write_complex_part (target, op0, false);
8853 return target;
8855 break;
8856 case MEM:
8857 temp = adjust_address_nv (target,
8858 GET_MODE_INNER (GET_MODE (target)), 0);
8859 if (reg_overlap_mentioned_p (temp, op1))
8861 enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8862 temp = adjust_address_nv (target, imode,
8863 GET_MODE_SIZE (imode));
8864 if (reg_overlap_mentioned_p (temp, op0))
8865 goto complex_expr_force_op1;
8866 goto complex_expr_swap_order;
8868 break;
8869 default:
8870 if (reg_overlap_mentioned_p (target, op1))
8872 if (reg_overlap_mentioned_p (target, op0))
8873 goto complex_expr_force_op1;
8874 goto complex_expr_swap_order;
8876 break;
8879 /* Move the real (op0) and imaginary (op1) parts to their location. */
8880 write_complex_part (target, op0, false);
8881 write_complex_part (target, op1, true);
8883 return target;
8885 case WIDEN_SUM_EXPR:
8887 tree oprnd0 = treeop0;
8888 tree oprnd1 = treeop1;
8890 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8891 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8892 target, unsignedp);
8893 return target;
8896 case REDUC_MAX_EXPR:
8897 case REDUC_MIN_EXPR:
8898 case REDUC_PLUS_EXPR:
8900 op0 = expand_normal (treeop0);
8901 this_optab = optab_for_tree_code (code, type, optab_default);
8902 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8903 gcc_assert (temp);
8904 return temp;
8907 case VEC_LSHIFT_EXPR:
8908 case VEC_RSHIFT_EXPR:
8910 target = expand_vec_shift_expr (ops, target);
8911 return target;
8914 case VEC_UNPACK_HI_EXPR:
8915 case VEC_UNPACK_LO_EXPR:
8917 op0 = expand_normal (treeop0);
8918 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8919 target, unsignedp);
8920 gcc_assert (temp);
8921 return temp;
8924 case VEC_UNPACK_FLOAT_HI_EXPR:
8925 case VEC_UNPACK_FLOAT_LO_EXPR:
8927 op0 = expand_normal (treeop0);
8928 /* The signedness is determined from the input operand. */
8929 temp = expand_widen_pattern_expr
8930 (ops, op0, NULL_RTX, NULL_RTX,
8931 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8933 gcc_assert (temp);
8934 return temp;
8937 case VEC_WIDEN_MULT_HI_EXPR:
8938 case VEC_WIDEN_MULT_LO_EXPR:
8939 case VEC_WIDEN_MULT_EVEN_EXPR:
8940 case VEC_WIDEN_MULT_ODD_EXPR:
8941 case VEC_WIDEN_LSHIFT_HI_EXPR:
8942 case VEC_WIDEN_LSHIFT_LO_EXPR:
8943 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8944 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8945 target, unsignedp);
8946 gcc_assert (target);
8947 return target;
8949 case VEC_PACK_TRUNC_EXPR:
8950 case VEC_PACK_SAT_EXPR:
8951 case VEC_PACK_FIX_TRUNC_EXPR:
8952 mode = TYPE_MODE (TREE_TYPE (treeop0));
8953 goto binop;
8955 case VEC_PERM_EXPR:
8956 expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
8957 op2 = expand_normal (treeop2);
8959 /* Careful here: if the target doesn't support integral vector modes,
8960 a constant selection vector could wind up smooshed into a normal
8961 integral constant. */
8962 if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
8964 tree sel_type = TREE_TYPE (treeop2);
8965 enum machine_mode vmode
8966 = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
8967 TYPE_VECTOR_SUBPARTS (sel_type));
8968 gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
8969 op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
8970 gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
8972 else
8973 gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
8975 temp = expand_vec_perm (mode, op0, op1, op2, target);
8976 gcc_assert (temp);
8977 return temp;
8979 case DOT_PROD_EXPR:
8981 tree oprnd0 = treeop0;
8982 tree oprnd1 = treeop1;
8983 tree oprnd2 = treeop2;
8984 rtx op2;
8986 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8987 op2 = expand_normal (oprnd2);
8988 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8989 target, unsignedp);
8990 return target;
8993 case REALIGN_LOAD_EXPR:
8995 tree oprnd0 = treeop0;
8996 tree oprnd1 = treeop1;
8997 tree oprnd2 = treeop2;
8998 rtx op2;
9000 this_optab = optab_for_tree_code (code, type, optab_default);
9001 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9002 op2 = expand_normal (oprnd2);
9003 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9004 target, unsignedp);
9005 gcc_assert (temp);
9006 return temp;
9009 case COND_EXPR:
9010 /* A COND_EXPR with its type being VOID_TYPE represents a
9011 conditional jump and is handled in
9012 expand_gimple_cond_expr. */
9013 gcc_assert (!VOID_TYPE_P (type));
9015 /* Note that COND_EXPRs whose type is a structure or union
9016 are required to be constructed to contain assignments of
9017 a temporary variable, so that we can evaluate them here
9018 for side effect only. If type is void, we must do likewise. */
9020 gcc_assert (!TREE_ADDRESSABLE (type)
9021 && !ignore
9022 && TREE_TYPE (treeop1) != void_type_node
9023 && TREE_TYPE (treeop2) != void_type_node);
9025 temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9026 if (temp)
9027 return temp;
9029 /* If we are not to produce a result, we have no target. Otherwise,
9030 if a target was specified, use it; it will not be used as an
9031 intermediate target unless it is safe. If no target, use a
9032 temporary. */
9034 if (modifier != EXPAND_STACK_PARM
9035 && original_target
9036 && safe_from_p (original_target, treeop0, 1)
9037 && GET_MODE (original_target) == mode
9038 && !MEM_P (original_target))
9039 temp = original_target;
9040 else
9041 temp = assign_temp (type, 0, 1);
9043 do_pending_stack_adjust ();
9044 NO_DEFER_POP;
9045 op0 = gen_label_rtx ();
9046 op1 = gen_label_rtx ();
9047 jumpifnot (treeop0, op0, -1);
9048 store_expr (treeop1, temp,
9049 modifier == EXPAND_STACK_PARM,
9050 false);
9052 emit_jump_insn (gen_jump (op1));
9053 emit_barrier ();
9054 emit_label (op0);
9055 store_expr (treeop2, temp,
9056 modifier == EXPAND_STACK_PARM,
9057 false);
9059 emit_label (op1);
9060 OK_DEFER_POP;
9061 return temp;
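/* Illustrative example (added sketch): when the cmove attempt fails,
   x = c ? a : b is laid out as a branch diamond

     jumpifnot c -> OP0;  temp = a;  goto OP1;
     OP0:  temp = b;
     OP1:

   which is exactly what the two store_expr calls and the OP0/OP1
   labels above emit.  */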
9063 case VEC_COND_EXPR:
9064 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9065 return target;
9067 default:
9068 gcc_unreachable ();
9071 /* Here to do an ordinary binary operator. */
9072 binop:
9073 expand_operands (treeop0, treeop1,
9074 subtarget, &op0, &op1, EXPAND_NORMAL);
9075 binop2:
9076 this_optab = optab_for_tree_code (code, type, optab_default);
9077 binop3:
9078 if (modifier == EXPAND_STACK_PARM)
9079 target = 0;
9080 temp = expand_binop (mode, this_optab, op0, op1, target,
9081 unsignedp, OPTAB_LIB_WIDEN);
9082 gcc_assert (temp);
9083 /* Bitwise operations do not need bitfield reduction as we expect their
9084 operands being properly truncated. */
9085 if (code == BIT_XOR_EXPR
9086 || code == BIT_AND_EXPR
9087 || code == BIT_IOR_EXPR)
9088 return temp;
9089 return REDUCE_BIT_FIELD (temp);
9091 #undef REDUCE_BIT_FIELD
9094 /* Return TRUE if expression STMT is suitable for replacement.
9095 Never consider memory loads as replaceable, because those don't ever lead
9096 into constant expressions. */
9098 static bool
9099 stmt_is_replaceable_p (gimple stmt)
9101 if (ssa_is_replaceable_p (stmt))
9103 /* Don't move around loads. */
9104 if (!gimple_assign_single_p (stmt)
9105 || is_gimple_val (gimple_assign_rhs1 (stmt)))
9106 return true;
9108 return false;
9111 rtx
9112 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9113 enum expand_modifier modifier, rtx *alt_rtl)
9115 rtx op0, op1, temp, decl_rtl;
9116 tree type;
9117 int unsignedp;
9118 enum machine_mode mode;
9119 enum tree_code code = TREE_CODE (exp);
9120 rtx subtarget, original_target;
9121 int ignore;
9122 tree context;
9123 bool reduce_bit_field;
9124 location_t loc = EXPR_LOCATION (exp);
9125 struct separate_ops ops;
9126 tree treeop0, treeop1, treeop2;
9127 tree ssa_name = NULL_TREE;
9128 gimple g;
9130 type = TREE_TYPE (exp);
9131 mode = TYPE_MODE (type);
9132 unsignedp = TYPE_UNSIGNED (type);
9134 treeop0 = treeop1 = treeop2 = NULL_TREE;
9135 if (!VL_EXP_CLASS_P (exp))
9136 switch (TREE_CODE_LENGTH (code))
9138 default:
9139 case 3: treeop2 = TREE_OPERAND (exp, 2);
9140 case 2: treeop1 = TREE_OPERAND (exp, 1);
9141 case 1: treeop0 = TREE_OPERAND (exp, 0);
9142 case 0: break;
9144 ops.code = code;
9145 ops.type = type;
9146 ops.op0 = treeop0;
9147 ops.op1 = treeop1;
9148 ops.op2 = treeop2;
9149 ops.location = loc;
9151 ignore = (target == const0_rtx
9152 || ((CONVERT_EXPR_CODE_P (code)
9153 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9154 && TREE_CODE (type) == VOID_TYPE));
9156 /* An operation in what may be a bit-field type needs the
9157 result to be reduced to the precision of the bit-field type,
9158 which is narrower than that of the type's mode. */
9159 reduce_bit_field = (!ignore
9160 && INTEGRAL_TYPE_P (type)
9161 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9163 /* If we are going to ignore this result, we need only do something
9164 if there is a side-effect somewhere in the expression. If there
9165 is, short-circuit the most common cases here. Note that we must
9166 not call expand_expr with anything but const0_rtx in case this
9167 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
9169 if (ignore)
9171 if (! TREE_SIDE_EFFECTS (exp))
9172 return const0_rtx;
9174 /* Ensure we reference a volatile object even if value is ignored, but
9175 don't do this if all we are doing is taking its address. */
9176 if (TREE_THIS_VOLATILE (exp)
9177 && TREE_CODE (exp) != FUNCTION_DECL
9178 && mode != VOIDmode && mode != BLKmode
9179 && modifier != EXPAND_CONST_ADDRESS)
9181 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9182 if (MEM_P (temp))
9183 copy_to_reg (temp);
9184 return const0_rtx;
9187 if (TREE_CODE_CLASS (code) == tcc_unary
9188 || code == BIT_FIELD_REF
9189 || code == COMPONENT_REF
9190 || code == INDIRECT_REF)
9191 return expand_expr (treeop0, const0_rtx, VOIDmode,
9192 modifier);
9194 else if (TREE_CODE_CLASS (code) == tcc_binary
9195 || TREE_CODE_CLASS (code) == tcc_comparison
9196 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9198 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9199 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9200 return const0_rtx;
9203 target = 0;
9206 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9207 target = 0;
9209 /* Use subtarget as the target for operand 0 of a binary operation. */
9210 subtarget = get_subtarget (target);
9211 original_target = target;
9213 switch (code)
9215 case LABEL_DECL:
9217 tree function = decl_function_context (exp);
9219 temp = label_rtx (exp);
9220 temp = gen_rtx_LABEL_REF (Pmode, temp);
9222 if (function != current_function_decl
9223 && function != 0)
9224 LABEL_REF_NONLOCAL_P (temp) = 1;
9226 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9227 return temp;
9230 case SSA_NAME:
9231 /* ??? ivopts calls the expander without any preparation from
9232 out-of-ssa, so fake instructions as if this were an access to the
9233 base variable. This unnecessarily allocates a pseudo; see whether we
9234 can reuse it when partition base vars have it set already. */
9235 if (!currently_expanding_to_rtl)
9237 tree var = SSA_NAME_VAR (exp);
9238 if (var && DECL_RTL_SET_P (var))
9239 return DECL_RTL (var);
9240 return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9241 LAST_VIRTUAL_REGISTER + 1);
9244 g = get_gimple_for_ssa_name (exp);
9245 /* For EXPAND_INITIALIZER try harder to get something simpler. */
9246 if (g == NULL
9247 && modifier == EXPAND_INITIALIZER
9248 && !SSA_NAME_IS_DEFAULT_DEF (exp)
9249 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9250 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9251 g = SSA_NAME_DEF_STMT (exp);
9252 if (g)
9254 rtx r;
9255 location_t saved_loc = curr_insn_location ();
9257 set_curr_insn_location (gimple_location (g));
9258 r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9259 tmode, modifier, NULL);
9260 set_curr_insn_location (saved_loc);
9261 if (REG_P (r) && !REG_EXPR (r))
9262 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9263 return r;
9266 ssa_name = exp;
9267 decl_rtl = get_rtx_for_ssa_name (ssa_name);
9268 exp = SSA_NAME_VAR (ssa_name);
9269 goto expand_decl_rtl;
9271 case PARM_DECL:
9272 case VAR_DECL:
9273 /* If a static var's type was incomplete when the decl was written,
9274 but the type is complete now, lay out the decl now. */
9275 if (DECL_SIZE (exp) == 0
9276 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9277 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9278 layout_decl (exp, 0);
9280 /* ... fall through ... */
9282 case FUNCTION_DECL:
9283 case RESULT_DECL:
9284 decl_rtl = DECL_RTL (exp);
9285 expand_decl_rtl:
9286 gcc_assert (decl_rtl);
9287 decl_rtl = copy_rtx (decl_rtl);
9288 /* Record writes to register variables. */
9289 if (modifier == EXPAND_WRITE
9290 && REG_P (decl_rtl)
9291 && HARD_REGISTER_P (decl_rtl))
9292 add_to_hard_reg_set (&crtl->asm_clobbers,
9293 GET_MODE (decl_rtl), REGNO (decl_rtl));
9295 /* Ensure the variable is marked as used even if it doesn't go through
9296 a parser. If it hasn't been used yet, write out an external
9297 definition. */
9298 TREE_USED (exp) = 1;
9300 /* Show we haven't gotten RTL for this yet. */
9301 temp = 0;
9303 /* Variables inherited from containing functions should have
9304 been lowered by this point. */
9305 context = decl_function_context (exp);
9306 gcc_assert (SCOPE_FILE_SCOPE_P (context)
9307 || context == current_function_decl
9308 || TREE_STATIC (exp)
9309 || DECL_EXTERNAL (exp)
9310 /* ??? C++ creates functions that are not TREE_STATIC. */
9311 || TREE_CODE (exp) == FUNCTION_DECL);
9313 /* This is the case of an array whose size is to be determined
9314 from its initializer, while the initializer is still being parsed.
9315 ??? We aren't parsing while expanding anymore. */
9317 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9318 temp = validize_mem (decl_rtl);
9320 /* If DECL_RTL is memory, we are in the normal case and the
9321 address is not valid, get the address into a register. */
9323 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9325 if (alt_rtl)
9326 *alt_rtl = decl_rtl;
9327 decl_rtl = use_anchored_address (decl_rtl);
9328 if (modifier != EXPAND_CONST_ADDRESS
9329 && modifier != EXPAND_SUM
9330 && !memory_address_addr_space_p (DECL_MODE (exp),
9331 XEXP (decl_rtl, 0),
9332 MEM_ADDR_SPACE (decl_rtl)))
9333 temp = replace_equiv_address (decl_rtl,
9334 copy_rtx (XEXP (decl_rtl, 0)));
9337 /* If we got something, return it. But first, set the alignment
9338 if the address is a register. */
9339 if (temp != 0)
9341 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9342 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9344 return temp;
9347 /* If the mode of DECL_RTL does not match that of the decl,
9348 there are two cases: we are dealing with a BLKmode value
9349 that is returned in a register, or we are dealing with
9350 a promoted value. In the latter case, return a SUBREG
9351 of the wanted mode, but mark it so that we know that it
9352 was already extended. */
9353 if (REG_P (decl_rtl)
9354 && DECL_MODE (exp) != BLKmode
9355 && GET_MODE (decl_rtl) != DECL_MODE (exp))
9357 enum machine_mode pmode;
9359 /* Get the signedness to be used for this variable. Ensure we get
9360 the same mode we got when the variable was declared. */
9361 if (code == SSA_NAME
9362 && (g = SSA_NAME_DEF_STMT (ssa_name))
9363 && gimple_code (g) == GIMPLE_CALL)
9365 gcc_assert (!gimple_call_internal_p (g));
9366 pmode = promote_function_mode (type, mode, &unsignedp,
9367 gimple_call_fntype (g),
9370 else
9371 pmode = promote_decl_mode (exp, &unsignedp);
9372 gcc_assert (GET_MODE (decl_rtl) == pmode);
9374 temp = gen_lowpart_SUBREG (mode, decl_rtl);
9375 SUBREG_PROMOTED_VAR_P (temp) = 1;
9376 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9377 return temp;
9380 return decl_rtl;
9382 case INTEGER_CST:
9383 temp = immed_double_const (TREE_INT_CST_LOW (exp),
9384 TREE_INT_CST_HIGH (exp), mode);
9386 return temp;
9388 case VECTOR_CST:
9390 tree tmp = NULL_TREE;
9391 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9392 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9393 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9394 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9395 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9396 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9397 return const_vector_from_tree (exp);
9398 if (GET_MODE_CLASS (mode) == MODE_INT)
9400 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9401 if (type_for_mode)
9402 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9404 if (!tmp)
9406 vec<constructor_elt, va_gc> *v;
9407 unsigned i;
9408 vec_alloc (v, VECTOR_CST_NELTS (exp));
9409 for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9410 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9411 tmp = build_constructor (type, v);
9413 return expand_expr (tmp, ignore ? const0_rtx : target,
9414 tmode, modifier);
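/* Editorial illustration (not from the original source): a V4SI
   constant { 0, 1, 2, 3 } whose vector mode is supported is turned
   directly into a CONST_VECTOR rtx by const_vector_from_tree; when
   the type's mode is an integer mode such as TImode instead, the
   VIEW_CONVERT_EXPR or, failing that, the CONSTRUCTOR fallback above
   is expanded.  */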
9417 case CONST_DECL:
9418 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9420 case REAL_CST:
9421 /* If optimized, generate immediate CONST_DOUBLE
9422 which will be turned into memory by reload if necessary.
9424 We used to force a register so that loop.c could see it. But
9425 this does not allow gen_* patterns to perform optimizations with
9426 the constants. It also produces two insns in cases like "x = 1.0;".
9427 On most machines, floating-point constants are not permitted in
9428 many insns, so we'd end up copying it to a register in any case.
9430 Now, we do the copying in expand_binop, if appropriate. */
9431 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9432 TYPE_MODE (TREE_TYPE (exp)));
9434 case FIXED_CST:
9435 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9436 TYPE_MODE (TREE_TYPE (exp)));
9438 case COMPLEX_CST:
9439 /* Handle evaluating a complex constant in a CONCAT target. */
9440 if (original_target && GET_CODE (original_target) == CONCAT)
9442 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9443 rtx rtarg, itarg;
9445 rtarg = XEXP (original_target, 0);
9446 itarg = XEXP (original_target, 1);
9448 /* Move the real and imaginary parts separately. */
9449 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9450 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9452 if (op0 != rtarg)
9453 emit_move_insn (rtarg, op0);
9454 if (op1 != itarg)
9455 emit_move_insn (itarg, op1);
9457 return original_target;
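/* Editorial illustration (not from the original source): given a
   target such as (concat:SC (reg:SF 100) (reg:SF 101)), the constant
   1.0 + 2.0i is expanded as two scalar moves, 1.0 into r100 and 2.0
   into r101, with no round trip through memory.  */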
9460 /* ... fall through ... */
9462 case STRING_CST:
9463 temp = expand_expr_constant (exp, 1, modifier);
9465 /* temp contains a constant address.
9466 On RISC machines where a constant address isn't valid,
9467 make some insns to get that address into a register. */
9468 if (modifier != EXPAND_CONST_ADDRESS
9469 && modifier != EXPAND_INITIALIZER
9470 && modifier != EXPAND_SUM
9471 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9472 MEM_ADDR_SPACE (temp)))
9473 return replace_equiv_address (temp,
9474 copy_rtx (XEXP (temp, 0)));
9475 return temp;
9477 case SAVE_EXPR:
9479 tree val = treeop0;
9480 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
9482 if (!SAVE_EXPR_RESOLVED_P (exp))
9484 /* We can indeed still hit this case, typically via builtin
9485 expanders calling save_expr immediately before expanding
9486 something. Assume this means that we only have to deal
9487 with non-BLKmode values. */
9488 gcc_assert (GET_MODE (ret) != BLKmode);
9490 val = build_decl (curr_insn_location (),
9491 VAR_DECL, NULL, TREE_TYPE (exp));
9492 DECL_ARTIFICIAL (val) = 1;
9493 DECL_IGNORED_P (val) = 1;
9494 treeop0 = val;
9495 TREE_OPERAND (exp, 0) = treeop0;
9496 SAVE_EXPR_RESOLVED_P (exp) = 1;
9498 if (!CONSTANT_P (ret))
9499 ret = copy_to_reg (ret);
9500 SET_DECL_RTL (val, ret);
9503 return ret;
9507 case CONSTRUCTOR:
9508 /* If we don't need the result, just ensure we evaluate any
9509 subexpressions. */
9510 if (ignore)
9512 unsigned HOST_WIDE_INT idx;
9513 tree value;
9515 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9516 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9518 return const0_rtx;
9521 return expand_constructor (exp, target, modifier, false);
9523 case TARGET_MEM_REF:
9525 addr_space_t as
9526 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9527 enum insn_code icode;
9528 unsigned int align;
9530 op0 = addr_for_mem_ref (exp, as, true);
9531 op0 = memory_address_addr_space (mode, op0, as);
9532 temp = gen_rtx_MEM (mode, op0);
9533 set_mem_attributes (temp, exp, 0);
9534 set_mem_addr_space (temp, as);
9535 align = get_object_alignment (exp);
9536 if (modifier != EXPAND_WRITE
9537 && modifier != EXPAND_MEMORY
9538 && mode != BLKmode
9539 && align < GET_MODE_ALIGNMENT (mode)
9540 /* If the target does not have special handling for unaligned
9541 loads of this mode, it can use regular moves for them. */
9542 && ((icode = optab_handler (movmisalign_optab, mode))
9543 != CODE_FOR_nothing))
9545 struct expand_operand ops[2];
9547 /* We've already validated the memory, and we're creating a
9548 new pseudo destination. The predicates really can't fail,
9549 nor can the generator. */
9550 create_output_operand (&ops[0], NULL_RTX, mode);
9551 create_fixed_operand (&ops[1], temp);
9552 expand_insn (icode, 2, ops);
9553 temp = ops[0].value;
9555 return temp;
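/* Editorial illustration (not from the original source): reading an
   SImode value through a pointer known to be only byte-aligned, on a
   strict-alignment target that provides the corresponding
   movmisalign pattern, goes through the expand_insn call above and
   loads the value into a fresh pseudo using the target's
   unaligned-load sequence rather than a plain move.  */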
9558 case MEM_REF:
9560 addr_space_t as
9561 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9562 enum machine_mode address_mode;
9563 tree base = TREE_OPERAND (exp, 0);
9564 gimple def_stmt;
9565 enum insn_code icode;
9566 unsigned align;
9567 /* Handle expansion of non-aliased memory with non-BLKmode. That
9568 might end up in a register. */
9569 if (mem_ref_refers_to_non_mem_p (exp))
9571 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9572 base = TREE_OPERAND (base, 0);
9573 if (offset == 0
9574 && host_integerp (TYPE_SIZE (type), 1)
9575 && (GET_MODE_BITSIZE (DECL_MODE (base))
9576 == TREE_INT_CST_LOW (TYPE_SIZE (type))))
9577 return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9578 target, tmode, modifier);
9579 if (TYPE_MODE (type) == BLKmode)
9581 temp = assign_stack_temp (DECL_MODE (base),
9582 GET_MODE_SIZE (DECL_MODE (base)));
9583 store_expr (base, temp, 0, false);
9584 temp = adjust_address (temp, BLKmode, offset);
9585 set_mem_size (temp, int_size_in_bytes (type));
9586 return temp;
9588 exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9589 bitsize_int (offset * BITS_PER_UNIT));
9590 return expand_expr (exp, target, tmode, modifier);
9592 address_mode = targetm.addr_space.address_mode (as);
9593 base = TREE_OPERAND (exp, 0);
9594 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9596 tree mask = gimple_assign_rhs2 (def_stmt);
9597 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9598 gimple_assign_rhs1 (def_stmt), mask);
9599 TREE_OPERAND (exp, 0) = base;
9601 align = get_object_alignment (exp);
9602 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9603 op0 = memory_address_addr_space (mode, op0, as);
9604 if (!integer_zerop (TREE_OPERAND (exp, 1)))
9606 rtx off
9607 = immed_double_int_const (mem_ref_offset (exp), address_mode);
9608 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9609 op0 = memory_address_addr_space (mode, op0, as);
9611 temp = gen_rtx_MEM (mode, op0);
9612 set_mem_attributes (temp, exp, 0);
9613 set_mem_addr_space (temp, as);
9614 if (TREE_THIS_VOLATILE (exp))
9615 MEM_VOLATILE_P (temp) = 1;
9616 if (modifier != EXPAND_WRITE
9617 && modifier != EXPAND_MEMORY
9618 && mode != BLKmode
9619 && align < GET_MODE_ALIGNMENT (mode))
9621 if ((icode = optab_handler (movmisalign_optab, mode))
9622 != CODE_FOR_nothing)
9624 struct expand_operand ops[2];
9626 /* We've already validated the memory, and we're creating a
9627 new pseudo destination. The predicates really can't fail,
9628 nor can the generator. */
9629 create_output_operand (&ops[0], NULL_RTX, mode);
9630 create_fixed_operand (&ops[1], temp);
9631 expand_insn (icode, 2, ops);
9632 temp = ops[0].value;
9634 else if (SLOW_UNALIGNED_ACCESS (mode, align))
9635 temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9636 0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9637 (modifier == EXPAND_STACK_PARM
9638 ? NULL_RTX : target),
9639 mode, mode);
9641 return temp;
9644 case ARRAY_REF:
9647 tree array = treeop0;
9648 tree index = treeop1;
9649 tree init;
9651 /* Fold an expression like: "foo"[2].
9652 This is not done in fold so it won't happen inside &.
9653 Don't fold if this is for wide characters since it's too
9654 difficult to do correctly and this is a very rare case. */
9656 if (modifier != EXPAND_CONST_ADDRESS
9657 && modifier != EXPAND_INITIALIZER
9658 && modifier != EXPAND_MEMORY)
9660 tree t = fold_read_from_constant_string (exp);
9662 if (t)
9663 return expand_expr (t, target, tmode, modifier);
9666 /* If this is a constant index into a constant array,
9667 just get the value from the array. Handle both the cases when
9668 we have an explicit constructor and when our operand is a variable
9669 that was declared const. */
9671 if (modifier != EXPAND_CONST_ADDRESS
9672 && modifier != EXPAND_INITIALIZER
9673 && modifier != EXPAND_MEMORY
9674 && TREE_CODE (array) == CONSTRUCTOR
9675 && ! TREE_SIDE_EFFECTS (array)
9676 && TREE_CODE (index) == INTEGER_CST)
9678 unsigned HOST_WIDE_INT ix;
9679 tree field, value;
9681 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9682 field, value)
9683 if (tree_int_cst_equal (field, index))
9685 if (!TREE_SIDE_EFFECTS (value))
9686 return expand_expr (fold (value), target, tmode, modifier);
9687 break;
9691 else if (optimize >= 1
9692 && modifier != EXPAND_CONST_ADDRESS
9693 && modifier != EXPAND_INITIALIZER
9694 && modifier != EXPAND_MEMORY
9695 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9696 && TREE_CODE (index) == INTEGER_CST
9697 && (TREE_CODE (array) == VAR_DECL
9698 || TREE_CODE (array) == CONST_DECL)
9699 && (init = ctor_for_folding (array)) != error_mark_node)
9701 if (TREE_CODE (init) == CONSTRUCTOR)
9703 unsigned HOST_WIDE_INT ix;
9704 tree field, value;
9706 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9707 field, value)
9708 if (tree_int_cst_equal (field, index))
9710 if (TREE_SIDE_EFFECTS (value))
9711 break;
9713 if (TREE_CODE (value) == CONSTRUCTOR)
9715 /* If VALUE is a CONSTRUCTOR, this
9716 optimization is only useful if
9717 this doesn't store the CONSTRUCTOR
9718 into memory. If it does, it is more
9719 efficient to just load the data from
9720 the array directly. */
9721 rtx ret = expand_constructor (value, target,
9722 modifier, true);
9723 if (ret == NULL_RTX)
9724 break;
9727 return
9728 expand_expr (fold (value), target, tmode, modifier);
9731 else if (TREE_CODE (init) == STRING_CST)
9733 tree low_bound = array_ref_low_bound (exp);
9734 tree index1 = fold_convert_loc (loc, sizetype, treeop1);
9736 /* Optimize the special case of a zero lower bound.
9738 We convert the lower bound to sizetype to avoid problems
9739 with constant folding. E.g. suppose the lower bound is
9740 1 and its mode is QI. Without the conversion
9741 (ARRAY + (INDEX - (unsigned char)1))
9742 becomes
9743 (ARRAY + (-(unsigned char)1) + INDEX)
9744 which becomes
9745 (ARRAY + 255 + INDEX). Oops! */
9746 if (!integer_zerop (low_bound))
9747 index1 = size_diffop_loc (loc, index1,
9748 fold_convert_loc (loc, sizetype,
9749 low_bound));
9751 if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
9753 tree type = TREE_TYPE (TREE_TYPE (init));
9754 enum machine_mode mode = TYPE_MODE (type);
9756 if (GET_MODE_CLASS (mode) == MODE_INT
9757 && GET_MODE_SIZE (mode) == 1)
9758 return gen_int_mode (TREE_STRING_POINTER (init)
9759 [TREE_INT_CST_LOW (index1)],
9760 mode);
9765 goto normal_inner_ref;
9767 case COMPONENT_REF:
9768 /* If the operand is a CONSTRUCTOR, we can just extract the
9769 appropriate field if it is present. */
9770 if (TREE_CODE (treeop0) == CONSTRUCTOR)
9772 unsigned HOST_WIDE_INT idx;
9773 tree field, value;
9775 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9776 idx, field, value)
9777 if (field == treeop1
9778 /* We can normally use the value of the field in the
9779 CONSTRUCTOR. However, if this is a bitfield in
9780 an integral mode that we can fit in a HOST_WIDE_INT,
9781 we must mask only the number of bits in the bitfield,
9782 since this is done implicitly by the constructor. If
9783 the bitfield does not meet either of those conditions,
9784 we can't do this optimization. */
9785 && (! DECL_BIT_FIELD (field)
9786 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9787 && (GET_MODE_PRECISION (DECL_MODE (field))
9788 <= HOST_BITS_PER_WIDE_INT))))
9790 if (DECL_BIT_FIELD (field)
9791 && modifier == EXPAND_STACK_PARM)
9792 target = 0;
9793 op0 = expand_expr (value, target, tmode, modifier);
9794 if (DECL_BIT_FIELD (field))
9796 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9797 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9799 if (TYPE_UNSIGNED (TREE_TYPE (field)))
9801 op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
9802 imode);
9803 op0 = expand_and (imode, op0, op1, target);
9805 else
9807 int count = GET_MODE_PRECISION (imode) - bitsize;
9809 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9810 target, 0);
9811 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9812 target, 0);
9816 return op0;
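/* Editorial illustration (not from the original source): extracting a
   3-bit field whose CONSTRUCTOR value is -3.  An unsigned field is
   masked with (1 << 3) - 1 == 7, giving 5; a signed field in SImode
   is shifted left by 29 bits and arithmetically back down,
   recreating the sign-extended value -3.  */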
9819 goto normal_inner_ref;
9821 case BIT_FIELD_REF:
9822 case ARRAY_RANGE_REF:
9823 normal_inner_ref:
9825 enum machine_mode mode1, mode2;
9826 HOST_WIDE_INT bitsize, bitpos;
9827 tree offset;
9828 int volatilep = 0, must_force_mem;
9829 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9830 &mode1, &unsignedp, &volatilep, true);
9831 rtx orig_op0, memloc;
9832 bool mem_attrs_from_type = false;
9834 /* If we got back the original object, something is wrong. Perhaps
9835 we are evaluating an expression too early. In any event, don't
9836 infinitely recurse. */
9837 gcc_assert (tem != exp);
9839 /* If TEM's type is a union of variable size, pass TARGET to the inner
9840 computation, since it will need a temporary and TARGET is known
9841 to suffice. This occurs in unchecked conversion in Ada. */
9842 orig_op0 = op0
9843 = expand_expr (tem,
9844 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9845 && COMPLETE_TYPE_P (TREE_TYPE (tem))
9846 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9847 != INTEGER_CST)
9848 && modifier != EXPAND_STACK_PARM
9849 ? target : NULL_RTX),
9850 VOIDmode,
9851 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
9853 /* If the bitfield is volatile, we want to access it in the
9854 field's mode, not the computed mode.
9855 If a MEM has VOIDmode (external with incomplete type),
9856 use BLKmode for it instead. */
9857 if (MEM_P (op0))
9859 if (volatilep && flag_strict_volatile_bitfields > 0)
9860 op0 = adjust_address (op0, mode1, 0);
9861 else if (GET_MODE (op0) == VOIDmode)
9862 op0 = adjust_address (op0, BLKmode, 0);
9865 mode2
9866 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9868 /* If we have either an offset, a BLKmode result, or a reference
9869 outside the underlying object, we must force it to memory.
9870 Such a case can occur in Ada if we have unchecked conversion
9871 of an expression from a scalar type to an aggregate type or
9872 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9873 passed a partially uninitialized object or a view-conversion
9874 to a larger size. */
9875 must_force_mem = (offset
9876 || mode1 == BLKmode
9877 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9879 /* Handle CONCAT first. */
9880 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9882 if (bitpos == 0
9883 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9884 return op0;
9885 if (bitpos == 0
9886 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9887 && bitsize)
9889 op0 = XEXP (op0, 0);
9890 mode2 = GET_MODE (op0);
9892 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9893 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9894 && bitpos
9895 && bitsize)
9897 op0 = XEXP (op0, 1);
9898 bitpos = 0;
9899 mode2 = GET_MODE (op0);
9901 else
9902 /* Otherwise force into memory. */
9903 must_force_mem = 1;
9906 /* If this is a constant, put it in a register if it is a legitimate
9907 constant and we don't need a memory reference. */
9908 if (CONSTANT_P (op0)
9909 && mode2 != BLKmode
9910 && targetm.legitimate_constant_p (mode2, op0)
9911 && !must_force_mem)
9912 op0 = force_reg (mode2, op0);
9914 /* Otherwise, if this is a constant, try to force it to the constant
9915 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9916 is a legitimate constant. */
9917 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9918 op0 = validize_mem (memloc);
9920 /* Otherwise, if this is a constant or the object is not in memory
9921 and needs to be, put it there. */
9922 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9924 tree nt = build_qualified_type (TREE_TYPE (tem),
9925 (TYPE_QUALS (TREE_TYPE (tem))
9926 | TYPE_QUAL_CONST));
9927 memloc = assign_temp (nt, 1, 1);
9928 emit_move_insn (memloc, op0);
9929 op0 = memloc;
9930 mem_attrs_from_type = true;
9933 if (offset)
9935 enum machine_mode address_mode;
9936 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9937 EXPAND_SUM);
9939 gcc_assert (MEM_P (op0));
9941 address_mode = get_address_mode (op0);
9942 if (GET_MODE (offset_rtx) != address_mode)
9943 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9945 if (GET_MODE (op0) == BLKmode
9946 /* A constant address in OP0 can have VOIDmode; we must
9947 not try to call force_reg in that case. */
9948 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9949 && bitsize != 0
9950 && (bitpos % bitsize) == 0
9951 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9952 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9954 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9955 bitpos = 0;
9958 op0 = offset_address (op0, offset_rtx,
9959 highest_pow2_factor (offset));
9962 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9963 record its alignment as BIGGEST_ALIGNMENT. */
9964 if (MEM_P (op0) && bitpos == 0 && offset != 0
9965 && is_aligning_offset (offset, tem))
9966 set_mem_align (op0, BIGGEST_ALIGNMENT);
9968 /* Don't forget about volatility even if this is a bitfield. */
9969 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9971 if (op0 == orig_op0)
9972 op0 = copy_rtx (op0);
9974 MEM_VOLATILE_P (op0) = 1;
9977 /* In cases where an aligned union has an unaligned object
9978 as a field, we might be extracting a BLKmode value from
9979 an integer-mode (e.g., SImode) object. Handle this case
9980 by doing the extract into an object as wide as the field
9981 (which we know to be the width of a basic mode), then
9982 storing into memory, and changing the mode to BLKmode. */
9983 if (mode1 == VOIDmode
9984 || REG_P (op0) || GET_CODE (op0) == SUBREG
9985 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9986 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9987 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9988 && modifier != EXPAND_CONST_ADDRESS
9989 && modifier != EXPAND_INITIALIZER
9990 && modifier != EXPAND_MEMORY)
9991 /* If the field is volatile, we always want an aligned
9992 access. Do this in the following two situations:
9993 1. the access is not already naturally
9994 aligned, otherwise "normal" (non-bitfield) volatile fields
9995 become non-addressable.
9996 2. the bitsize is narrower than the access size, so we need
9997 to extract bitfields from the access. */
9998 || (volatilep && flag_strict_volatile_bitfields > 0
9999 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10000 || (mode1 != BLKmode
10001 && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10002 /* If the field isn't aligned enough to fetch as a memref,
10003 fetch it as a bit field. */
10004 || (mode1 != BLKmode
10005 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10006 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10007 || (MEM_P (op0)
10008 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10009 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10010 && modifier != EXPAND_MEMORY
10011 && ((modifier == EXPAND_CONST_ADDRESS
10012 || modifier == EXPAND_INITIALIZER)
10013 ? STRICT_ALIGNMENT
10014 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10015 || (bitpos % BITS_PER_UNIT != 0)))
10016 /* If the type and the field are a constant size and the
10017 size of the type isn't the same size as the bitfield,
10018 we must use bitfield operations. */
10019 || (bitsize >= 0
10020 && TYPE_SIZE (TREE_TYPE (exp))
10021 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10022 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10023 bitsize)))
10025 enum machine_mode ext_mode = mode;
10027 if (ext_mode == BLKmode
10028 && ! (target != 0 && MEM_P (op0)
10029 && MEM_P (target)
10030 && bitpos % BITS_PER_UNIT == 0))
10031 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10033 if (ext_mode == BLKmode)
10035 if (target == 0)
10036 target = assign_temp (type, 1, 1);
10038 if (bitsize == 0)
10039 return target;
10041 /* In this case, BITPOS must start at a byte boundary and
10042 TARGET, if specified, must be a MEM. */
10043 gcc_assert (MEM_P (op0)
10044 && (!target || MEM_P (target))
10045 && !(bitpos % BITS_PER_UNIT));
10047 emit_block_move (target,
10048 adjust_address (op0, VOIDmode,
10049 bitpos / BITS_PER_UNIT),
10050 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10051 / BITS_PER_UNIT),
10052 (modifier == EXPAND_STACK_PARM
10053 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10055 return target;
10058 op0 = validize_mem (op0);
10060 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10061 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10063 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10064 (modifier == EXPAND_STACK_PARM
10065 ? NULL_RTX : target),
10066 ext_mode, ext_mode);
10068 /* If the result is a record type and BITSIZE is narrower than
10069 the mode of OP0, an integral mode, and this is a big endian
10070 machine, we must put the field into the high-order bits. */
10071 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10072 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10073 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10074 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10075 GET_MODE_BITSIZE (GET_MODE (op0))
10076 - bitsize, op0, 1);
10078 /* If the result type is BLKmode, store the data into a temporary
10079 of the appropriate type, but with the mode corresponding to the
10080 mode for the data we have (op0's mode). It's tempting to make
10081 this a constant type, since we know it's only being stored once,
10082 but that can cause problems if we are taking the address of this
10083 COMPONENT_REF because the MEM of any reference via that address
10084 will have flags corresponding to the type, which will not
10085 necessarily be constant. */
10086 if (mode == BLKmode)
10088 rtx new_rtx;
10090 new_rtx = assign_stack_temp_for_type (ext_mode,
10091 GET_MODE_BITSIZE (ext_mode),
10092 type);
10093 emit_move_insn (new_rtx, op0);
10094 op0 = copy_rtx (new_rtx);
10095 PUT_MODE (op0, BLKmode);
10098 return op0;
10101 /* If the result is BLKmode, use that to access the object
10102 now as well. */
10103 if (mode == BLKmode)
10104 mode1 = BLKmode;
10106 /* Get a reference to just this component. */
10107 if (modifier == EXPAND_CONST_ADDRESS
10108 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10109 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10110 else
10111 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10113 if (op0 == orig_op0)
10114 op0 = copy_rtx (op0);
10116 /* If op0 is a temporary because of forcing to memory, pass only the
10117 type to set_mem_attributes so that the original expression is never
10118 marked as ADDRESSABLE through MEM_EXPR of the temporary. */
10119 if (mem_attrs_from_type)
10120 set_mem_attributes (op0, type, 0);
10121 else
10122 set_mem_attributes (op0, exp, 0);
10124 if (REG_P (XEXP (op0, 0)))
10125 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10127 MEM_VOLATILE_P (op0) |= volatilep;
10128 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10129 || modifier == EXPAND_CONST_ADDRESS
10130 || modifier == EXPAND_INITIALIZER)
10131 return op0;
10133 if (target == 0)
10134 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10136 convert_move (target, op0, unsignedp);
10137 return target;
10140 case OBJ_TYPE_REF:
10141 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10143 case CALL_EXPR:
10144 /* All valid uses of __builtin_va_arg_pack () are removed during
10145 inlining. */
10146 if (CALL_EXPR_VA_ARG_PACK (exp))
10147 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10149 tree fndecl = get_callee_fndecl (exp), attr;
10151 if (fndecl
10152 && (attr = lookup_attribute ("error",
10153 DECL_ATTRIBUTES (fndecl))) != NULL)
10154 error ("%Kcall to %qs declared with attribute error: %s",
10155 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10156 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10157 if (fndecl
10158 && (attr = lookup_attribute ("warning",
10159 DECL_ATTRIBUTES (fndecl))) != NULL)
10160 warning_at (tree_nonartificial_location (exp),
10161 0, "%Kcall to %qs declared with attribute warning: %s",
10162 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10163 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10165 /* Check for a built-in function. */
10166 if (fndecl && DECL_BUILT_IN (fndecl))
10168 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10169 return expand_builtin (exp, target, subtarget, tmode, ignore);
10172 return expand_call (exp, target, ignore);
10174 case VIEW_CONVERT_EXPR:
10175 op0 = NULL_RTX;
10177 /* If we are converting to BLKmode, try to avoid an intermediate
10178 temporary by fetching an inner memory reference. */
10179 if (mode == BLKmode
10180 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10181 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10182 && handled_component_p (treeop0))
10184 enum machine_mode mode1;
10185 HOST_WIDE_INT bitsize, bitpos;
10186 tree offset;
10187 int unsignedp;
10188 int volatilep = 0;
10189 tree tem
10190 = get_inner_reference (treeop0, &bitsize, &bitpos,
10191 &offset, &mode1, &unsignedp, &volatilep,
10192 true);
10193 rtx orig_op0;
10195 /* ??? We should work harder and deal with non-zero offsets. */
10196 if (!offset
10197 && (bitpos % BITS_PER_UNIT) == 0
10198 && bitsize >= 0
10199 && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10201 /* See the normal_inner_ref case for the rationale. */
10202 orig_op0
10203 = expand_expr (tem,
10204 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10205 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10206 != INTEGER_CST)
10207 && modifier != EXPAND_STACK_PARM
10208 ? target : NULL_RTX),
10209 VOIDmode,
10210 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
10212 if (MEM_P (orig_op0))
10214 op0 = orig_op0;
10216 /* Get a reference to just this component. */
10217 if (modifier == EXPAND_CONST_ADDRESS
10218 || modifier == EXPAND_SUM
10219 || modifier == EXPAND_INITIALIZER)
10220 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10221 else
10222 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10224 if (op0 == orig_op0)
10225 op0 = copy_rtx (op0);
10227 set_mem_attributes (op0, treeop0, 0);
10228 if (REG_P (XEXP (op0, 0)))
10229 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10231 MEM_VOLATILE_P (op0) |= volatilep;
10236 if (!op0)
10237 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
10239 /* If the input and output modes are both the same, we are done. */
10240 if (mode == GET_MODE (op0))
10242 /* If neither mode is BLKmode, and both modes are the same size
10243 then we can use gen_lowpart. */
10244 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10245 && (GET_MODE_PRECISION (mode)
10246 == GET_MODE_PRECISION (GET_MODE (op0)))
10247 && !COMPLEX_MODE_P (GET_MODE (op0)))
10249 if (GET_CODE (op0) == SUBREG)
10250 op0 = force_reg (GET_MODE (op0), op0);
10251 temp = gen_lowpart_common (mode, op0);
10252 if (temp)
10253 op0 = temp;
10254 else
10256 if (!REG_P (op0) && !MEM_P (op0))
10257 op0 = force_reg (GET_MODE (op0), op0);
10258 op0 = gen_lowpart (mode, op0);
10261 /* If both types are integral, convert from one mode to the other. */
10262 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10263 op0 = convert_modes (mode, GET_MODE (op0), op0,
10264 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10265 /* As a last resort, spill op0 to memory, and reload it in a
10266 different mode. */
10267 else if (!MEM_P (op0))
10269 /* If the operand is not a MEM, force it into memory. Since we
10270 are going to be changing the mode of the MEM, don't call
10271 force_const_mem for constants because we don't allow pool
10272 constants to change mode. */
10273 tree inner_type = TREE_TYPE (treeop0);
10275 gcc_assert (!TREE_ADDRESSABLE (exp));
10277 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10278 target
10279 = assign_stack_temp_for_type
10280 (TYPE_MODE (inner_type),
10281 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10283 emit_move_insn (target, op0);
10284 op0 = target;
10287 /* At this point, OP0 is in the correct mode. If the output type is
10288 such that the operand is known to be aligned, indicate that it is.
10289 Otherwise, we need only be concerned about alignment for non-BLKmode
10290 results. */
10291 if (MEM_P (op0))
10293 enum insn_code icode;
10295 if (TYPE_ALIGN_OK (type))
10297 /* ??? Copying the MEM without substantially changing it might
10298 run afoul of the code handling volatile memory references in
10299 store_expr, which assumes that TARGET is returned unmodified
10300 if it has been used. */
10301 op0 = copy_rtx (op0);
10302 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10304 else if (mode != BLKmode
10305 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
10306 /* If the target does have special handling for unaligned
10307 loads of this mode, use it. */
10308 && ((icode = optab_handler (movmisalign_optab, mode))
10309 != CODE_FOR_nothing))
10311 rtx reg, insn;
10313 op0 = adjust_address (op0, mode, 0);
10314 /* We've already validated the memory, and we're creating a
10315 new pseudo destination. The predicates really can't
10316 fail. */
10317 reg = gen_reg_rtx (mode);
10319 /* Nor can the insn generator. */
10320 insn = GEN_FCN (icode) (reg, op0);
10321 emit_insn (insn);
10322 return reg;
10324 else if (STRICT_ALIGNMENT
10325 && mode != BLKmode
10326 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10328 tree inner_type = TREE_TYPE (treeop0);
10329 HOST_WIDE_INT temp_size
10330 = MAX (int_size_in_bytes (inner_type),
10331 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10332 rtx new_rtx
10333 = assign_stack_temp_for_type (mode, temp_size, type);
10334 rtx new_with_op0_mode
10335 = adjust_address (new_rtx, GET_MODE (op0), 0);
10337 gcc_assert (!TREE_ADDRESSABLE (exp));
10339 if (GET_MODE (op0) == BLKmode)
10340 emit_block_move (new_with_op0_mode, op0,
10341 GEN_INT (GET_MODE_SIZE (mode)),
10342 (modifier == EXPAND_STACK_PARM
10343 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10344 else
10345 emit_move_insn (new_with_op0_mode, op0);
10347 op0 = new_rtx;
10350 op0 = adjust_address (op0, mode, 0);
10353 return op0;
10355 case MODIFY_EXPR:
10357 tree lhs = treeop0;
10358 tree rhs = treeop1;
10359 gcc_assert (ignore);
10361 /* Check for |= or &= of a bitfield of size one into another bitfield
10362 of size 1. In this case, (unless we need the result of the
10363 assignment) we can do this more efficiently with a
10364 test followed by an assignment, if necessary.
10366 ??? At this point, we can't get a BIT_FIELD_REF here. But if
10367 things change so we do, this code should be enhanced to
10368 support it. */
10369 if (TREE_CODE (lhs) == COMPONENT_REF
10370 && (TREE_CODE (rhs) == BIT_IOR_EXPR
10371 || TREE_CODE (rhs) == BIT_AND_EXPR)
10372 && TREE_OPERAND (rhs, 0) == lhs
10373 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10374 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10375 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10377 rtx label = gen_label_rtx ();
10378 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10379 do_jump (TREE_OPERAND (rhs, 1),
10380 value ? label : 0,
10381 value ? 0 : label, -1);
10382 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10383 false);
10384 do_pending_stack_adjust ();
10385 emit_label (label);
10386 return const0_rtx;
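/* Editorial illustration (not from the original source): for one-bit
   bit-fields, "s.a |= s.b" is expanded by the code above into the
   equivalent of "if (s.b) s.a = 1;", i.e. a jump on the source bit
   followed by a store of the constant, avoiding a read-modify-write
   of the destination bit-field.  */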
10389 expand_assignment (lhs, rhs, false);
10390 return const0_rtx;
10393 case ADDR_EXPR:
10394 return expand_expr_addr_expr (exp, target, tmode, modifier);
10396 case REALPART_EXPR:
10397 op0 = expand_normal (treeop0);
10398 return read_complex_part (op0, false);
10400 case IMAGPART_EXPR:
10401 op0 = expand_normal (treeop0);
10402 return read_complex_part (op0, true);
10404 case RETURN_EXPR:
10405 case LABEL_EXPR:
10406 case GOTO_EXPR:
10407 case SWITCH_EXPR:
10408 case ASM_EXPR:
10409 /* Expanded in cfgexpand.c. */
10410 gcc_unreachable ();
10412 case TRY_CATCH_EXPR:
10413 case CATCH_EXPR:
10414 case EH_FILTER_EXPR:
10415 case TRY_FINALLY_EXPR:
10416 /* Lowered by tree-eh.c. */
10417 gcc_unreachable ();
10419 case WITH_CLEANUP_EXPR:
10420 case CLEANUP_POINT_EXPR:
10421 case TARGET_EXPR:
10422 case CASE_LABEL_EXPR:
10423 case VA_ARG_EXPR:
10424 case BIND_EXPR:
10425 case INIT_EXPR:
10426 case CONJ_EXPR:
10427 case COMPOUND_EXPR:
10428 case PREINCREMENT_EXPR:
10429 case PREDECREMENT_EXPR:
10430 case POSTINCREMENT_EXPR:
10431 case POSTDECREMENT_EXPR:
10432 case LOOP_EXPR:
10433 case EXIT_EXPR:
10434 case COMPOUND_LITERAL_EXPR:
10435 /* Lowered by gimplify.c. */
10436 gcc_unreachable ();
10438 case FDESC_EXPR:
10439 /* Function descriptors are not valid except as
10440 initialization constants, and should not be expanded. */
10441 gcc_unreachable ();
10443 case WITH_SIZE_EXPR:
10444 /* WITH_SIZE_EXPR expands to its first argument. The caller should
10445 have pulled out the size to use in whatever context it needed. */
10446 return expand_expr_real (treeop0, original_target, tmode,
10447 modifier, alt_rtl);
10449 default:
10450 return expand_expr_real_2 (&ops, target, tmode, modifier);
10454 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10455 signedness of TYPE), possibly returning the result in TARGET. */
10456 static rtx
10457 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10459 HOST_WIDE_INT prec = TYPE_PRECISION (type);
10460 if (target && GET_MODE (target) != GET_MODE (exp))
10461 target = 0;
10462 /* For constant values, reduce using build_int_cst_type. */
10463 if (CONST_INT_P (exp))
10465 HOST_WIDE_INT value = INTVAL (exp);
10466 tree t = build_int_cst_type (type, value);
10467 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10469 else if (TYPE_UNSIGNED (type))
10471 rtx mask = immed_double_int_const (double_int::mask (prec),
10472 GET_MODE (exp));
10473 return expand_and (GET_MODE (exp), exp, mask, target);
10475 else
10477 int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10478 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10479 exp, count, target, 0);
10480 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10481 exp, count, target, 0);
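/* Editorial illustration (not from the original source): reducing an
   SImode value to a 6-bit type.  An unsigned type is handled by
   ANDing with the mask 0x3f; a signed type is shifted left by
   32 - 6 == 26 bits and arithmetically shifted back, so e.g. 47
   (0x2f) correctly becomes -17.  */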
10485 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10486 when applied to the address of EXP produces an address known to be
10487 aligned more than BIGGEST_ALIGNMENT. */
10489 static int
10490 is_aligning_offset (const_tree offset, const_tree exp)
10492 /* Strip off any conversions. */
10493 while (CONVERT_EXPR_P (offset))
10494 offset = TREE_OPERAND (offset, 0);
10496 /* We must now have a BIT_AND_EXPR with a constant that is one less than
10497 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
10498 if (TREE_CODE (offset) != BIT_AND_EXPR
10499 || !host_integerp (TREE_OPERAND (offset, 1), 1)
10500 || compare_tree_int (TREE_OPERAND (offset, 1),
10501 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10502 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10503 return 0;
10505 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10506 It must be NEGATE_EXPR. Then strip any more conversions. */
10507 offset = TREE_OPERAND (offset, 0);
10508 while (CONVERT_EXPR_P (offset))
10509 offset = TREE_OPERAND (offset, 0);
10511 if (TREE_CODE (offset) != NEGATE_EXPR)
10512 return 0;
10514 offset = TREE_OPERAND (offset, 0);
10515 while (CONVERT_EXPR_P (offset))
10516 offset = TREE_OPERAND (offset, 0);
10518 /* This must now be the address of EXP. */
10519 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
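/* Editorial illustration (not from the original source; BUF and ALIGN
   are hypothetical names): the pattern recognized above arises from
   source along the lines of

     offset = (-(intptr_t) &buf) & (ALIGN - 1);

   with ALIGN a power of two exceeding BIGGEST_ALIGNMENT; adding
   OFFSET to &buf rounds the address up to an ALIGN boundary, so the
   resulting memory reference may be marked as maximally aligned.  */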
10522 /* Return the tree node if ARG corresponds to a string constant, or zero
10523 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
10524 in bytes within the string that ARG is accessing. The type of the
10525 offset will be `sizetype'. */
10527 tree
10528 string_constant (tree arg, tree *ptr_offset)
10530 tree array, offset, lower_bound;
10531 STRIP_NOPS (arg);
10533 if (TREE_CODE (arg) == ADDR_EXPR)
10535 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10537 *ptr_offset = size_zero_node;
10538 return TREE_OPERAND (arg, 0);
10540 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10542 array = TREE_OPERAND (arg, 0);
10543 offset = size_zero_node;
10545 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10547 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10548 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10549 if (TREE_CODE (array) != STRING_CST
10550 && TREE_CODE (array) != VAR_DECL)
10551 return 0;
10553 /* Check if the array has a nonzero lower bound. */
10554 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10555 if (!integer_zerop (lower_bound))
10557 /* If the offset and base aren't both constants, return 0. */
10558 if (TREE_CODE (lower_bound) != INTEGER_CST)
10559 return 0;
10560 if (TREE_CODE (offset) != INTEGER_CST)
10561 return 0;
10562 /* Adjust offset by the lower bound. */
10563 offset = size_diffop (fold_convert (sizetype, offset),
10564 fold_convert (sizetype, lower_bound));
10567 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10569 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10570 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10571 if (TREE_CODE (array) != ADDR_EXPR)
10572 return 0;
10573 array = TREE_OPERAND (array, 0);
10574 if (TREE_CODE (array) != STRING_CST
10575 && TREE_CODE (array) != VAR_DECL)
10576 return 0;
10578 else
10579 return 0;
10581 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10583 tree arg0 = TREE_OPERAND (arg, 0);
10584 tree arg1 = TREE_OPERAND (arg, 1);
10586 STRIP_NOPS (arg0);
10587 STRIP_NOPS (arg1);
10589 if (TREE_CODE (arg0) == ADDR_EXPR
10590 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10591 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10593 array = TREE_OPERAND (arg0, 0);
10594 offset = arg1;
10596 else if (TREE_CODE (arg1) == ADDR_EXPR
10597 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10598 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10600 array = TREE_OPERAND (arg1, 0);
10601 offset = arg0;
10603 else
10604 return 0;
10606 else
10607 return 0;
10609 if (TREE_CODE (array) == STRING_CST)
10611 *ptr_offset = fold_convert (sizetype, offset);
10612 return array;
10614 else if (TREE_CODE (array) == VAR_DECL
10615 || TREE_CODE (array) == CONST_DECL)
10617 int length;
10618 tree init = ctor_for_folding (array);
10620 /* Variables initialized to string literals can be handled too. */
10621 if (init == error_mark_node
10622 || !init
10623 || TREE_CODE (init) != STRING_CST)
10624 return 0;
10626 /* Avoid const char foo[4] = "abcde"; */
10627 if (DECL_SIZE_UNIT (array) == NULL_TREE
10628 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10629 || (length = TREE_STRING_LENGTH (init)) <= 0
10630 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10631 return 0;
10633 /* If the variable is bigger than the string literal, OFFSET must be
10634 constant and within the bounds of the string literal. */
10635 offset = fold_convert (sizetype, offset);
10636 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10637 && (! host_integerp (offset, 1)
10638 || compare_tree_int (offset, length) >= 0))
10639 return 0;
10641 *ptr_offset = offset;
10642 return init;
10645 return 0;
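/* Editorial illustration (not from the original source): for
   ARG == &"hello"[1] the function returns the STRING_CST "hello"
   with *PTR_OFFSET set to 1.  For a decl such as
   "static const char buf[8] = "hello";" the variable is bigger than
   the literal, so a non-constant or out-of-range offset makes the
   function return 0 rather than pretend bytes beyond the initializer
   are known.  */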
10648 /* Generate code to calculate the expression described by OPS, an
10649 exploded comparison, using a store-flag instruction, and return
10650 an rtx for the result.
10652 If TARGET is nonzero, store the result there if convenient.
10654 Return zero if there is no suitable set-flag instruction
10655 available on this machine.
10657 Once expand_expr has been called on the arguments of the comparison,
10658 we are committed to doing the store flag, since it is not safe to
10659 re-evaluate the expression. We emit the store-flag insn by calling
10660 emit_store_flag, but only expand the arguments if we have a reason
10661 to believe that emit_store_flag will be successful. If we think that
10662 it will, but it isn't, we have to simulate the store-flag with a
10663 set/jump/set sequence. */
10665 static rtx
10666 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10668 enum rtx_code code;
10669 tree arg0, arg1, type;
10670 tree tem;
10671 enum machine_mode operand_mode;
10672 int unsignedp;
10673 rtx op0, op1;
10674 rtx subtarget = target;
10675 location_t loc = ops->location;
10677 arg0 = ops->op0;
10678 arg1 = ops->op1;
10680 /* Don't crash if the comparison was erroneous. */
10681 if (arg0 == error_mark_node || arg1 == error_mark_node)
10682 return const0_rtx;
10684 type = TREE_TYPE (arg0);
10685 operand_mode = TYPE_MODE (type);
10686 unsignedp = TYPE_UNSIGNED (type);
10688 /* We won't bother with BLKmode store-flag operations because it would mean
10689 passing a lot of information to emit_store_flag. */
10690 if (operand_mode == BLKmode)
10691 return 0;
10693 /* We won't bother with store-flag operations involving function pointers
10694 when function pointers must be canonicalized before comparisons. */
10695 #ifdef HAVE_canonicalize_funcptr_for_compare
10696 if (HAVE_canonicalize_funcptr_for_compare
10697 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10698 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10699 == FUNCTION_TYPE))
10700 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10701 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10702 == FUNCTION_TYPE))))
10703 return 0;
10704 #endif
10706 STRIP_NOPS (arg0);
10707 STRIP_NOPS (arg1);
10709 /* For vector typed comparisons emit code to generate the desired
10710 all-ones or all-zeros mask. Conveniently use the VEC_COND_EXPR
10711 expander for this. */
10712 if (TREE_CODE (ops->type) == VECTOR_TYPE)
10714 tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10715 tree if_true = constant_boolean_node (true, ops->type);
10716 tree if_false = constant_boolean_node (false, ops->type);
10717 return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10720 /* Get the rtx comparison code to use. We know that EXP is a comparison
10721 operation of some type. Some comparisons against 1 and -1 can be
10722 converted to comparisons with zero. Do so here so that the tests
10723 below will be aware that we have a comparison with zero. These
10724 tests will not catch constants in the first operand, but constants
10725 are rarely passed as the first operand. */
10727 switch (ops->code)
10729 case EQ_EXPR:
10730 code = EQ;
10731 break;
10732 case NE_EXPR:
10733 code = NE;
10734 break;
10735 case LT_EXPR:
10736 if (integer_onep (arg1))
10737 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10738 else
10739 code = unsignedp ? LTU : LT;
10740 break;
10741 case LE_EXPR:
10742 if (! unsignedp && integer_all_onesp (arg1))
10743 arg1 = integer_zero_node, code = LT;
10744 else
10745 code = unsignedp ? LEU : LE;
10746 break;
10747 case GT_EXPR:
10748 if (! unsignedp && integer_all_onesp (arg1))
10749 arg1 = integer_zero_node, code = GE;
10750 else
10751 code = unsignedp ? GTU : GT;
10752 break;
10753 case GE_EXPR:
10754 if (integer_onep (arg1))
10755 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10756 else
10757 code = unsignedp ? GEU : GE;
10758 break;
10760 case UNORDERED_EXPR:
10761 code = UNORDERED;
10762 break;
10763 case ORDERED_EXPR:
10764 code = ORDERED;
10765 break;
10766 case UNLT_EXPR:
10767 code = UNLT;
10768 break;
10769 case UNLE_EXPR:
10770 code = UNLE;
10771 break;
10772 case UNGT_EXPR:
10773 code = UNGT;
10774 break;
10775 case UNGE_EXPR:
10776 code = UNGE;
10777 break;
10778 case UNEQ_EXPR:
10779 code = UNEQ;
10780 break;
10781 case LTGT_EXPR:
10782 code = LTGT;
10783 break;
10785 default:
10786 gcc_unreachable ();
10789 /* Put a constant second. */
10790 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10791 || TREE_CODE (arg0) == FIXED_CST)
10793 tem = arg0; arg0 = arg1; arg1 = tem;
10794 code = swap_condition (code);
10797 /* If this is an equality or inequality test of a single bit, we can
10798 do this by shifting the bit being tested to the low-order bit and
10799 masking the result with the constant 1. If the condition was EQ,
10800 we xor it with 1. This does not require an scc insn and is faster
10801 than an scc insn even if we have it.
10803 The code to make this transformation was moved into fold_single_bit_test,
10804 so we just call into the folder and expand its result. */
10806 if ((code == NE || code == EQ)
10807 && integer_zerop (arg1)
10808 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10810 gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10811 if (srcstmt
10812 && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10814 enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10815 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10816 tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10817 gimple_assign_rhs1 (srcstmt),
10818 gimple_assign_rhs2 (srcstmt));
10819 temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10820 if (temp)
10821 return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
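/* Editorial illustration (not from the original source):
   "(x & 8) != 0" reaches the code above, where fold_single_bit_test
   rewrites it to the equivalent of "(x >> 3) & 1"; the EQ variant
   "(x & 8) == 0" additionally XORs the result with 1, so no scc
   instruction is required either way.  */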
10825 if (! get_subtarget (target)
10826 || GET_MODE (subtarget) != operand_mode)
10827 subtarget = 0;
10829 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10831 if (target == 0)
10832 target = gen_reg_rtx (mode);
10834 /* Try a cstore if possible. */
10835 return emit_store_flag_force (target, code, op0, op1,
10836 operand_mode, unsignedp,
10837 (TYPE_PRECISION (ops->type) == 1
10838 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10842 /* Stubs in case we haven't got a casesi insn. */
10843 #ifndef HAVE_casesi
10844 # define HAVE_casesi 0
10845 # define gen_casesi(a, b, c, d, e) (0)
10846 # define CODE_FOR_casesi CODE_FOR_nothing
10847 #endif
10849 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10850 0 otherwise (i.e. if there is no casesi instruction).
10852 DEFAULT_PROBABILITY is the probability of jumping to the default
10853 label. */
10854 int
10855 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10856 rtx table_label, rtx default_label, rtx fallback_label,
10857 int default_probability)
10859 struct expand_operand ops[5];
10860 enum machine_mode index_mode = SImode;
10861 rtx op1, op2, index;
10863 if (! HAVE_casesi)
10864 return 0;
10866 /* Convert the index to SImode. */
10867 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10869 enum machine_mode omode = TYPE_MODE (index_type);
10870 rtx rangertx = expand_normal (range);
10872 /* We must handle the endpoints in the original mode. */
10873 index_expr = build2 (MINUS_EXPR, index_type,
10874 index_expr, minval);
10875 minval = integer_zero_node;
10876 index = expand_normal (index_expr);
10877 if (default_label)
10878 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10879 omode, 1, default_label,
10880 default_probability);
10881 /* Now we can safely truncate. */
10882 index = convert_to_mode (index_mode, index, 0);
10884 else
10886 if (TYPE_MODE (index_type) != index_mode)
10888 index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10889 index_expr = fold_convert (index_type, index_expr);
10892 index = expand_normal (index_expr);
10895 do_pending_stack_adjust ();
10897 op1 = expand_normal (minval);
10898 op2 = expand_normal (range);
10900 create_input_operand (&ops[0], index, index_mode);
10901 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10902 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10903 create_fixed_operand (&ops[3], table_label);
10904 create_fixed_operand (&ops[4], (default_label
10905 ? default_label
10906 : fallback_label));
10907 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10908 return 1;
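/* Editorial illustration (not from the original source): for a switch
   whose index expression is DImode on a target whose casesi pattern
   wants SImode, the code above subtracts MINVAL and performs the
   range check while still in DImode, and only then truncates the
   index to SImode, so no significant bits are discarded before the
   bounds test.  */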
10911 /* Attempt to generate a tablejump instruction; same concept. */
10912 #ifndef HAVE_tablejump
10913 #define HAVE_tablejump 0
10914 #define gen_tablejump(x, y) (0)
10915 #endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.
   DEFAULT_PROBABILITY is the probability of jumping to
   the default label.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label, int default_probability)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
    cfun->cfg->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                             default_label, default_probability);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS
    (Pmode,
     gen_rtx_MULT (Pmode, index,
                   gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE), Pmode)),
     gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
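
/* A worked example of the addressing above, assuming a target where
   CASE_VECTOR_MODE is SImode (4-byte table entries) and flag_pic is
   clear: an in-range INDEX of 2 becomes

     (plus:P (mult:P (reg index) (const_int 4))
             (label_ref table_label))

   i.e. the address TABLE_LABEL + 2 * 4, the third slot of the
   dispatch table, whose contents are then loaded and jumped to.  */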

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label, int default_probability)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            fold_convert (index_type, index_expr),
                            fold_convert (index_type, minval));
  index = expand_normal (index_expr);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_normal (range),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label, default_probability);
  return 1;
}
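
/* For illustration only: a hypothetical switch over cases 10 through
   13 reaches this point with MINVAL = 10 and RANGE = 3.  The
   fold_build2 above rewrites the index as (index - 10), after which
   the single unsigned comparison (index - 10) > 3 made inside
   do_tablejump rejects every out-of-range value, negative ones
   included.  */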

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  unsigned i;
  int units;
  tree elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
    {
      elt = VECTOR_CST_ELT (exp, i);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else if (TREE_CODE (elt) == FIXED_CST)
        RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
                                                   inner);
    }

  return gen_rtx_CONST_VECTOR (mode, v);
}
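
/* For illustration only: a V4SImode VECTOR_CST holding { 1, 2, 3, 4 }
   comes back from this function as

     (const_vector:V4SI [(const_int 1) (const_int 2)
                         (const_int 3) (const_int 4)])

   while an all-zeros constant short-circuits to CONST0_RTX (mode)
   before any rtvec is allocated.  */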

/* Build a decl for a personality function given a language prefix.  */

tree
build_personality_function (const char *lang)
{
  const char *unwind_and_version;
  tree decl, type;
  char *name;

  switch (targetm_common.except_unwind_info (&global_options))
    {
    case UI_NONE:
      return NULL;
    case UI_SJLJ:
      unwind_and_version = "_sj0";
      break;
    case UI_DWARF2:
    case UI_TARGET:
      unwind_and_version = "_v0";
      break;
    case UI_SEH:
      unwind_and_version = "_seh0";
      break;
    default:
      gcc_unreachable ();
    }

  name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));

  type = build_function_type_list (integer_type_node, integer_type_node,
                                   long_long_unsigned_type_node,
                                   ptr_type_node, ptr_type_node, NULL_TREE);
  decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
                     get_identifier (name), type);
  DECL_ARTIFICIAL (decl) = 1;
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;

  /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
     are the flags assigned by targetm.encode_section_info.  */
  SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);

  return decl;
}
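
/* For example, the C++ front end uses the prefix "gxx", so the decl
   built above is named __gxx_personality_v0 under DWARF-2 unwinding
   and __gxx_personality_sj0 under setjmp/longjmp exceptions.  */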

/* Extracts the personality function of DECL and returns the corresponding
   libfunc.  */

rtx
get_personality_function (tree decl)
{
  tree personality = DECL_FUNCTION_PERSONALITY (decl);
  enum eh_personality_kind pk;

  pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
  if (pk == eh_personality_none)
    return NULL;

  if (!personality
      && pk == eh_personality_any)
    personality = lang_hooks.eh_personality ();

  if (pk == eh_personality_lang)
    gcc_assert (personality != NULL_TREE);

  return XEXP (DECL_RTL (personality), 0);
}
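
/* For illustration only: a C++ function containing a catch handler
   needs its own language's routine, so PK is eh_personality_lang and
   the SYMBOL_REF for __gxx_personality_v0 (or a variant chosen by the
   unwind scheme) is returned; a function with no EH regions at all
   yields eh_personality_none and a NULL result.  */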

#include "gt-expr.h"