gcc/expr.c
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
56 #include "target-globals.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
64 #ifdef PUSH_ROUNDING
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
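/* For instance, on a typical x86 configuration STACK_GROWS_DOWNWARD is
   defined while ARGS_GROW_DOWNWARD is not, so PUSH_ARGS_REVERSED ends up
   defined and STACK_PUSH_CODE defaults to PRE_DEC.  */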
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces_d
94 {
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
106 };
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
111 struct store_by_pieces_d
112 {
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
122 };
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 unsigned int,
126 unsigned int);
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (sepops, rtx, enum machine_mode);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* This macro is used to determine whether move_by_pieces should be called
163 to perform a structure copy. */
164 #ifndef MOVE_BY_PIECES_P
165 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
166 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
167 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
168 #endif
170 /* This macro is used to determine whether clear_by_pieces should be
171 called to clear storage. */
172 #ifndef CLEAR_BY_PIECES_P
173 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
174 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
175 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
176 #endif
178 /* This macro is used to determine whether store_by_pieces should be
179 called to "memset" storage with byte values other than zero. */
180 #ifndef SET_BY_PIECES_P
181 #define SET_BY_PIECES_P(SIZE, ALIGN) \
182 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
183 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
184 #endif
186 /* This macro is used to determine whether store_by_pieces should be
187 called to "memcpy" storage when the source is a constant string. */
188 #ifndef STORE_BY_PIECES_P
189 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
190 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
191 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
192 #endif
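/* Illustrative sketch, not part of GCC itself: roughly how the
   move_by_pieces_ninsns count that the predicates above compare against
   MOVE_RATIO / CLEAR_RATIO / SET_RATIO comes about.  The real routine walks
   the target's integer modes and honors alignment; this standalone version
   simply assumes power-of-two pieces of up to MAX_PIECE bytes are usable.  */
#if 0 /* Example only.  */
static unsigned long
example_by_pieces_ninsns (unsigned long len, unsigned int max_piece)
{
  unsigned long n_insns = 0;
  unsigned int size;

  /* Use the widest piece first, then successively smaller ones.  */
  for (size = max_piece; size >= 1; size /= 2)
    {
      n_insns += len / size;
      len %= size;
    }
  return n_insns;
}
#endif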
194 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
198 #endif
200 /* This is run to set up which modes can be used
201 directly in memory and to initialize the block move optab. It is run
202 at the beginning of compilation and when the target is reinitialized. */
204 void
205 init_expr_target (void)
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 rtx reg;
213 /* Try indexing by frame ptr and try by stack ptr.
214 It is known that on the Convex the stack ptr isn't a valid index.
215 With luck, one or the other is valid on any machine. */
216 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
217 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219 /* A scratch register we can modify in-place below to avoid
220 useless RTL allocations. */
221 reg = gen_rtx_REG (VOIDmode, -1);
223 insn = rtx_alloc (INSN);
224 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
225 PATTERN (insn) = pat;
227 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
228 mode = (enum machine_mode) ((int) mode + 1))
230 int regno;
232 direct_load[(int) mode] = direct_store[(int) mode] = 0;
233 PUT_MODE (mem, mode);
234 PUT_MODE (mem1, mode);
235 PUT_MODE (reg, mode);
237 /* See if there is some register that can be used in this mode and
238 directly loaded or stored from memory. */
240 if (mode != VOIDmode && mode != BLKmode)
241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
242 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
243 regno++)
245 if (! HARD_REGNO_MODE_OK (regno, mode))
246 continue;
248 SET_REGNO (reg, regno);
250 SET_SRC (pat) = mem;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
255 SET_SRC (pat) = mem1;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem1;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
272 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
275 mode = GET_MODE_WIDER_MODE (mode))
277 enum machine_mode srcmode;
278 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
279 srcmode = GET_MODE_WIDER_MODE (srcmode))
281 enum insn_code ic;
283 ic = can_extend_p (mode, srcmode, 0);
284 if (ic == CODE_FOR_nothing)
285 continue;
287 PUT_MODE (mem, srcmode);
289 if (insn_operand_matches (ic, 1, mem))
290 float_extend_from_mem[mode][srcmode] = true;
295 /* This is run at the start of compiling a function. */
297 void
298 init_expr (void)
300 memset (&crtl->expr, 0, sizeof (crtl->expr));
303 /* Copy data from FROM to TO, where the machine modes are not the same.
304 Both modes may be integer, or both may be floating, or both may be
305 fixed-point.
306 UNSIGNEDP should be nonzero if FROM is an unsigned type.
307 This causes zero-extension instead of sign-extension. */
309 void
310 convert_move (rtx to, rtx from, int unsignedp)
312 enum machine_mode to_mode = GET_MODE (to);
313 enum machine_mode from_mode = GET_MODE (from);
314 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
315 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
316 enum insn_code code;
317 rtx libcall;
319 /* rtx code for making an equivalent value. */
320 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
321 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
324 gcc_assert (to_real == from_real);
325 gcc_assert (to_mode != BLKmode);
326 gcc_assert (from_mode != BLKmode);
328 /* If the source and destination are already the same, then there's
329 nothing to do. */
330 if (to == from)
331 return;
333 /* If FROM is a SUBREG that indicates that we have already done at least
334 the required extension, strip it. We don't handle such SUBREGs as
335 TO here. */
337 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
338 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
339 >= GET_MODE_SIZE (to_mode))
340 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
341 from = gen_lowpart (to_mode, from), from_mode = to_mode;
343 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345 if (to_mode == from_mode
346 || (from_mode == VOIDmode && CONSTANT_P (from)))
348 emit_move_insn (to, from);
349 return;
352 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356 if (VECTOR_MODE_P (to_mode))
357 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
358 else
359 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361 emit_move_insn (to, from);
362 return;
365 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
368 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
369 return;
372 if (to_real)
374 rtx value, insns;
375 convert_optab tab;
377 gcc_assert ((GET_MODE_PRECISION (from_mode)
378 != GET_MODE_PRECISION (to_mode))
379 || (DECIMAL_FLOAT_MODE_P (from_mode)
380 != DECIMAL_FLOAT_MODE_P (to_mode)));
382 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
383 /* Conversion between decimal float and binary float, same size. */
384 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
385 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
386 tab = sext_optab;
387 else
388 tab = trunc_optab;
390 /* Try converting directly if the insn is supported. */
392 code = convert_optab_handler (tab, to_mode, from_mode);
393 if (code != CODE_FOR_nothing)
395 emit_unop_insn (code, to, from,
396 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
397 return;
400 /* Otherwise use a libcall. */
401 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403 /* Is this conversion implemented yet? */
404 gcc_assert (libcall);
406 start_sequence ();
407 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
408 1, from, from_mode);
409 insns = get_insns ();
410 end_sequence ();
411 emit_libcall_block (insns, to, value,
412 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
413 from)
414 : gen_rtx_FLOAT_EXTEND (to_mode, from));
415 return;
418 /* Handle pointer conversion. */ /* SPEE 900220. */
419 /* Targets are expected to provide conversion insns between PxImode and
420 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
421 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 enum machine_mode full_mode
424 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
427 != CODE_FOR_nothing);
429 if (full_mode != from_mode)
430 from = convert_to_mode (full_mode, from, unsignedp);
431 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
432 to, from, UNKNOWN);
433 return;
435 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437 rtx new_from;
438 enum machine_mode full_mode
439 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
442 != CODE_FOR_nothing);
444 if (to_mode == full_mode)
446 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
447 from_mode),
448 to, from, UNKNOWN);
449 return;
452 new_from = gen_reg_rtx (full_mode);
453 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
454 new_from, from, UNKNOWN);
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
477 /* Now both modes are integers. */
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
481 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
483 rtx insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
504 /* Next, try converting via full word. */
505 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_insn (insns);
565 return;
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
570 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
576 || REG_P (from)
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
580 return;
583 /* Now follow all the conversions between integers
584 no more than a word long. */
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
589 GET_MODE_BITSIZE (from_mode)))
591 if (!((MEM_P (from)
592 && ! MEM_VOLATILE_P (from)
593 && direct_load[(int) to_mode]
594 && ! mode_dependent_address_p (XEXP (from, 0)))
595 || REG_P (from)
596 || GET_CODE (from) == SUBREG))
597 from = force_reg (from_mode, from);
598 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
599 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
600 from = copy_to_reg (from);
601 emit_move_insn (to, gen_lowpart (to_mode, from));
602 return;
605 /* Handle extension. */
606 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
608 /* Convert directly if that works. */
609 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
610 != CODE_FOR_nothing)
612 emit_unop_insn (code, to, from, equiv_code);
613 return;
615 else
617 enum machine_mode intermediate;
618 rtx tmp;
619 tree shift_amount;
621 /* Search for a mode to convert via. */
622 for (intermediate = from_mode; intermediate != VOIDmode;
623 intermediate = GET_MODE_WIDER_MODE (intermediate))
624 if (((can_extend_p (to_mode, intermediate, unsignedp)
625 != CODE_FOR_nothing)
626 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (intermediate))))
629 && (can_extend_p (intermediate, from_mode, unsignedp)
630 != CODE_FOR_nothing))
632 convert_move (to, convert_to_mode (intermediate, from,
633 unsignedp), unsignedp);
634 return;
637 /* No suitable intermediate mode.
638 Generate what we need with shifts. */
639 shift_amount = build_int_cst (NULL_TREE,
640 GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
644 to, unsignedp);
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
646 to, unsignedp);
647 if (tmp != to)
648 emit_move_insn (to, tmp);
649 return;
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
658 to, from, UNKNOWN);
659 return;
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
673 return;
676 /* Mode combination is not recognized. */
677 gcc_unreachable ();
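/* Illustrative usage sketch, not taken from this file: inside an expansion
   routine, widening a QImode pseudo into an SImode pseudo with zero
   extension would typically look like the following.  */
#if 0 /* Example only.  */
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);
  /* ... narrow gets set here ... */
  convert_move (wide, narrow, /*unsignedp=*/1);
  /* Or, to obtain a fresh rtx already in the wider mode:  */
  wide = convert_to_mode (SImode, narrow, /*unsignedp=*/1);
#endif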
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
690 return convert_modes (mode, VOIDmode, x, unsignedp);
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
706 rtx temp;
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
719 if (mode == oldmode)
720 return x;
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong thing if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
732 double_int val = uhwi_to_double_int (INTVAL (x));
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
738 return immed_double_int_const (val, mode);
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
746 if ((CONST_INT_P (x)
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
754 || (REG_P (x)
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
760 /* ?? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
772 if (! unsignedp
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
776 return gen_int_mode (val, mode);
779 return gen_lowpart (mode, x);
782 /* Converting an integer constant into a vector mode is always equivalent
783 to a subreg operation. */
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
792 return temp;
795 /* Return the largest alignment we can use for doing a move (or store)
796 of MAX_PIECES. ALIGN is the largest alignment we could use. */
798 static unsigned int
799 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
801 enum machine_mode tmode;
803 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
804 if (align >= GET_MODE_ALIGNMENT (tmode))
805 align = GET_MODE_ALIGNMENT (tmode);
806 else
808 enum machine_mode tmode, xmode;
810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
811 tmode != VOIDmode;
812 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
813 if (GET_MODE_SIZE (tmode) > max_pieces
814 || SLOW_UNALIGNED_ACCESS (tmode, align))
815 break;
817 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
820 return align;
823 /* Return the widest integer mode narrower than SIZE bytes. If no such
824 mode can be found, return VOIDmode. */
826 static enum machine_mode
827 widest_int_mode_for_size (unsigned int size)
829 enum machine_mode tmode, mode = VOIDmode;
831 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
832 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
833 if (GET_MODE_SIZE (tmode) < size)
834 mode = tmode;
836 return mode;
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
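/* For example, with 8-byte MOVE_MAX_PIECES and a 64-bit HOST_WIDE_INT this
   evaluates to MIN (8, 16), i.e. 8 bytes.  */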
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
848 succeed. */
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
854 return MOVE_BY_PIECES_P (len, align);
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
863 ALIGN is maximum stack alignment we can assume.
865 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
866 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
867 stpcpy. */
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
873 struct move_by_pieces_d data;
874 enum machine_mode to_addr_mode, from_addr_mode
875 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
876 rtx to_addr, from_addr = XEXP (from, 0);
877 unsigned int max_size = MOVE_MAX_PIECES + 1;
878 enum insn_code icode;
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
882 data.offset = 0;
883 data.from_addr = from_addr;
884 if (to)
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
888 data.to = to;
889 data.autinc_to
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
892 data.reverse
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
895 else
897 to_addr_mode = VOIDmode;
898 to_addr = NULL_RTX;
899 data.to = NULL_RTX;
900 data.autinc_to = 1;
901 #ifdef STACK_GROWS_DOWNWARD
902 data.reverse = 1;
903 #else
904 data.reverse = 0;
905 #endif
907 data.to_addr = to_addr;
908 data.from = from;
909 data.autinc_from
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
917 data.len = len;
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
925 /* Find the mode of the largest move...
926 MODE might not be used depending on the definitions of the
927 USE_* macros below. */
928 enum machine_mode mode ATTRIBUTE_UNUSED
929 = widest_int_mode_for_size (max_size);
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
950 data.autinc_to = 1;
951 data.explicit_inc_to = -1;
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
956 data.autinc_to = 1;
957 data.explicit_inc_to = 1;
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
968 while (max_size > 1)
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
972 if (mode == VOIDmode)
973 break;
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
979 max_size = GET_MODE_SIZE (mode);
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
985 if (endp)
987 rtx to1;
989 gcc_assert (!data.reverse);
990 if (data.autinc_to)
992 if (endp == 2)
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
996 else
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (data.to_addr,
999 -1));
1001 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1002 data.offset);
1004 else
1006 if (endp == 2)
1007 --data.offset;
1008 to1 = adjust_address (data.to, QImode, data.offset);
1010 return to1;
1012 else
1013 return data.to;
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bits) is maximum alignment we can assume. */
1019 static unsigned HOST_WIDE_INT
1020 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1021 unsigned int max_size)
1023 unsigned HOST_WIDE_INT n_insns = 0;
1025 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1027 while (max_size > 1)
1029 enum machine_mode mode;
1030 enum insn_code icode;
1032 mode = widest_int_mode_for_size (max_size);
1034 if (mode == VOIDmode)
1035 break;
1037 icode = optab_handler (mov_optab, mode);
1038 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1039 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1041 max_size = GET_MODE_SIZE (mode);
1044 gcc_assert (!l);
1045 return n_insns;
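/* Worked example (illustrative): with only 4-byte alignment on a
   strict-alignment target, an 11-byte copy cannot use DImode, so the loop
   above counts 2 SImode + 1 HImode + 1 QImode moves, i.e. 4 insns.  */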
1048 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1052 static void
1053 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1054 struct move_by_pieces_d *data)
1056 unsigned int size = GET_MODE_SIZE (mode);
1057 rtx to1 = NULL_RTX, from1;
1059 while (data->len >= size)
1061 if (data->reverse)
1062 data->offset -= size;
1064 if (data->to)
1066 if (data->autinc_to)
1067 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1068 data->offset);
1069 else
1070 to1 = adjust_address (data->to, mode, data->offset);
1073 if (data->autinc_from)
1074 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1075 data->offset);
1076 else
1077 from1 = adjust_address (data->from, mode, data->offset);
1079 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1080 emit_insn (gen_add2_insn (data->to_addr,
1081 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1083 emit_insn (gen_add2_insn (data->from_addr,
1084 GEN_INT (-(HOST_WIDE_INT)size)));
1086 if (data->to)
1087 emit_insn ((*genfun) (to1, from1));
1088 else
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1092 #else
1093 gcc_unreachable ();
1094 #endif
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1099 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1100 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1102 if (! data->reverse)
1103 data->offset += size;
1105 data->len -= size;
1109 /* Emit code to move a block Y to a block X. This may be done with
1110 string-move instructions, with multiple scalar move instructions,
1111 or with a library call.
1113 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1114 SIZE is an rtx that says how long they are.
1115 ALIGN is the maximum alignment we can assume they have.
1116 METHOD describes what kind of copy this is, and what mechanisms may be used.
1118 Return the address of the new block, if memcpy is called and returns it,
1119 0 otherwise. */
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size)
1125 bool may_use_call;
1126 rtx retval = 0;
1127 unsigned int align;
1129 gcc_assert (size);
1130 if (CONST_INT_P (size)
1131 && INTVAL (size) == 0)
1132 return 0;
1134 switch (method)
1136 case BLOCK_OP_NORMAL:
1137 case BLOCK_OP_TAILCALL:
1138 may_use_call = true;
1139 break;
1141 case BLOCK_OP_CALL_PARM:
1142 may_use_call = block_move_libcall_safe_for_call_parm ();
1144 /* Make inhibit_defer_pop nonzero around the library call
1145 to force it to pop the arguments right away. */
1146 NO_DEFER_POP;
1147 break;
1149 case BLOCK_OP_NO_LIBCALL:
1150 may_use_call = false;
1151 break;
1153 default:
1154 gcc_unreachable ();
1157 gcc_assert (MEM_P (x) && MEM_P (y));
1158 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1159 gcc_assert (align >= BITS_PER_UNIT);
1161 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1162 block copy is more efficient for other large modes, e.g. DCmode. */
1163 x = adjust_address (x, BLKmode, 0);
1164 y = adjust_address (y, BLKmode, 0);
1166 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1167 can be incorrect is coming from __builtin_memcpy. */
1168 if (CONST_INT_P (size))
1170 x = shallow_copy_rtx (x);
1171 y = shallow_copy_rtx (y);
1172 set_mem_size (x, size);
1173 set_mem_size (y, size);
1176 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1177 move_by_pieces (x, y, INTVAL (size), align, 0);
1178 else if (emit_block_move_via_movmem (x, y, size, align,
1179 expected_align, expected_size))
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1184 retval = emit_block_move_via_libcall (x, y, size,
1185 method == BLOCK_OP_TAILCALL);
1186 else
1187 emit_block_move_via_loop (x, y, size, align);
1189 if (method == BLOCK_OP_CALL_PARM)
1190 OK_DEFER_POP;
1192 return retval;
1196 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1198 return emit_block_move_hints (x, y, size, method, 0, -1);
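/* Illustrative usage sketch, not taken from this file: expanding a
   fixed-size copy between two BLKmode MEMs X and Y.  BLOCK_OP_NORMAL
   allows a memcpy libcall to be emitted if that looks cheapest.  */
#if 0 /* Example only.  */
  emit_block_move (x, y, GEN_INT (32), BLOCK_OP_NORMAL);
#endif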
1201 /* A subroutine of emit_block_move. Returns true if calling the
1202 block move libcall will not clobber any parameters which may have
1203 already been placed on the stack. */
1205 static bool
1206 block_move_libcall_safe_for_call_parm (void)
1208 #if defined (REG_PARM_STACK_SPACE)
1209 tree fn;
1210 #endif
1212 /* If arguments are pushed on the stack, then they're safe. */
1213 if (PUSH_ARGS)
1214 return true;
1216 /* If registers go on the stack anyway, any argument is sure to clobber
1217 an outgoing argument. */
1218 #if defined (REG_PARM_STACK_SPACE)
1219 fn = emit_block_move_libcall_fn (false);
1220 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1221 depend on its argument. */
1222 (void) fn;
1223 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1224 && REG_PARM_STACK_SPACE (fn) != 0)
1225 return false;
1226 #endif
1228 /* If any argument goes in memory, then it might clobber an outgoing
1229 argument. */
1231 CUMULATIVE_ARGS args_so_far;
1232 tree fn, arg;
1234 fn = emit_block_move_libcall_fn (false);
1235 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1237 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1238 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1240 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1241 rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
1242 NULL_TREE, true);
1243 if (!tmp || !REG_P (tmp))
1244 return false;
1245 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1246 return false;
1247 targetm.calls.function_arg_advance (&args_so_far, mode,
1248 NULL_TREE, true);
1251 return true;
1254 /* A subroutine of emit_block_move. Expand a movmem pattern;
1255 return true if successful. */
1257 static bool
1258 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1259 unsigned int expected_align, HOST_WIDE_INT expected_size)
1261 int save_volatile_ok = volatile_ok;
1262 enum machine_mode mode;
1264 if (expected_align < align)
1265 expected_align = align;
1267 /* Since this is a move insn, we don't care about volatility. */
1268 volatile_ok = 1;
1270 /* Try the most limited insn first, because there's no point
1271 including more than one in the machine description unless
1272 the more limited one has some advantage. */
1274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1275 mode = GET_MODE_WIDER_MODE (mode))
1277 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1279 if (code != CODE_FOR_nothing
1280 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1281 here because if SIZE is less than the mode mask, as it is
1282 returned by the macro, it will definitely be less than the
1283 actual mode mask. */
1284 && ((CONST_INT_P (size)
1285 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1286 <= (GET_MODE_MASK (mode) >> 1)))
1287 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1289 struct expand_operand ops[6];
1290 unsigned int nops;
1292 /* ??? When called via emit_block_move_for_call, it'd be
1293 nice if there were some way to inform the backend, so
1294 that it doesn't fail the expansion because it thinks
1295 emitting the libcall would be more efficient. */
1296 nops = insn_data[(int) code].n_generator_args;
1297 gcc_assert (nops == 4 || nops == 6);
1299 create_fixed_operand (&ops[0], x);
1300 create_fixed_operand (&ops[1], y);
1301 /* The check above guarantees that this size conversion is valid. */
1302 create_convert_operand_to (&ops[2], size, mode, true);
1303 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1304 if (nops == 6)
1306 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1307 create_integer_operand (&ops[5], expected_size);
1309 if (maybe_expand_insn (code, nops, ops))
1311 volatile_ok = save_volatile_ok;
1312 return true;
1317 volatile_ok = save_volatile_ok;
1318 return false;
1321 /* A subroutine of emit_block_move. Expand a call to memcpy.
1322 Return the return value from memcpy, 0 otherwise. */
1325 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1327 rtx dst_addr, src_addr;
1328 tree call_expr, fn, src_tree, dst_tree, size_tree;
1329 enum machine_mode size_mode;
1330 rtx retval;
1332 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1333 pseudos. We can then place those new pseudos into a VAR_DECL and
1334 use them later. */
1336 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1337 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1339 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1340 src_addr = convert_memory_address (ptr_mode, src_addr);
1342 dst_tree = make_tree (ptr_type_node, dst_addr);
1343 src_tree = make_tree (ptr_type_node, src_addr);
1345 size_mode = TYPE_MODE (sizetype);
1347 size = convert_to_mode (size_mode, size, 1);
1348 size = copy_to_mode_reg (size_mode, size);
1350 /* It is incorrect to use the libcall calling conventions to call
1351 memcpy in this context. This could be a user call to memcpy and
1352 the user may wish to examine the return value from memcpy. For
1353 targets where libcalls and normal calls have different conventions
1354 for returning pointers, we could end up generating incorrect code. */
1356 size_tree = make_tree (sizetype, size);
1358 fn = emit_block_move_libcall_fn (true);
1359 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1360 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1362 retval = expand_normal (call_expr);
1364 return retval;
1367 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1368 for the function we use for block copies. The first time FOR_CALL
1369 is true, we call assemble_external. */
1371 static GTY(()) tree block_move_fn;
1373 void
1374 init_block_move_fn (const char *asmspec)
1376 if (!block_move_fn)
1378 tree args, fn;
1380 fn = get_identifier ("memcpy");
1381 args = build_function_type_list (ptr_type_node, ptr_type_node,
1382 const_ptr_type_node, sizetype,
1383 NULL_TREE);
1385 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1386 DECL_EXTERNAL (fn) = 1;
1387 TREE_PUBLIC (fn) = 1;
1388 DECL_ARTIFICIAL (fn) = 1;
1389 TREE_NOTHROW (fn) = 1;
1390 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1391 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1393 block_move_fn = fn;
1396 if (asmspec)
1397 set_user_assembler_name (block_move_fn, asmspec);
1400 static tree
1401 emit_block_move_libcall_fn (int for_call)
1403 static bool emitted_extern;
1405 if (!block_move_fn)
1406 init_block_move_fn (NULL);
1408 if (for_call && !emitted_extern)
1410 emitted_extern = true;
1411 make_decl_rtl (block_move_fn);
1412 assemble_external (block_move_fn);
1415 return block_move_fn;
1418 /* A subroutine of emit_block_move. Copy the data via an explicit
1419 loop. This is used only when libcalls are forbidden. */
1420 /* ??? It'd be nice to copy in hunks larger than QImode. */
1422 static void
1423 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1424 unsigned int align ATTRIBUTE_UNUSED)
1426 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1427 enum machine_mode x_addr_mode
1428 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1429 enum machine_mode y_addr_mode
1430 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1431 enum machine_mode iter_mode;
1433 iter_mode = GET_MODE (size);
1434 if (iter_mode == VOIDmode)
1435 iter_mode = word_mode;
1437 top_label = gen_label_rtx ();
1438 cmp_label = gen_label_rtx ();
1439 iter = gen_reg_rtx (iter_mode);
1441 emit_move_insn (iter, const0_rtx);
1443 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1444 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1445 do_pending_stack_adjust ();
1447 emit_jump (cmp_label);
1448 emit_label (top_label);
1450 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1451 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1453 if (x_addr_mode != y_addr_mode)
1454 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1455 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1457 x = change_address (x, QImode, x_addr);
1458 y = change_address (y, QImode, y_addr);
1460 emit_move_insn (x, y);
1462 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1463 true, OPTAB_LIB_WIDEN);
1464 if (tmp != iter)
1465 emit_move_insn (iter, tmp);
1467 emit_label (cmp_label);
1469 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1470 true, top_label);
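/* For reference (illustrative only): the RTL emitted above behaves like
   this byte-at-a-time C loop, with ITER playing the role of I.  */
#if 0 /* Example only.  */
static void
example_block_move_loop (unsigned char *x, const unsigned char *y,
                         unsigned long size)
{
  unsigned long i;

  for (i = 0; i < size; i++)
    x[i] = y[i];
}
#endif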
1473 /* Copy all or part of a value X into registers starting at REGNO.
1474 The number of registers to be filled is NREGS. */
1476 void
1477 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1479 int i;
1480 #ifdef HAVE_load_multiple
1481 rtx pat;
1482 rtx last;
1483 #endif
1485 if (nregs == 0)
1486 return;
1488 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1489 x = validize_mem (force_const_mem (mode, x));
1491 /* See if the machine can do this with a load multiple insn. */
1492 #ifdef HAVE_load_multiple
1493 if (HAVE_load_multiple)
1495 last = get_last_insn ();
1496 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1497 GEN_INT (nregs));
1498 if (pat)
1500 emit_insn (pat);
1501 return;
1503 else
1504 delete_insns_since (last);
1506 #endif
1508 for (i = 0; i < nregs; i++)
1509 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1510 operand_subword_force (x, i, mode));
1513 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1514 The number of registers to be filled is NREGS. */
1516 void
1517 move_block_from_reg (int regno, rtx x, int nregs)
1519 int i;
1521 if (nregs == 0)
1522 return;
1524 /* See if the machine can do this with a store multiple insn. */
1525 #ifdef HAVE_store_multiple
1526 if (HAVE_store_multiple)
1528 rtx last = get_last_insn ();
1529 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1530 GEN_INT (nregs));
1531 if (pat)
1533 emit_insn (pat);
1534 return;
1536 else
1537 delete_insns_since (last);
1539 #endif
1541 for (i = 0; i < nregs; i++)
1543 rtx tem = operand_subword (x, i, 1, BLKmode);
1545 gcc_assert (tem);
1547 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1551 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1552 ORIG, where ORIG is a non-consecutive group of registers represented by
1553 a PARALLEL. The clone is identical to the original except in that the
1554 original set of registers is replaced by a new set of pseudo registers.
1555 The new set has the same modes as the original set. */
1558 gen_group_rtx (rtx orig)
1560 int i, length;
1561 rtx *tmps;
1563 gcc_assert (GET_CODE (orig) == PARALLEL);
1565 length = XVECLEN (orig, 0);
1566 tmps = XALLOCAVEC (rtx, length);
1568 /* Skip a NULL entry in first slot. */
1569 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1571 if (i)
1572 tmps[0] = 0;
1574 for (; i < length; i++)
1576 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1577 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1579 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1582 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
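/* Illustrative note: the PARALLELs handled by the group load/store routines
   below pair each register with the byte offset it covers.  Schematically, a
   value returned in two DImode registers might be described as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   i.e. bytes 0-7 live in register 3 and bytes 8-15 in register 4.  */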
1585 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1586 except that values are placed in TMPS[i], and must later be moved
1587 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1589 static void
1590 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1592 rtx src;
1593 int start, i;
1594 enum machine_mode m = GET_MODE (orig_src);
1596 gcc_assert (GET_CODE (dst) == PARALLEL);
1598 if (m != VOIDmode
1599 && !SCALAR_INT_MODE_P (m)
1600 && !MEM_P (orig_src)
1601 && GET_CODE (orig_src) != CONCAT)
1603 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1604 if (imode == BLKmode)
1605 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1606 else
1607 src = gen_reg_rtx (imode);
1608 if (imode != BLKmode)
1609 src = gen_lowpart (GET_MODE (orig_src), src);
1610 emit_move_insn (src, orig_src);
1611 /* ...and back again. */
1612 if (imode != BLKmode)
1613 src = gen_lowpart (imode, src);
1614 emit_group_load_1 (tmps, dst, src, type, ssize);
1615 return;
1618 /* Check for a NULL entry, used to indicate that the parameter goes
1619 both on the stack and in registers. */
1620 if (XEXP (XVECEXP (dst, 0, 0), 0))
1621 start = 0;
1622 else
1623 start = 1;
1625 /* Process the pieces. */
1626 for (i = start; i < XVECLEN (dst, 0); i++)
1628 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1629 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1630 unsigned int bytelen = GET_MODE_SIZE (mode);
1631 int shift = 0;
1633 /* Handle trailing fragments that run over the size of the struct. */
1634 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1636 /* Arrange to shift the fragment to where it belongs.
1637 extract_bit_field loads to the lsb of the reg. */
1638 if (
1639 #ifdef BLOCK_REG_PADDING
1640 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1641 == (BYTES_BIG_ENDIAN ? upward : downward)
1642 #else
1643 BYTES_BIG_ENDIAN
1644 #endif
1646 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1647 bytelen = ssize - bytepos;
1648 gcc_assert (bytelen > 0);
1651 /* If we won't be loading directly from memory, protect the real source
1652 from strange tricks we might play; but make sure that the source can
1653 be loaded directly into the destination. */
1654 src = orig_src;
1655 if (!MEM_P (orig_src)
1656 && (!CONSTANT_P (orig_src)
1657 || (GET_MODE (orig_src) != mode
1658 && GET_MODE (orig_src) != VOIDmode)))
1660 if (GET_MODE (orig_src) == VOIDmode)
1661 src = gen_reg_rtx (mode);
1662 else
1663 src = gen_reg_rtx (GET_MODE (orig_src));
1665 emit_move_insn (src, orig_src);
1668 /* Optimize the access just a bit. */
1669 if (MEM_P (src)
1670 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1671 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1672 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1673 && bytelen == GET_MODE_SIZE (mode))
1675 tmps[i] = gen_reg_rtx (mode);
1676 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1678 else if (COMPLEX_MODE_P (mode)
1679 && GET_MODE (src) == mode
1680 && bytelen == GET_MODE_SIZE (mode))
1681 /* Let emit_move_complex do the bulk of the work. */
1682 tmps[i] = src;
1683 else if (GET_CODE (src) == CONCAT)
1685 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1686 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1688 if ((bytepos == 0 && bytelen == slen0)
1689 || (bytepos != 0 && bytepos + bytelen <= slen))
1691 /* The following assumes that the concatenated objects all
1692 have the same size. In this case, a simple calculation
1693 can be used to determine the object and the bit field
1694 to be extracted. */
1695 tmps[i] = XEXP (src, bytepos / slen0);
1696 if (! CONSTANT_P (tmps[i])
1697 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1698 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1699 (bytepos % slen0) * BITS_PER_UNIT,
1700 1, false, NULL_RTX, mode, mode);
1702 else
1704 rtx mem;
1706 gcc_assert (!bytepos);
1707 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1708 emit_move_insn (mem, src);
1709 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1710 0, 1, false, NULL_RTX, mode, mode);
1713 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1714 SIMD register, which is currently broken. While we get GCC
1715 to emit proper RTL for these cases, let's dump to memory. */
1716 else if (VECTOR_MODE_P (GET_MODE (dst))
1717 && REG_P (src))
1719 int slen = GET_MODE_SIZE (GET_MODE (src));
1720 rtx mem;
1722 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1723 emit_move_insn (mem, src);
1724 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1726 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1727 && XVECLEN (dst, 0) > 1)
1728 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1729 else if (CONSTANT_P (src))
1731 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1733 if (len == ssize)
1734 tmps[i] = src;
1735 else
1737 rtx first, second;
1739 gcc_assert (2 * len == ssize);
1740 split_double (src, &first, &second);
1741 if (i)
1742 tmps[i] = second;
1743 else
1744 tmps[i] = first;
1747 else if (REG_P (src) && GET_MODE (src) == mode)
1748 tmps[i] = src;
1749 else
1750 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1751 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1752 mode, mode);
1754 if (shift)
1755 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1756 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1760 /* Emit code to move a block SRC of type TYPE to a block DST,
1761 where DST is non-consecutive registers represented by a PARALLEL.
1762 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1763 if not known. */
1765 void
1766 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1768 rtx *tmps;
1769 int i;
1771 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1772 emit_group_load_1 (tmps, dst, src, type, ssize);
1774 /* Copy the extracted pieces into the proper (probable) hard regs. */
1775 for (i = 0; i < XVECLEN (dst, 0); i++)
1777 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1778 if (d == NULL)
1779 continue;
1780 emit_move_insn (d, tmps[i]);
1784 /* Similar, but load SRC into new pseudos in a format that looks like
1785 PARALLEL. This can later be fed to emit_group_move to get things
1786 in the right place. */
1789 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1791 rtvec vec;
1792 int i;
1794 vec = rtvec_alloc (XVECLEN (parallel, 0));
1795 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1797 /* Convert the vector to look just like the original PARALLEL, except
1798 with the computed values. */
1799 for (i = 0; i < XVECLEN (parallel, 0); i++)
1801 rtx e = XVECEXP (parallel, 0, i);
1802 rtx d = XEXP (e, 0);
1804 if (d)
1806 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1807 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1809 RTVEC_ELT (vec, i) = e;
1812 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1815 /* Emit code to move a block SRC to block DST, where SRC and DST are
1816 non-consecutive groups of registers, each represented by a PARALLEL. */
1818 void
1819 emit_group_move (rtx dst, rtx src)
1821 int i;
1823 gcc_assert (GET_CODE (src) == PARALLEL
1824 && GET_CODE (dst) == PARALLEL
1825 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1827 /* Skip first entry if NULL. */
1828 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1829 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1830 XEXP (XVECEXP (src, 0, i), 0));
1833 /* Move a group of registers represented by a PARALLEL into pseudos. */
1836 emit_group_move_into_temps (rtx src)
1838 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1839 int i;
1841 for (i = 0; i < XVECLEN (src, 0); i++)
1843 rtx e = XVECEXP (src, 0, i);
1844 rtx d = XEXP (e, 0);
1846 if (d)
1847 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1848 RTVEC_ELT (vec, i) = e;
1851 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1854 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1855 where SRC is non-consecutive registers represented by a PARALLEL.
1856 SSIZE represents the total size of block ORIG_DST, or -1 if not
1857 known. */
1859 void
1860 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1862 rtx *tmps, dst;
1863 int start, finish, i;
1864 enum machine_mode m = GET_MODE (orig_dst);
1866 gcc_assert (GET_CODE (src) == PARALLEL);
1868 if (!SCALAR_INT_MODE_P (m)
1869 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1871 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1872 if (imode == BLKmode)
1873 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1874 else
1875 dst = gen_reg_rtx (imode);
1876 emit_group_store (dst, src, type, ssize);
1877 if (imode != BLKmode)
1878 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1879 emit_move_insn (orig_dst, dst);
1880 return;
1883 /* Check for a NULL entry, used to indicate that the parameter goes
1884 both on the stack and in registers. */
1885 if (XEXP (XVECEXP (src, 0, 0), 0))
1886 start = 0;
1887 else
1888 start = 1;
1889 finish = XVECLEN (src, 0);
1891 tmps = XALLOCAVEC (rtx, finish);
1893 /* Copy the (probable) hard regs into pseudos. */
1894 for (i = start; i < finish; i++)
1896 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1897 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1899 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1900 emit_move_insn (tmps[i], reg);
1902 else
1903 tmps[i] = reg;
1906 /* If we won't be storing directly into memory, protect the real destination
1907 from strange tricks we might play. */
1908 dst = orig_dst;
1909 if (GET_CODE (dst) == PARALLEL)
1911 rtx temp;
1913 /* We can get a PARALLEL dst if there is a conditional expression in
1914 a return statement. In that case, the dst and src are the same,
1915 so no action is necessary. */
1916 if (rtx_equal_p (dst, src))
1917 return;
1919 /* It is unclear if we can ever reach here, but we may as well handle
1920 it. Allocate a temporary, and split this into a store/load to/from
1921 the temporary. */
1923 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1924 emit_group_store (temp, src, type, ssize);
1925 emit_group_load (dst, temp, type, ssize);
1926 return;
1928 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1930 enum machine_mode outer = GET_MODE (dst);
1931 enum machine_mode inner;
1932 HOST_WIDE_INT bytepos;
1933 bool done = false;
1934 rtx temp;
1936 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1937 dst = gen_reg_rtx (outer);
1939 /* Make life a bit easier for combine. */
1940 /* If the first element of the vector is the low part
1941 of the destination mode, use a paradoxical subreg to
1942 initialize the destination. */
1943 if (start < finish)
1945 inner = GET_MODE (tmps[start]);
1946 bytepos = subreg_lowpart_offset (inner, outer);
1947 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1949 temp = simplify_gen_subreg (outer, tmps[start],
1950 inner, 0);
1951 if (temp)
1953 emit_move_insn (dst, temp);
1954 done = true;
1955 start++;
1960 /* If the first element wasn't the low part, try the last. */
1961 if (!done
1962 && start < finish - 1)
1964 inner = GET_MODE (tmps[finish - 1]);
1965 bytepos = subreg_lowpart_offset (inner, outer);
1966 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1968 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1969 inner, 0);
1970 if (temp)
1972 emit_move_insn (dst, temp);
1973 done = true;
1974 finish--;
1979 /* Otherwise, simply initialize the result to zero. */
1980 if (!done)
1981 emit_move_insn (dst, CONST0_RTX (outer));
1984 /* Process the pieces. */
1985 for (i = start; i < finish; i++)
1987 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1988 enum machine_mode mode = GET_MODE (tmps[i]);
1989 unsigned int bytelen = GET_MODE_SIZE (mode);
1990 unsigned int adj_bytelen = bytelen;
1991 rtx dest = dst;
1993 /* Handle trailing fragments that run over the size of the struct. */
1994 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1995 adj_bytelen = ssize - bytepos;
1997 if (GET_CODE (dst) == CONCAT)
1999 if (bytepos + adj_bytelen
2000 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2001 dest = XEXP (dst, 0);
2002 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2004 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2005 dest = XEXP (dst, 1);
2007 else
2009 enum machine_mode dest_mode = GET_MODE (dest);
2010 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2012 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2014 if (GET_MODE_ALIGNMENT (dest_mode)
2015 >= GET_MODE_ALIGNMENT (tmp_mode))
2017 dest = assign_stack_temp (dest_mode,
2018 GET_MODE_SIZE (dest_mode),
2020 emit_move_insn (adjust_address (dest,
2021 tmp_mode,
2022 bytepos),
2023 tmps[i]);
2024 dst = dest;
2026 else
2028 dest = assign_stack_temp (tmp_mode,
2029 GET_MODE_SIZE (tmp_mode),
2031 emit_move_insn (dest, tmps[i]);
2032 dst = adjust_address (dest, dest_mode, bytepos);
2034 break;
2038 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2040 /* store_bit_field always takes its value from the lsb.
2041 Move the fragment to the lsb if it's not already there. */
2042 if (
2043 #ifdef BLOCK_REG_PADDING
2044 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2045 == (BYTES_BIG_ENDIAN ? upward : downward)
2046 #else
2047 BYTES_BIG_ENDIAN
2048 #endif
2051 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2052 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2053 build_int_cst (NULL_TREE, shift),
2054 tmps[i], 0);
2056 bytelen = adj_bytelen;
2059 /* Optimize the access just a bit. */
2060 if (MEM_P (dest)
2061 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2062 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2063 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2064 && bytelen == GET_MODE_SIZE (mode))
2065 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2066 else
2067 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2068 mode, tmps[i]);
2071 /* Copy from the pseudo into the (probable) hard reg. */
2072 if (orig_dst != dst)
2073 emit_move_insn (orig_dst, dst);
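/* Illustrative sketch -- not part of expr.c.  For a trailing fragment that
   runs past the end of the structure, the shift computed inside the loop
   above moves the meaningful bytes down to the least significant end before
   store_bit_field stores them.  Hypothetical numbers, assuming big-endian
   ordering: a 4-byte register piece at byte position 4 of a 6-byte structure
   only keeps ssize - bytepos = 2 bytes, so it is shifted right by 16 bits.  */

#include <stdio.h>

int
main (void)
{
  int ssize = 6, bytepos = 4, bytelen = 4, bits_per_unit = 8;
  int shift = (bytelen - (ssize - bytepos)) * bits_per_unit;

  printf ("shift right by %d bits, then store %d bytes\n",
          shift, ssize - bytepos);   /* 16 bits, 2 bytes */
  return 0;
}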
2076 /* Generate code to copy a BLKmode object of TYPE out of a
2077 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2078 is null, a stack temporary is created. TGTBLK is returned.
2080 The purpose of this routine is to handle functions that return
2081 BLKmode structures in registers. Some machines (the PA for example)
2082 want to return all small structures in registers regardless of the
2083 structure's alignment. */
2085 rtx
2086 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2088 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2089 rtx src = NULL, dst = NULL;
2090 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2091 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2092 enum machine_mode copy_mode;
2094 if (tgtblk == 0)
2096 tgtblk = assign_temp (build_qualified_type (type,
2097 (TYPE_QUALS (type)
2098 | TYPE_QUAL_CONST)),
2099 0, 1, 1);
2100 preserve_temp_slots (tgtblk);
2103 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2104 into a new pseudo which is a full word. */
2106 if (GET_MODE (srcreg) != BLKmode
2107 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2108 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2110 /* If the structure doesn't take up a whole number of words, see whether
2111 SRCREG is padded on the left or on the right. If it's on the left,
2112 set PADDING_CORRECTION to the number of bits to skip.
2114 In most ABIs, the structure will be returned at the least significant
2115 end of the register, which translates to right padding on little-endian
2116 targets and left padding on big-endian targets. The opposite
2117 holds if the structure is returned at the most significant
2118 end of the register. */
2119 if (bytes % UNITS_PER_WORD != 0
2120 && (targetm.calls.return_in_msb (type)
2121 ? !BYTES_BIG_ENDIAN
2122 : BYTES_BIG_ENDIAN))
2123 padding_correction
2124 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2126 /* Copy the structure BITSIZE bits at a time. If the target lives in
2127 memory, take care of not reading/writing past its end by selecting
2128 a copy mode suited to BITSIZE. This should always be possible given
2129 how it is computed.
2131 We could probably emit more efficient code for machines which do not use
2132 strict alignment, but it doesn't seem worth the effort at the current
2133 time. */
2135 copy_mode = word_mode;
2136 if (MEM_P (tgtblk))
2138 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2139 if (mem_mode != BLKmode)
2140 copy_mode = mem_mode;
2143 for (bitpos = 0, xbitpos = padding_correction;
2144 bitpos < bytes * BITS_PER_UNIT;
2145 bitpos += bitsize, xbitpos += bitsize)
2147 /* We need a new source operand each time xbitpos is on a
2148 word boundary and when xbitpos == padding_correction
2149 (the first time through). */
2150 if (xbitpos % BITS_PER_WORD == 0
2151 || xbitpos == padding_correction)
2152 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2153 GET_MODE (srcreg));
2155 /* We need a new destination operand each time bitpos is on
2156 a word boundary. */
2157 if (bitpos % BITS_PER_WORD == 0)
2158 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2160 /* Use xbitpos for the source extraction (right justified) and
2161 bitpos for the destination store (left justified). */
2162 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2163 extract_bit_field (src, bitsize,
2164 xbitpos % BITS_PER_WORD, 1, false,
2165 NULL_RTX, copy_mode, copy_mode));
2168 return tgtblk;
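/* Illustrative sketch -- not part of expr.c.  The padding correction used
   by copy_blkmode_from_reg above is plain arithmetic on the structure size.
   Hypothetical standalone check, assuming 32-bit words (UNITS_PER_WORD == 4,
   BITS_PER_UNIT == 8): a 6-byte structure that is padded on the left leaves
   the first 16 bits of its register unused, so extraction starts there.  */

#include <stdio.h>

int
main (void)
{
  unsigned units_per_word = 4, bits_per_unit = 8;
  unsigned bits_per_word = units_per_word * bits_per_unit;
  unsigned bytes = 6;                       /* like int_size_in_bytes (type) */
  unsigned padding_correction = 0;

  if (bytes % units_per_word != 0)          /* mirrors the test in the code */
    padding_correction
      = bits_per_word - (bytes % units_per_word) * bits_per_unit;

  printf ("padding_correction = %u bits\n", padding_correction);   /* 16 */
  return 0;
}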
2171 /* Add a USE expression for REG to the (possibly empty) list pointed
2172 to by CALL_FUSAGE. REG must denote a hard register. */
2174 void
2175 use_reg (rtx *call_fusage, rtx reg)
2177 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2179 *call_fusage
2180 = gen_rtx_EXPR_LIST (VOIDmode,
2181 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2185 starting at REGNO. All of these registers must be hard registers. */
2187 void
2188 use_regs (rtx *call_fusage, int regno, int nregs)
2190 int i;
2192 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2194 for (i = 0; i < nregs; i++)
2195 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2198 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2199 PARALLEL REGS. This is for calls that pass values in multiple
2200 non-contiguous locations. The Irix 6 ABI has examples of this. */
2202 void
2203 use_group_regs (rtx *call_fusage, rtx regs)
2205 int i;
2207 for (i = 0; i < XVECLEN (regs, 0); i++)
2209 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2211 /* A NULL entry means the parameter goes both on the stack and in
2212 registers. This can also be a MEM for targets that pass values
2213 partially on the stack and partially in registers. */
2214 if (reg != 0 && REG_P (reg))
2215 use_reg (call_fusage, reg);
2219 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2220 assignment and the code of the expression on the RHS is CODE. Return
2221 NULL otherwise. */
2223 static gimple
2224 get_def_for_expr (tree name, enum tree_code code)
2226 gimple def_stmt;
2228 if (TREE_CODE (name) != SSA_NAME)
2229 return NULL;
2231 def_stmt = get_gimple_for_ssa_name (name);
2232 if (!def_stmt
2233 || gimple_assign_rhs_code (def_stmt) != code)
2234 return NULL;
2236 return def_stmt;
2240 /* Determine whether the LEN bytes generated by CONSTFUN can be
2241 stored to memory using several move instructions. CONSTFUNDATA is
2242 a pointer which will be passed as argument in every CONSTFUN call.
2243 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2244 a memset operation and false if it's a copy of a constant string.
2245 Return nonzero if a call to store_by_pieces should succeed. */
2247 int
2248 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2249 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2250 void *constfundata, unsigned int align, bool memsetp)
2252 unsigned HOST_WIDE_INT l;
2253 unsigned int max_size;
2254 HOST_WIDE_INT offset = 0;
2255 enum machine_mode mode;
2256 enum insn_code icode;
2257 int reverse;
2258 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2259 rtx cst ATTRIBUTE_UNUSED;
2261 if (len == 0)
2262 return 1;
2264 if (! (memsetp
2265 ? SET_BY_PIECES_P (len, align)
2266 : STORE_BY_PIECES_P (len, align)))
2267 return 0;
2269 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2271 /* We would first store what we can in the largest integer mode, then go to
2272 successively smaller modes. */
2274 for (reverse = 0;
2275 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2276 reverse++)
2278 l = len;
2279 max_size = STORE_MAX_PIECES + 1;
2280 while (max_size > 1)
2282 mode = widest_int_mode_for_size (max_size);
2284 if (mode == VOIDmode)
2285 break;
2287 icode = optab_handler (mov_optab, mode);
2288 if (icode != CODE_FOR_nothing
2289 && align >= GET_MODE_ALIGNMENT (mode))
2291 unsigned int size = GET_MODE_SIZE (mode);
2293 while (l >= size)
2295 if (reverse)
2296 offset -= size;
2298 cst = (*constfun) (constfundata, offset, mode);
2299 if (!LEGITIMATE_CONSTANT_P (cst))
2300 return 0;
2302 if (!reverse)
2303 offset += size;
2305 l -= size;
2309 max_size = GET_MODE_SIZE (mode);
2312 /* The code above should have handled everything. */
2313 gcc_assert (!l);
2316 return 1;
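/* Illustrative sketch -- not part of expr.c.  The loop above walks the
   integer modes from the widest permitted piece down to a single byte and
   checks that every piece would be a legitimate constant move.  The
   hypothetical standalone program below performs the same length
   decomposition, assuming power-of-two piece sizes up to 8 bytes.  */

#include <stdio.h>

int
main (void)
{
  unsigned long len = 29;                   /* bytes left to store */
  unsigned size;

  for (size = 8; size >= 1; size /= 2)      /* 8 stands in for STORE_MAX_PIECES */
    {
      unsigned long n = len / size;
      len -= n * size;
      if (n)
        printf ("%lu move(s) of %u byte(s)\n", n, size);
    }

  printf ("left over: %lu\n", len);         /* 0, as gcc_assert (!l) expects */
  return 0;
}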
2319 /* Generate several move instructions to store LEN bytes generated by
2320 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2321 pointer which will be passed as argument in every CONSTFUN call.
2322 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2323 a memset operation and false if it's a copy of a constant string.
2324 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2325 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2326 stpcpy. */
2328 rtx
2329 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2330 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2331 void *constfundata, unsigned int align, bool memsetp, int endp)
2333 enum machine_mode to_addr_mode
2334 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2335 struct store_by_pieces_d data;
2337 if (len == 0)
2339 gcc_assert (endp != 2);
2340 return to;
2343 gcc_assert (memsetp
2344 ? SET_BY_PIECES_P (len, align)
2345 : STORE_BY_PIECES_P (len, align));
2346 data.constfun = constfun;
2347 data.constfundata = constfundata;
2348 data.len = len;
2349 data.to = to;
2350 store_by_pieces_1 (&data, align);
2351 if (endp)
2353 rtx to1;
2355 gcc_assert (!data.reverse);
2356 if (data.autinc_to)
2358 if (endp == 2)
2360 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2361 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2362 else
2363 data.to_addr = copy_to_mode_reg (to_addr_mode,
2364 plus_constant (data.to_addr,
2365 -1));
2367 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2368 data.offset);
2370 else
2372 if (endp == 2)
2373 --data.offset;
2374 to1 = adjust_address (data.to, QImode, data.offset);
2376 return to1;
2378 else
2379 return data.to;
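/* Illustrative sketch -- not part of expr.c.  The ENDP argument mirrors the
   return-value conventions of the corresponding C library routines: with a
   destination TO and length LEN, ENDP == 0 returns TO (memcpy/memset style),
   ENDP == 1 returns TO + LEN (mempcpy style) and ENDP == 2 returns
   TO + LEN - 1 (stpcpy style, the address of the last byte written).
   Hypothetical demonstration in plain C: */

#include <stdio.h>
#include <string.h>

int
main (void)
{
  char to[16];
  size_t len = 5;
  char *endp0, *endp1, *endp2;

  memset (to, 'x', len);
  endp0 = to;                /* ENDP == 0 */
  endp1 = to + len;          /* ENDP == 1, a la mempcpy */
  endp2 = to + len - 1;      /* ENDP == 2, a la stpcpy */
  printf ("%td %td %td\n", endp0 - to, endp1 - to, endp2 - to);   /* 0 5 4 */
  return 0;
}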
2382 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2383 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2385 static void
2386 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2388 struct store_by_pieces_d data;
2390 if (len == 0)
2391 return;
2393 data.constfun = clear_by_pieces_1;
2394 data.constfundata = NULL;
2395 data.len = len;
2396 data.to = to;
2397 store_by_pieces_1 (&data, align);
2400 /* Callback routine for clear_by_pieces.
2401 Return const0_rtx unconditionally. */
2403 static rtx
2404 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2406 enum machine_mode mode ATTRIBUTE_UNUSED)
2408 return const0_rtx;
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2415 static void
2416 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2417 unsigned int align ATTRIBUTE_UNUSED)
2419 enum machine_mode to_addr_mode
2420 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned int max_size = STORE_MAX_PIECES + 1;
2423 enum insn_code icode;
2425 data->offset = 0;
2426 data->to_addr = to_addr;
2427 data->autinc_to
2428 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2429 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2431 data->explicit_inc_to = 0;
2432 data->reverse
2433 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2434 if (data->reverse)
2435 data->offset = data->len;
2437 /* If storing requires more than two move insns,
2438 copy addresses to registers (to make displacements shorter)
2439 and use post-increment if available. */
2440 if (!data->autinc_to
2441 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2443 /* Determine the main mode we'll be using.
2444 MODE might not be used depending on the definitions of the
2445 USE_* macros below. */
2446 enum machine_mode mode ATTRIBUTE_UNUSED
2447 = widest_int_mode_for_size (max_size);
2449 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2451 data->to_addr = copy_to_mode_reg (to_addr_mode,
2452 plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2460 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2465 if ( !data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2469 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2471 /* First store what we can in the largest integer mode, then go to
2472 successively smaller modes. */
2474 while (max_size > 1)
2476 enum machine_mode mode = widest_int_mode_for_size (max_size);
2478 if (mode == VOIDmode)
2479 break;
2481 icode = optab_handler (mov_optab, mode);
2482 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2483 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2485 max_size = GET_MODE_SIZE (mode);
2488 /* The code above should have handled everything. */
2489 gcc_assert (!data->len);
2492 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2493 with move instructions for mode MODE. GENFUN is the gen_... function
2494 to make a move insn for that mode. DATA has all the other info. */
2496 static void
2497 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2498 struct store_by_pieces_d *data)
2500 unsigned int size = GET_MODE_SIZE (mode);
2501 rtx to1, cst;
2503 while (data->len >= size)
2505 if (data->reverse)
2506 data->offset -= size;
2508 if (data->autinc_to)
2509 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2510 data->offset);
2511 else
2512 to1 = adjust_address (data->to, mode, data->offset);
2514 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2515 emit_insn (gen_add2_insn (data->to_addr,
2516 GEN_INT (-(HOST_WIDE_INT) size)));
2518 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2519 emit_insn ((*genfun) (to1, cst));
2521 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2522 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2524 if (! data->reverse)
2525 data->offset += size;
2527 data->len -= size;
2531 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2532 its length in bytes. */
2534 rtx
2535 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2536 unsigned int expected_align, HOST_WIDE_INT expected_size)
2538 enum machine_mode mode = GET_MODE (object);
2539 unsigned int align;
2541 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2543 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2544 just move a zero. Otherwise, do this a piece at a time. */
2545 if (mode != BLKmode
2546 && CONST_INT_P (size)
2547 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2549 rtx zero = CONST0_RTX (mode);
2550 if (zero != NULL)
2552 emit_move_insn (object, zero);
2553 return NULL;
2556 if (COMPLEX_MODE_P (mode))
2558 zero = CONST0_RTX (GET_MODE_INNER (mode));
2559 if (zero != NULL)
2561 write_complex_part (object, zero, 0);
2562 write_complex_part (object, zero, 1);
2563 return NULL;
2568 if (size == const0_rtx)
2569 return NULL;
2571 align = MEM_ALIGN (object);
2573 if (CONST_INT_P (size)
2574 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2575 clear_by_pieces (object, INTVAL (size), align);
2576 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2577 expected_align, expected_size))
2579 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2580 return set_storage_via_libcall (object, size, const0_rtx,
2581 method == BLOCK_OP_TAILCALL);
2582 else
2583 gcc_unreachable ();
2585 return NULL;
2588 rtx
2589 clear_storage (rtx object, rtx size, enum block_op_methods method)
2591 return clear_storage_hints (object, size, method, 0, -1);
2595 /* A subroutine of clear_storage. Expand a call to memset.
2596 Return the return value of memset, 0 otherwise. */
2598 rtx
2599 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2601 tree call_expr, fn, object_tree, size_tree, val_tree;
2602 enum machine_mode size_mode;
2603 rtx retval;
2605 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2606 place those new pseudos into a VAR_DECL and use them later. */
2608 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2610 size_mode = TYPE_MODE (sizetype);
2611 size = convert_to_mode (size_mode, size, 1);
2612 size = copy_to_mode_reg (size_mode, size);
2614 /* It is incorrect to use the libcall calling conventions to call
2615 memset in this context. This could be a user call to memset and
2616 the user may wish to examine the return value from memset. For
2617 targets where libcalls and normal calls have different conventions
2618 for returning pointers, we could end up generating incorrect code. */
2620 object_tree = make_tree (ptr_type_node, object);
2621 if (!CONST_INT_P (val))
2622 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2623 size_tree = make_tree (sizetype, size);
2624 val_tree = make_tree (integer_type_node, val);
2626 fn = clear_storage_libcall_fn (true);
2627 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2628 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2630 retval = expand_normal (call_expr);
2632 return retval;
2635 /* A subroutine of set_storage_via_libcall. Create the tree node
2636 for the function we use for block clears. The first time FOR_CALL
2637 is true, we call assemble_external. */
2639 tree block_clear_fn;
2641 void
2642 init_block_clear_fn (const char *asmspec)
2644 if (!block_clear_fn)
2646 tree fn, args;
2648 fn = get_identifier ("memset");
2649 args = build_function_type_list (ptr_type_node, ptr_type_node,
2650 integer_type_node, sizetype,
2651 NULL_TREE);
2653 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2654 DECL_EXTERNAL (fn) = 1;
2655 TREE_PUBLIC (fn) = 1;
2656 DECL_ARTIFICIAL (fn) = 1;
2657 TREE_NOTHROW (fn) = 1;
2658 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2659 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2661 block_clear_fn = fn;
2664 if (asmspec)
2665 set_user_assembler_name (block_clear_fn, asmspec);
2668 static tree
2669 clear_storage_libcall_fn (int for_call)
2671 static bool emitted_extern;
2673 if (!block_clear_fn)
2674 init_block_clear_fn (NULL);
2676 if (for_call && !emitted_extern)
2678 emitted_extern = true;
2679 make_decl_rtl (block_clear_fn);
2680 assemble_external (block_clear_fn);
2683 return block_clear_fn;
2686 /* Expand a setmem pattern; return true if successful. */
2688 bool
2689 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2690 unsigned int expected_align, HOST_WIDE_INT expected_size)
2692 /* Try the most limited insn first, because there's no point
2693 including more than one in the machine description unless
2694 the more limited one has some advantage. */
2696 enum machine_mode mode;
2698 if (expected_align < align)
2699 expected_align = align;
2701 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2702 mode = GET_MODE_WIDER_MODE (mode))
2704 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2706 if (code != CODE_FOR_nothing
2707 /* We don't need MODE to be narrower than
2708 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2709 the mode mask, as it is returned by the macro, it will
2710 definitely be less than the actual mode mask. */
2711 && ((CONST_INT_P (size)
2712 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2713 <= (GET_MODE_MASK (mode) >> 1)))
2714 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2716 struct expand_operand ops[6];
2717 unsigned int nops;
2719 nops = insn_data[(int) code].n_generator_args;
2720 gcc_assert (nops == 4 || nops == 6);
2722 create_fixed_operand (&ops[0], object);
2723 /* The check above guarantees that this size conversion is valid. */
2724 create_convert_operand_to (&ops[1], size, mode, true);
2725 create_convert_operand_from (&ops[2], val, byte_mode, true);
2726 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2727 if (nops == 6)
2729 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2730 create_integer_operand (&ops[5], expected_size);
2732 if (maybe_expand_insn (code, nops, ops))
2733 return true;
2737 return false;
2741 /* Write to one of the components of the complex value CPLX. Write VAL to
2742 the real part if IMAG_P is false, and the imaginary part if it's true. */
2744 static void
2745 write_complex_part (rtx cplx, rtx val, bool imag_p)
2747 enum machine_mode cmode;
2748 enum machine_mode imode;
2749 unsigned ibitsize;
2751 if (GET_CODE (cplx) == CONCAT)
2753 emit_move_insn (XEXP (cplx, imag_p), val);
2754 return;
2757 cmode = GET_MODE (cplx);
2758 imode = GET_MODE_INNER (cmode);
2759 ibitsize = GET_MODE_BITSIZE (imode);
2761 /* For MEMs simplify_gen_subreg may generate an invalid new address
2762 because, e.g., the original address is considered mode-dependent
2763 by the target, which restricts simplify_subreg from invoking
2764 adjust_address_nv. Instead of preparing fallback support for an
2765 invalid address, we call adjust_address_nv directly. */
2766 if (MEM_P (cplx))
2768 emit_move_insn (adjust_address_nv (cplx, imode,
2769 imag_p ? GET_MODE_SIZE (imode) : 0),
2770 val);
2771 return;
2774 /* If the sub-object is at least word sized, then we know that subregging
2775 will work. This special case is important, since store_bit_field
2776 wants to operate on integer modes, and there's rarely an OImode to
2777 correspond to TCmode. */
2778 if (ibitsize >= BITS_PER_WORD
2779 /* For hard regs we have exact predicates. Assume we can split
2780 the original object if it spans an even number of hard regs.
2781 This special case is important for SCmode on 64-bit platforms
2782 where the natural size of floating-point regs is 32-bit. */
2783 || (REG_P (cplx)
2784 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2785 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2787 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2788 imag_p ? GET_MODE_SIZE (imode) : 0);
2789 if (part)
2791 emit_move_insn (part, val);
2792 return;
2794 else
2795 /* simplify_gen_subreg may fail for sub-word MEMs. */
2796 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2799 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2802 /* Extract one of the components of the complex value CPLX. Extract the
2803 real part if IMAG_P is false, and the imaginary part if it's true. */
2805 static rtx
2806 read_complex_part (rtx cplx, bool imag_p)
2808 enum machine_mode cmode, imode;
2809 unsigned ibitsize;
2811 if (GET_CODE (cplx) == CONCAT)
2812 return XEXP (cplx, imag_p);
2814 cmode = GET_MODE (cplx);
2815 imode = GET_MODE_INNER (cmode);
2816 ibitsize = GET_MODE_BITSIZE (imode);
2818 /* Special case reads from complex constants that got spilled to memory. */
2819 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2821 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2822 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2824 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2825 if (CONSTANT_CLASS_P (part))
2826 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2830 /* For MEMs simplify_gen_subreg may generate an invalid new address
2831 because, e.g., the original address is considered mode-dependent
2832 by the target, which restricts simplify_subreg from invoking
2833 adjust_address_nv. Instead of preparing fallback support for an
2834 invalid address, we call adjust_address_nv directly. */
2835 if (MEM_P (cplx))
2836 return adjust_address_nv (cplx, imode,
2837 imag_p ? GET_MODE_SIZE (imode) : 0);
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since extract_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2848 || (REG_P (cplx)
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2852 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2854 if (ret)
2855 return ret;
2856 else
2857 /* simplify_gen_subreg may fail for sub-word MEMs. */
2858 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2861 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2862 true, false, NULL_RTX, imode, imode);
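/* Illustrative sketch -- not part of expr.c.  For a complex value the real
   part always sits at offset 0 and the imaginary part at offset
   GET_MODE_SIZE (imode), which is what the adjust_address_nv calls in
   read_complex_part and write_complex_part rely on.  C99 guarantees the
   same layout for its complex types, so a hypothetical standalone check is: */

#include <complex.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  double complex z = 1.5 + 2.5 * I;
  double parts[2];

  memcpy (parts, &z, sizeof z);       /* real part first, then imaginary */
  printf ("real = %g, imag = %g\n", parts[0], parts[1]);   /* 1.5, 2.5 */
  return 0;
}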
2865 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2866 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2867 represented in NEW_MODE. If FORCE is true, this will never happen, as
2868 we'll force-create a SUBREG if needed. */
2870 static rtx
2871 emit_move_change_mode (enum machine_mode new_mode,
2872 enum machine_mode old_mode, rtx x, bool force)
2874 rtx ret;
2876 if (push_operand (x, GET_MODE (x)))
2878 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2879 MEM_COPY_ATTRIBUTES (ret, x);
2881 else if (MEM_P (x))
2883 /* We don't have to worry about changing the address since the
2884 size in bytes is supposed to be the same. */
2885 if (reload_in_progress)
2887 /* Copy the MEM to change the mode and move any
2888 substitutions from the old MEM to the new one. */
2889 ret = adjust_address_nv (x, new_mode, 0);
2890 copy_replacements (x, ret);
2892 else
2893 ret = adjust_address (x, new_mode, 0);
2895 else
2897 /* Note that we do want simplify_subreg's behavior of validating
2898 that the new mode is ok for a hard register. If we were to use
2899 simplify_gen_subreg, we would create the subreg, but would
2900 probably run into the target not being able to implement it. */
2901 /* Except, of course, when FORCE is true, when this is exactly what
2902 we want. Which is needed for CCmodes on some targets. */
2903 if (force)
2904 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2905 else
2906 ret = simplify_subreg (new_mode, x, old_mode, 0);
2909 return ret;
2912 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2913 an integer mode of the same size as MODE. Returns the instruction
2914 emitted, or NULL if such a move could not be generated. */
2916 static rtx
2917 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2919 enum machine_mode imode;
2920 enum insn_code code;
2922 /* There must exist a mode of the exact size we require. */
2923 imode = int_mode_for_mode (mode);
2924 if (imode == BLKmode)
2925 return NULL_RTX;
2927 /* The target must support moves in this mode. */
2928 code = optab_handler (mov_optab, imode);
2929 if (code == CODE_FOR_nothing)
2930 return NULL_RTX;
2932 x = emit_move_change_mode (imode, mode, x, force);
2933 if (x == NULL_RTX)
2934 return NULL_RTX;
2935 y = emit_move_change_mode (imode, mode, y, force);
2936 if (y == NULL_RTX)
2937 return NULL_RTX;
2938 return emit_insn (GEN_FCN (code) (x, y));
2941 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2942 Return an equivalent MEM that does not use an auto-increment. */
2944 static rtx
2945 emit_move_resolve_push (enum machine_mode mode, rtx x)
2947 enum rtx_code code = GET_CODE (XEXP (x, 0));
2948 HOST_WIDE_INT adjust;
2949 rtx temp;
2951 adjust = GET_MODE_SIZE (mode);
2952 #ifdef PUSH_ROUNDING
2953 adjust = PUSH_ROUNDING (adjust);
2954 #endif
2955 if (code == PRE_DEC || code == POST_DEC)
2956 adjust = -adjust;
2957 else if (code == PRE_MODIFY || code == POST_MODIFY)
2959 rtx expr = XEXP (XEXP (x, 0), 1);
2960 HOST_WIDE_INT val;
2962 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2963 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2964 val = INTVAL (XEXP (expr, 1));
2965 if (GET_CODE (expr) == MINUS)
2966 val = -val;
2967 gcc_assert (adjust == val || adjust == -val);
2968 adjust = val;
2971 /* Do not use anti_adjust_stack, since we don't want to update
2972 stack_pointer_delta. */
2973 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2974 GEN_INT (adjust), stack_pointer_rtx,
2975 0, OPTAB_LIB_WIDEN);
2976 if (temp != stack_pointer_rtx)
2977 emit_move_insn (stack_pointer_rtx, temp);
2979 switch (code)
2981 case PRE_INC:
2982 case PRE_DEC:
2983 case PRE_MODIFY:
2984 temp = stack_pointer_rtx;
2985 break;
2986 case POST_INC:
2987 case POST_DEC:
2988 case POST_MODIFY:
2989 temp = plus_constant (stack_pointer_rtx, -adjust);
2990 break;
2991 default:
2992 gcc_unreachable ();
2995 return replace_equiv_address (x, temp);
2998 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2999 X is known to satisfy push_operand, and MODE is known to be complex.
3000 Returns the last instruction emitted. */
3002 rtx
3003 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3005 enum machine_mode submode = GET_MODE_INNER (mode);
3006 bool imag_first;
3008 #ifdef PUSH_ROUNDING
3009 unsigned int submodesize = GET_MODE_SIZE (submode);
3011 /* In case we output to the stack, but the size is smaller than the
3012 machine can push exactly, we need to use move instructions. */
3013 if (PUSH_ROUNDING (submodesize) != submodesize)
3015 x = emit_move_resolve_push (mode, x);
3016 return emit_move_insn (x, y);
3018 #endif
3020 /* Note that the real part always precedes the imag part in memory
3021 regardless of machine's endianness. */
3022 switch (GET_CODE (XEXP (x, 0)))
3024 case PRE_DEC:
3025 case POST_DEC:
3026 imag_first = true;
3027 break;
3028 case PRE_INC:
3029 case POST_INC:
3030 imag_first = false;
3031 break;
3032 default:
3033 gcc_unreachable ();
3036 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3037 read_complex_part (y, imag_first));
3038 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3039 read_complex_part (y, !imag_first));
3042 /* A subroutine of emit_move_complex. Perform the move from Y to X
3043 via two moves of the parts. Returns the last instruction emitted. */
3045 rtx
3046 emit_move_complex_parts (rtx x, rtx y)
3048 /* Show the output dies here. This is necessary for SUBREGs
3049 of pseudos since we cannot track their lifetimes correctly;
3050 hard regs shouldn't appear here except as return values. */
3051 if (!reload_completed && !reload_in_progress
3052 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3053 emit_clobber (x);
3055 write_complex_part (x, read_complex_part (y, false), false);
3056 write_complex_part (x, read_complex_part (y, true), true);
3058 return get_last_insn ();
3061 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3062 MODE is known to be complex. Returns the last instruction emitted. */
3064 static rtx
3065 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3067 bool try_int;
3069 /* Need to take special care for pushes, to maintain proper ordering
3070 of the data, and possibly extra padding. */
3071 if (push_operand (x, mode))
3072 return emit_move_complex_push (mode, x, y);
3074 /* See if we can coerce the target into moving both values at once. */
3076 /* Move floating point as parts. */
3077 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3078 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3079 try_int = false;
3080 /* Not possible if the values are inherently not adjacent. */
3081 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3082 try_int = false;
3083 /* Is possible if both are registers (or subregs of registers). */
3084 else if (register_operand (x, mode) && register_operand (y, mode))
3085 try_int = true;
3086 /* If one of the operands is a memory, and alignment constraints
3087 are friendly enough, we may be able to do combined memory operations.
3088 We do not attempt this if Y is a constant because that combination is
3089 usually better with the by-parts thing below. */
3090 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3091 && (!STRICT_ALIGNMENT
3092 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3093 try_int = true;
3094 else
3095 try_int = false;
3097 if (try_int)
3099 rtx ret;
3101 /* For memory to memory moves, optimal behavior can be had with the
3102 existing block move logic. */
3103 if (MEM_P (x) && MEM_P (y))
3105 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3106 BLOCK_OP_NO_LIBCALL);
3107 return get_last_insn ();
3110 ret = emit_move_via_integer (mode, x, y, true);
3111 if (ret)
3112 return ret;
3115 return emit_move_complex_parts (x, y);
3118 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3119 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3121 static rtx
3122 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3124 rtx ret;
3126 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3127 if (mode != CCmode)
3129 enum insn_code code = optab_handler (mov_optab, CCmode);
3130 if (code != CODE_FOR_nothing)
3132 x = emit_move_change_mode (CCmode, mode, x, true);
3133 y = emit_move_change_mode (CCmode, mode, y, true);
3134 return emit_insn (GEN_FCN (code) (x, y));
3138 /* Otherwise, find the MODE_INT mode of the same width. */
3139 ret = emit_move_via_integer (mode, x, y, false);
3140 gcc_assert (ret != NULL);
3141 return ret;
3144 /* Return true if word I of OP lies entirely in the
3145 undefined bits of a paradoxical subreg. */
3147 static bool
3148 undefined_operand_subword_p (const_rtx op, int i)
3150 enum machine_mode innermode, innermostmode;
3151 int offset;
3152 if (GET_CODE (op) != SUBREG)
3153 return false;
3154 innermode = GET_MODE (op);
3155 innermostmode = GET_MODE (SUBREG_REG (op));
3156 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3157 /* The SUBREG_BYTE represents offset, as if the value were stored in
3158 memory, except for a paradoxical subreg where we define
3159 SUBREG_BYTE to be 0; undo this exception as in
3160 simplify_subreg. */
3161 if (SUBREG_BYTE (op) == 0
3162 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3164 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3165 if (WORDS_BIG_ENDIAN)
3166 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3167 if (BYTES_BIG_ENDIAN)
3168 offset += difference % UNITS_PER_WORD;
3170 if (offset >= GET_MODE_SIZE (innermostmode)
3171 || offset <= -GET_MODE_SIZE (word_mode))
3172 return true;
3173 return false;
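/* Illustrative sketch -- not part of expr.c.  With 4-byte words, reading a
   4-byte (SImode) value through a paradoxical 8-byte (DImode) subreg covers
   two words, and on a little-endian target the second word lies entirely
   outside the inner value, so the function above reports it as undefined.
   Hypothetical arithmetic (no big-endian offset correction applied): */

#include <stdio.h>

int
main (void)
{
  unsigned units_per_word = 4;
  unsigned inner_size = 8;        /* size of the subreg's mode */
  unsigned innermost_size = 4;    /* size of SUBREG_REG's mode */
  unsigned i;

  for (i = 0; i < inner_size / units_per_word; i++)
    {
      unsigned offset = i * units_per_word;    /* SUBREG_BYTE is 0 here */
      printf ("word %u: %s\n", i,
              offset >= innermost_size ? "undefined" : "defined");
    }
  return 0;
}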
3176 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3177 MODE is any multi-word or full-word mode that lacks a move_insn
3178 pattern. Note that you will get better code if you define such
3179 patterns, even if they must turn into multiple assembler instructions. */
3181 static rtx
3182 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3184 rtx last_insn = 0;
3185 rtx seq, inner;
3186 bool need_clobber;
3187 int i;
3189 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3191 /* If X is a push on the stack, do the push now and replace
3192 X with a reference to the stack pointer. */
3193 if (push_operand (x, mode))
3194 x = emit_move_resolve_push (mode, x);
3196 /* If we are in reload, see if either operand is a MEM whose address
3197 is scheduled for replacement. */
3198 if (reload_in_progress && MEM_P (x)
3199 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3200 x = replace_equiv_address_nv (x, inner);
3201 if (reload_in_progress && MEM_P (y)
3202 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3203 y = replace_equiv_address_nv (y, inner);
3205 start_sequence ();
3207 need_clobber = false;
3208 for (i = 0;
3209 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3210 i++)
3212 rtx xpart = operand_subword (x, i, 1, mode);
3213 rtx ypart;
3215 /* Do not generate code for a move if it would come entirely
3216 from the undefined bits of a paradoxical subreg. */
3217 if (undefined_operand_subword_p (y, i))
3218 continue;
3220 ypart = operand_subword (y, i, 1, mode);
3222 /* If we can't get a part of Y, put Y into memory if it is a
3223 constant. Otherwise, force it into a register. Then we must
3224 be able to get a part of Y. */
3225 if (ypart == 0 && CONSTANT_P (y))
3227 y = use_anchored_address (force_const_mem (mode, y));
3228 ypart = operand_subword (y, i, 1, mode);
3230 else if (ypart == 0)
3231 ypart = operand_subword_force (y, i, mode);
3233 gcc_assert (xpart && ypart);
3235 need_clobber |= (GET_CODE (xpart) == SUBREG);
3237 last_insn = emit_move_insn (xpart, ypart);
3240 seq = get_insns ();
3241 end_sequence ();
3243 /* Show the output dies here. This is necessary for SUBREGs
3244 of pseudos since we cannot track their lifetimes correctly;
3245 hard regs shouldn't appear here except as return values.
3246 We never want to emit such a clobber after reload. */
3247 if (x != y
3248 && ! (reload_in_progress || reload_completed)
3249 && need_clobber != 0)
3250 emit_clobber (x);
3252 emit_insn (seq);
3254 return last_insn;
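/* Illustrative sketch -- not part of expr.c.  When no single move pattern
   covers the whole mode, the loop above simply moves the value one word at
   a time via operand_subword.  A hypothetical equivalent for a 16-byte
   value and 4-byte words: */

#include <stdio.h>
#include <string.h>

int
main (void)
{
  unsigned char x[16], y[16];
  int i;

  for (i = 0; i < 16; i++)
    y[i] = (unsigned char) i;
  for (i = 0; i < 16 / 4; i++)                /* one move per word */
    memcpy (x + i * 4, y + i * 4, 4);

  printf ("words match: %d\n", memcmp (x, y, 16) == 0);   /* 1 */
  return 0;
}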
3257 /* Low level part of emit_move_insn.
3258 Called just like emit_move_insn, but assumes X and Y
3259 are basically valid. */
3261 rtx
3262 emit_move_insn_1 (rtx x, rtx y)
3264 enum machine_mode mode = GET_MODE (x);
3265 enum insn_code code;
3267 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3269 code = optab_handler (mov_optab, mode);
3270 if (code != CODE_FOR_nothing)
3271 return emit_insn (GEN_FCN (code) (x, y));
3273 /* Expand complex moves by moving real part and imag part. */
3274 if (COMPLEX_MODE_P (mode))
3275 return emit_move_complex (mode, x, y);
3277 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3278 || ALL_FIXED_POINT_MODE_P (mode))
3280 rtx result = emit_move_via_integer (mode, x, y, true);
3282 /* If we can't find an integer mode, use multi words. */
3283 if (result)
3284 return result;
3285 else
3286 return emit_move_multi_word (mode, x, y);
3289 if (GET_MODE_CLASS (mode) == MODE_CC)
3290 return emit_move_ccmode (mode, x, y);
3292 /* Try using a move pattern for the corresponding integer mode. This is
3293 only safe when simplify_subreg can convert MODE constants into integer
3294 constants. At present, it can only do this reliably if the value
3295 fits within a HOST_WIDE_INT. */
3296 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3298 rtx ret = emit_move_via_integer (mode, x, y, false);
3299 if (ret)
3300 return ret;
3303 return emit_move_multi_word (mode, x, y);
3306 /* Generate code to copy Y into X.
3307 Both Y and X must have the same mode, except that
3308 Y can be a constant with VOIDmode.
3309 This mode cannot be BLKmode; use emit_block_move for that.
3311 Return the last instruction emitted. */
3313 rtx
3314 emit_move_insn (rtx x, rtx y)
3316 enum machine_mode mode = GET_MODE (x);
3317 rtx y_cst = NULL_RTX;
3318 rtx last_insn, set;
3320 gcc_assert (mode != BLKmode
3321 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3323 if (CONSTANT_P (y))
3325 if (optimize
3326 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3327 && (last_insn = compress_float_constant (x, y)))
3328 return last_insn;
3330 y_cst = y;
3332 if (!LEGITIMATE_CONSTANT_P (y))
3334 y = force_const_mem (mode, y);
3336 /* If the target's cannot_force_const_mem prevented the spill,
3337 assume that the target's move expanders will also take care
3338 of the non-legitimate constant. */
3339 if (!y)
3340 y = y_cst;
3341 else
3342 y = use_anchored_address (y);
3346 /* If X or Y are memory references, verify that their addresses are valid
3347 for the machine. */
3348 if (MEM_P (x)
3349 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3350 MEM_ADDR_SPACE (x))
3351 && ! push_operand (x, GET_MODE (x))))
3352 x = validize_mem (x);
3354 if (MEM_P (y)
3355 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3356 MEM_ADDR_SPACE (y)))
3357 y = validize_mem (y);
3359 gcc_assert (mode != BLKmode);
3361 last_insn = emit_move_insn_1 (x, y);
3363 if (y_cst && REG_P (x)
3364 && (set = single_set (last_insn)) != NULL_RTX
3365 && SET_DEST (set) == x
3366 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3367 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3369 return last_insn;
3372 /* If Y is representable exactly in a narrower mode, and the target can
3373 perform the extension directly from constant or memory, then emit the
3374 move as an extension. */
3376 static rtx
3377 compress_float_constant (rtx x, rtx y)
3379 enum machine_mode dstmode = GET_MODE (x);
3380 enum machine_mode orig_srcmode = GET_MODE (y);
3381 enum machine_mode srcmode;
3382 REAL_VALUE_TYPE r;
3383 int oldcost, newcost;
3384 bool speed = optimize_insn_for_speed_p ();
3386 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3388 if (LEGITIMATE_CONSTANT_P (y))
3389 oldcost = rtx_cost (y, SET, speed);
3390 else
3391 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3393 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3394 srcmode != orig_srcmode;
3395 srcmode = GET_MODE_WIDER_MODE (srcmode))
3397 enum insn_code ic;
3398 rtx trunc_y, last_insn;
3400 /* Skip if the target can't extend this way. */
3401 ic = can_extend_p (dstmode, srcmode, 0);
3402 if (ic == CODE_FOR_nothing)
3403 continue;
3405 /* Skip if the narrowed value isn't exact. */
3406 if (! exact_real_truncate (srcmode, &r))
3407 continue;
3409 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3411 if (LEGITIMATE_CONSTANT_P (trunc_y))
3413 /* Skip if the target needs extra instructions to perform
3414 the extension. */
3415 if (!insn_operand_matches (ic, 1, trunc_y))
3416 continue;
3417 /* This is valid, but may not be cheaper than the original. */
3418 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3419 if (oldcost < newcost)
3420 continue;
3422 else if (float_extend_from_mem[dstmode][srcmode])
3424 trunc_y = force_const_mem (srcmode, trunc_y);
3425 /* This is valid, but may not be cheaper than the original. */
3426 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3427 if (oldcost < newcost)
3428 continue;
3429 trunc_y = validize_mem (trunc_y);
3431 else
3432 continue;
3434 /* For CSE's benefit, force the compressed constant pool entry
3435 into a new pseudo. This constant may be used in different modes,
3436 and if not, combine will put things back together for us. */
3437 trunc_y = force_reg (srcmode, trunc_y);
3438 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3439 last_insn = get_last_insn ();
3441 if (REG_P (x))
3442 set_unique_reg_note (last_insn, REG_EQUAL, y);
3444 return last_insn;
3447 return NULL_RTX;
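/* Illustrative sketch -- not part of expr.c.  The "narrowed value isn't
   exact" test above amounts to checking that converting the constant to the
   narrower format and back reproduces the original value.  Hypothetical
   check in plain C for a double constant and the float format: */

#include <stdio.h>

static int
exact_in_float (double d)
{
  return (double) (float) d == d;
}

int
main (void)
{
  printf ("0.5: %d   0.1: %d\n", exact_in_float (0.5), exact_in_float (0.1));
  /* Prints 1 and 0: 0.5 survives the round trip, 0.1 does not.  */
  return 0;
}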
3450 /* Pushing data onto the stack. */
3452 /* Push a block of length SIZE (perhaps variable)
3453 and return an rtx to address the beginning of the block.
3454 The value may be virtual_outgoing_args_rtx.
3456 EXTRA is the number of bytes of padding to push in addition to SIZE.
3457 BELOW nonzero means this padding comes at low addresses;
3458 otherwise, the padding comes at high addresses. */
3460 rtx
3461 push_block (rtx size, int extra, int below)
3463 rtx temp;
3465 size = convert_modes (Pmode, ptr_mode, size, 1);
3466 if (CONSTANT_P (size))
3467 anti_adjust_stack (plus_constant (size, extra));
3468 else if (REG_P (size) && extra == 0)
3469 anti_adjust_stack (size);
3470 else
3472 temp = copy_to_mode_reg (Pmode, size);
3473 if (extra != 0)
3474 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3475 temp, 0, OPTAB_LIB_WIDEN);
3476 anti_adjust_stack (temp);
3479 #ifndef STACK_GROWS_DOWNWARD
3480 if (0)
3481 #else
3482 if (1)
3483 #endif
3485 temp = virtual_outgoing_args_rtx;
3486 if (extra != 0 && below)
3487 temp = plus_constant (temp, extra);
3489 else
3491 if (CONST_INT_P (size))
3492 temp = plus_constant (virtual_outgoing_args_rtx,
3493 -INTVAL (size) - (below ? 0 : extra));
3494 else if (extra != 0 && !below)
3495 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3496 negate_rtx (Pmode, plus_constant (size, extra)));
3497 else
3498 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3499 negate_rtx (Pmode, size));
3502 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3505 #ifdef PUSH_ROUNDING
3507 /* Emit single push insn. */
3509 static void
3510 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3512 rtx dest_addr;
3513 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3514 rtx dest;
3515 enum insn_code icode;
3517 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3518 /* If there is a push pattern, use it. Otherwise try the old way of
3519 handing a MEM that represents the push operation to the move expander. */
3520 icode = optab_handler (push_optab, mode);
3521 if (icode != CODE_FOR_nothing)
3523 struct expand_operand ops[1];
3525 create_input_operand (&ops[0], x, mode);
3526 if (maybe_expand_insn (icode, 1, ops))
3527 return;
3529 if (GET_MODE_SIZE (mode) == rounded_size)
3530 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3531 /* If we are to pad downward, adjust the stack pointer first and
3532 then store X into the stack location using an offset. This is
3533 because emit_move_insn does not know how to pad; it does not have
3534 access to type. */
3535 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3537 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3538 HOST_WIDE_INT offset;
3540 emit_move_insn (stack_pointer_rtx,
3541 expand_binop (Pmode,
3542 #ifdef STACK_GROWS_DOWNWARD
3543 sub_optab,
3544 #else
3545 add_optab,
3546 #endif
3547 stack_pointer_rtx,
3548 GEN_INT (rounded_size),
3549 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3551 offset = (HOST_WIDE_INT) padding_size;
3552 #ifdef STACK_GROWS_DOWNWARD
3553 if (STACK_PUSH_CODE == POST_DEC)
3554 /* We have already decremented the stack pointer, so get the
3555 previous value. */
3556 offset += (HOST_WIDE_INT) rounded_size;
3557 #else
3558 if (STACK_PUSH_CODE == POST_INC)
3559 /* We have already incremented the stack pointer, so get the
3560 previous value. */
3561 offset -= (HOST_WIDE_INT) rounded_size;
3562 #endif
3563 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3565 else
3567 #ifdef STACK_GROWS_DOWNWARD
3568 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3569 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3570 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3571 #else
3572 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3573 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3574 GEN_INT (rounded_size));
3575 #endif
3576 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3579 dest = gen_rtx_MEM (mode, dest_addr);
3581 if (type != 0)
3583 set_mem_attributes (dest, type, 1);
3585 if (flag_optimize_sibling_calls)
3586 /* Function incoming arguments may overlap with sibling call
3587 outgoing arguments and we cannot allow reordering of reads
3588 from function arguments with stores to outgoing arguments
3589 of sibling calls. */
3590 set_mem_alias_set (dest, 0);
3592 emit_move_insn (dest, x);
3594 #endif
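/* Illustrative sketch -- not part of expr.c.  When the argument must be
   padded downward, the code above first drops the stack pointer by the
   whole rounded size and then stores the value at an offset equal to the
   padding.  Hypothetical numbers, assuming a downward-growing stack, pushes
   rounded to 4 bytes and a 2-byte value: */

#include <stdio.h>

int
main (void)
{
  unsigned mode_size = 2;
  unsigned rounded_size = (mode_size + 3) & ~3u;   /* stands in for PUSH_ROUNDING */
  unsigned padding_size = rounded_size - mode_size;
  unsigned long sp = 0x1000;

  sp -= rounded_size;                              /* explicit stack adjustment */
  printf ("store at sp + %u = %#lx\n", padding_size, sp + padding_size);
  /* Prints "store at sp + 2 = 0xffe".  */
  return 0;
}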
3596 /* Generate code to push X onto the stack, assuming it has mode MODE and
3597 type TYPE.
3598 MODE is redundant except when X is a CONST_INT (since they don't
3599 carry mode info).
3600 SIZE is an rtx for the size of data to be copied (in bytes),
3601 needed only if X is BLKmode.
3603 ALIGN (in bits) is maximum alignment we can assume.
3605 If PARTIAL and REG are both nonzero, then copy that many of the first
3606 bytes of X into registers starting with REG, and push the rest of X.
3607 The amount of space pushed is decreased by PARTIAL bytes.
3608 REG must be a hard register in this case.
3609 If REG is zero but PARTIAL is not, take all other actions for an
3610 argument partially in registers, but do not actually load any
3611 registers.
3613 EXTRA is the amount in bytes of extra space to leave next to this arg.
3614 This is ignored if an argument block has already been allocated.
3616 On a machine that lacks real push insns, ARGS_ADDR is the address of
3617 the bottom of the argument block for this call. We use indexing off there
3618 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3619 argument block has not been preallocated.
3621 ARGS_SO_FAR is the size of args previously pushed for this call.
3623 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3624 for arguments passed in registers. If nonzero, it will be the number
3625 of bytes required. */
3627 void
3628 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3629 unsigned int align, int partial, rtx reg, int extra,
3630 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3631 rtx alignment_pad)
3633 rtx xinner;
3634 enum direction stack_direction
3635 #ifdef STACK_GROWS_DOWNWARD
3636 = downward;
3637 #else
3638 = upward;
3639 #endif
3641 /* Decide where to pad the argument: `downward' for below,
3642 `upward' for above, or `none' for don't pad it.
3643 Default is below for small data on big-endian machines; else above. */
3644 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3646 /* Invert direction if stack is post-decrement.
3647 FIXME: why? */
3648 if (STACK_PUSH_CODE == POST_DEC)
3649 if (where_pad != none)
3650 where_pad = (where_pad == downward ? upward : downward);
3652 xinner = x;
3654 if (mode == BLKmode
3655 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3657 /* Copy a block into the stack, entirely or partially. */
3659 rtx temp;
3660 int used;
3661 int offset;
3662 int skip;
3664 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3665 used = partial - offset;
3667 if (mode != BLKmode)
3669 /* A value is to be stored in an insufficiently aligned
3670 stack slot; copy via a suitably aligned slot if
3671 necessary. */
3672 size = GEN_INT (GET_MODE_SIZE (mode));
3673 if (!MEM_P (xinner))
3675 temp = assign_temp (type, 0, 1, 1);
3676 emit_move_insn (temp, xinner);
3677 xinner = temp;
3681 gcc_assert (size);
3683 /* USED is now the # of bytes we need not copy to the stack
3684 because registers will take care of them. */
3686 if (partial != 0)
3687 xinner = adjust_address (xinner, BLKmode, used);
3689 /* If the partial register-part of the arg counts in its stack size,
3690 skip the part of stack space corresponding to the registers.
3691 Otherwise, start copying to the beginning of the stack space,
3692 by setting SKIP to 0. */
3693 skip = (reg_parm_stack_space == 0) ? 0 : used;
3695 #ifdef PUSH_ROUNDING
3696 /* Do it with several push insns if that doesn't take lots of insns
3697 and if there is no difficulty with push insns that skip bytes
3698 on the stack for alignment purposes. */
3699 if (args_addr == 0
3700 && PUSH_ARGS
3701 && CONST_INT_P (size)
3702 && skip == 0
3703 && MEM_ALIGN (xinner) >= align
3704 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3705 /* Here we avoid the case of a structure whose weak alignment
3706 forces many pushes of a small amount of data,
3707 and such small pushes do rounding that causes trouble. */
3708 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3709 || align >= BIGGEST_ALIGNMENT
3710 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3711 == (align / BITS_PER_UNIT)))
3712 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3714 /* Push padding now if padding above and stack grows down,
3715 or if padding below and stack grows up.
3716 But if space already allocated, this has already been done. */
3717 if (extra && args_addr == 0
3718 && where_pad != none && where_pad != stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3721 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3723 else
3724 #endif /* PUSH_ROUNDING */
3726 rtx target;
3728 /* Otherwise make space on the stack and copy the data
3729 to the address of that space. */
3731 /* Deduct words put into registers from the size we must copy. */
3732 if (partial != 0)
3734 if (CONST_INT_P (size))
3735 size = GEN_INT (INTVAL (size) - used);
3736 else
3737 size = expand_binop (GET_MODE (size), sub_optab, size,
3738 GEN_INT (used), NULL_RTX, 0,
3739 OPTAB_LIB_WIDEN);
3742 /* Get the address of the stack space.
3743 In this case, we do not deal with EXTRA separately.
3744 A single stack adjust will do. */
3745 if (! args_addr)
3747 temp = push_block (size, extra, where_pad == downward);
3748 extra = 0;
3750 else if (CONST_INT_P (args_so_far))
3751 temp = memory_address (BLKmode,
3752 plus_constant (args_addr,
3753 skip + INTVAL (args_so_far)));
3754 else
3755 temp = memory_address (BLKmode,
3756 plus_constant (gen_rtx_PLUS (Pmode,
3757 args_addr,
3758 args_so_far),
3759 skip));
3761 if (!ACCUMULATE_OUTGOING_ARGS)
3763 /* If the source is referenced relative to the stack pointer,
3764 copy it to another register to stabilize it. We do not need
3765 to do this if we know that we won't be changing sp. */
3767 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3768 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3769 temp = copy_to_reg (temp);
3772 target = gen_rtx_MEM (BLKmode, temp);
3774 /* We do *not* set_mem_attributes here, because incoming arguments
3775 may overlap with sibling call outgoing arguments and we cannot
3776 allow reordering of reads from function arguments with stores
3777 to outgoing arguments of sibling calls. We do, however, want
3778 to record the alignment of the stack slot. */
3779 /* ALIGN may well be better aligned than TYPE, e.g. due to
3780 PARM_BOUNDARY. Assume the caller isn't lying. */
3781 set_mem_align (target, align);
3783 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3786 else if (partial > 0)
3788 /* Scalar partly in registers. */
3790 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3791 int i;
3792 int not_stack;
3793 /* # bytes of start of argument
3794 that we must make space for but need not store. */
3795 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3796 int args_offset = INTVAL (args_so_far);
3797 int skip;
3799 /* Push padding now if padding above and stack grows down,
3800 or if padding below and stack grows up.
3801 But if space already allocated, this has already been done. */
3802 if (extra && args_addr == 0
3803 && where_pad != none && where_pad != stack_direction)
3804 anti_adjust_stack (GEN_INT (extra));
3806 /* If we make space by pushing it, we might as well push
3807 the real data. Otherwise, we can leave OFFSET nonzero
3808 and leave the space uninitialized. */
3809 if (args_addr == 0)
3810 offset = 0;
3812 /* Now NOT_STACK gets the number of words that we don't need to
3813 allocate on the stack. Convert OFFSET to words too. */
3814 not_stack = (partial - offset) / UNITS_PER_WORD;
3815 offset /= UNITS_PER_WORD;
3817 /* If the partial register-part of the arg counts in its stack size,
3818 skip the part of stack space corresponding to the registers.
3819 Otherwise, start copying to the beginning of the stack space,
3820 by setting SKIP to 0. */
3821 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3823 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3824 x = validize_mem (force_const_mem (mode, x));
3826 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3827 SUBREGs of such registers are not allowed. */
3828 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3829 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3830 x = copy_to_reg (x);
3832 /* Loop over all the words allocated on the stack for this arg. */
3833 /* We can do it by words, because any scalar bigger than a word
3834 has a size that is a multiple of a word. */
3835 #ifndef PUSH_ARGS_REVERSED
3836 for (i = not_stack; i < size; i++)
3837 #else
3838 for (i = size - 1; i >= not_stack; i--)
3839 #endif
3840 if (i >= not_stack + offset)
3841 emit_push_insn (operand_subword_force (x, i, mode),
3842 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3843 0, args_addr,
3844 GEN_INT (args_offset + ((i - not_stack + skip)
3845 * UNITS_PER_WORD)),
3846 reg_parm_stack_space, alignment_pad);
3848 else
3850 rtx addr;
3851 rtx dest;
3853 /* Push padding now if padding above and stack grows down,
3854 or if padding below and stack grows up.
3855 But if space already allocated, this has already been done. */
3856 if (extra && args_addr == 0
3857 && where_pad != none && where_pad != stack_direction)
3858 anti_adjust_stack (GEN_INT (extra));
3860 #ifdef PUSH_ROUNDING
3861 if (args_addr == 0 && PUSH_ARGS)
3862 emit_single_push_insn (mode, x, type);
3863 else
3864 #endif
3866 if (CONST_INT_P (args_so_far))
3867 addr
3868 = memory_address (mode,
3869 plus_constant (args_addr,
3870 INTVAL (args_so_far)));
3871 else
3872 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3873 args_so_far));
3874 dest = gen_rtx_MEM (mode, addr);
3876 /* We do *not* set_mem_attributes here, because incoming arguments
3877 may overlap with sibling call outgoing arguments and we cannot
3878 allow reordering of reads from function arguments with stores
3879 to outgoing arguments of sibling calls. We do, however, want
3880 to record the alignment of the stack slot. */
3881 /* ALIGN may well be better aligned than TYPE, e.g. due to
3882 PARM_BOUNDARY. Assume the caller isn't lying. */
3883 set_mem_align (dest, align);
3885 emit_move_insn (dest, x);
3889 /* If part should go in registers, copy that part
3890 into the appropriate registers. Do this now, at the end,
3891 since mem-to-mem copies above may do function calls. */
3892 if (partial > 0 && reg != 0)
3894 /* Handle calls that pass values in multiple non-contiguous locations.
3895 The Irix 6 ABI has examples of this. */
3896 if (GET_CODE (reg) == PARALLEL)
3897 emit_group_load (reg, x, type, -1);
3898 else
3900 gcc_assert (partial % UNITS_PER_WORD == 0);
3901 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3905 if (extra && args_addr == 0 && where_pad == stack_direction)
3906 anti_adjust_stack (GEN_INT (extra));
3908 if (alignment_pad && args_addr == 0)
3909 anti_adjust_stack (alignment_pad);
3912 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3913 operations. */
3915 static rtx
3916 get_subtarget (rtx x)
3918 return (optimize
3919 || x == 0
3920 /* Only registers can be subtargets. */
3921 || !REG_P (x)
3922 /* Don't use hard regs to avoid extending their life. */
3923 || REGNO (x) < FIRST_PSEUDO_REGISTER
3924 ? 0 : x);
3927 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3928 FIELD is a bitfield. Returns true if the optimization was successful,
3929 and there's nothing else to do. */
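/* Illustrative example, added commentary (the struct below is made up,
   and actual bitfield layout is target-dependent): for source like

     struct S { unsigned int pad : 24; unsigned int top : 8; } s;
     void f (void) { s.top += 3; }

   if TOP happens to occupy the most-significant bits of its containing
   word, the addition can be done directly on that word, because any
   carry out of the field simply falls off the end; a 1-bit field can
   instead be flipped with an xor of the (masked) right-hand side.  */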
3931 static bool
3932 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3933 unsigned HOST_WIDE_INT bitpos,
3934 enum machine_mode mode1, rtx str_rtx,
3935 tree to, tree src)
3937 enum machine_mode str_mode = GET_MODE (str_rtx);
3938 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3939 tree op0, op1;
3940 rtx value, result;
3941 optab binop;
3942 gimple srcstmt;
3943 enum tree_code code;
3945 if (mode1 != VOIDmode
3946 || bitsize >= BITS_PER_WORD
3947 || str_bitsize > BITS_PER_WORD
3948 || TREE_SIDE_EFFECTS (to)
3949 || TREE_THIS_VOLATILE (to))
3950 return false;
3952 STRIP_NOPS (src);
3953 if (TREE_CODE (src) != SSA_NAME)
3954 return false;
3955 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3956 return false;
3958 srcstmt = get_gimple_for_ssa_name (src);
3959 if (!srcstmt
3960 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
3961 return false;
3963 code = gimple_assign_rhs_code (srcstmt);
3965 op0 = gimple_assign_rhs1 (srcstmt);
3967 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
3968 to find its initialization. Hopefully the initialization will
3969 be from a bitfield load. */
3970 if (TREE_CODE (op0) == SSA_NAME)
3972 gimple op0stmt = get_gimple_for_ssa_name (op0);
3974 /* We want to eventually have OP0 be the same as TO, which
3975 should be a bitfield. */
3976 if (!op0stmt
3977 || !is_gimple_assign (op0stmt)
3978 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
3979 return false;
3980 op0 = gimple_assign_rhs1 (op0stmt);
3983 op1 = gimple_assign_rhs2 (srcstmt);
3985 if (!operand_equal_p (to, op0, 0))
3986 return false;
3988 if (MEM_P (str_rtx))
3990 unsigned HOST_WIDE_INT offset1;
3992 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3993 str_mode = word_mode;
3994 str_mode = get_best_mode (bitsize, bitpos,
3995 MEM_ALIGN (str_rtx), str_mode, 0);
3996 if (str_mode == VOIDmode)
3997 return false;
3998 str_bitsize = GET_MODE_BITSIZE (str_mode);
4000 offset1 = bitpos;
4001 bitpos %= str_bitsize;
4002 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4003 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4005 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4006 return false;
4008 /* If the bit field covers the whole REG/MEM, store_field
4009 will likely generate better code. */
4010 if (bitsize >= str_bitsize)
4011 return false;
4013 /* We can't handle fields split across multiple entities. */
4014 if (bitpos + bitsize > str_bitsize)
4015 return false;
4017 if (BYTES_BIG_ENDIAN)
4018 bitpos = str_bitsize - bitpos - bitsize;
4020 switch (code)
4022 case PLUS_EXPR:
4023 case MINUS_EXPR:
4024 /* For now, just optimize the case of the topmost bitfield,
4025 where no masking is needed, and of 1-bit bitfields,
4026 where xor can be used.
4027 We might also win by one instruction for the other bitfields
4028 if insv/extv instructions aren't used, so that
4029 can be added later. */
4030 if (bitpos + bitsize != str_bitsize
4031 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4032 break;
4034 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4035 value = convert_modes (str_mode,
4036 TYPE_MODE (TREE_TYPE (op1)), value,
4037 TYPE_UNSIGNED (TREE_TYPE (op1)));
4039 /* We may be accessing data outside the field, which means
4040 we can alias adjacent data. */
4041 if (MEM_P (str_rtx))
4043 str_rtx = shallow_copy_rtx (str_rtx);
4044 set_mem_alias_set (str_rtx, 0);
4045 set_mem_expr (str_rtx, 0);
4048 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4049 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4051 value = expand_and (str_mode, value, const1_rtx, NULL);
4052 binop = xor_optab;
4054 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4055 build_int_cst (NULL_TREE, bitpos),
4056 NULL_RTX, 1);
4057 result = expand_binop (str_mode, binop, str_rtx,
4058 value, str_rtx, 1, OPTAB_WIDEN);
4059 if (result != str_rtx)
4060 emit_move_insn (str_rtx, result);
4061 return true;
4063 case BIT_IOR_EXPR:
4064 case BIT_XOR_EXPR:
4065 if (TREE_CODE (op1) != INTEGER_CST)
4066 break;
4067 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4068 value = convert_modes (GET_MODE (str_rtx),
4069 TYPE_MODE (TREE_TYPE (op1)), value,
4070 TYPE_UNSIGNED (TREE_TYPE (op1)));
4072 /* We may be accessing data outside the field, which means
4073 we can alias adjacent data. */
4074 if (MEM_P (str_rtx))
4076 str_rtx = shallow_copy_rtx (str_rtx);
4077 set_mem_alias_set (str_rtx, 0);
4078 set_mem_expr (str_rtx, 0);
4081 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4082 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4084 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4085 - 1);
4086 value = expand_and (GET_MODE (str_rtx), value, mask,
4087 NULL_RTX);
4089 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4090 build_int_cst (NULL_TREE, bitpos),
4091 NULL_RTX, 1);
4092 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4093 value, str_rtx, 1, OPTAB_WIDEN);
4094 if (result != str_rtx)
4095 emit_move_insn (str_rtx, result);
4096 return true;
4098 default:
4099 break;
4102 return false;
4106 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4107 is true, try generating a nontemporal store. */
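/* Added illustration, not from the original sources (the packed struct
   and function names are invented): the first special case below covers
   under-aligned stores through a MEM_REF or TARGET_MEM_REF, e.g. for
   something like

     struct __attribute__ ((packed)) P { char c; int i; };
     void set (struct P *p, int v) { p->i = v; }

   where the store to P->I may have less alignment than its mode wants;
   if the target provides a movmisalign pattern for that mode, it is
   used instead of a plain move.  */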
4109 void
4110 expand_assignment (tree to, tree from, bool nontemporal)
4112 rtx to_rtx = 0;
4113 rtx result;
4114 enum machine_mode mode;
4115 int align;
4116 enum insn_code icode;
4118 /* Don't crash if the lhs of the assignment was erroneous. */
4119 if (TREE_CODE (to) == ERROR_MARK)
4121 expand_normal (from);
4122 return;
4125 /* Optimize away no-op moves without side-effects. */
4126 if (operand_equal_p (to, from, 0))
4127 return;
4129 mode = TYPE_MODE (TREE_TYPE (to));
4130 if ((TREE_CODE (to) == MEM_REF
4131 || TREE_CODE (to) == TARGET_MEM_REF)
4132 && mode != BLKmode
4133 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4134 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4135 < (signed) GET_MODE_ALIGNMENT (mode))
4136 && ((icode = optab_handler (movmisalign_optab, mode))
4137 != CODE_FOR_nothing))
4139 struct expand_operand ops[2];
4140 enum machine_mode address_mode;
4141 rtx reg, op0, mem;
4143 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4144 reg = force_not_mem (reg);
4146 if (TREE_CODE (to) == MEM_REF)
4148 addr_space_t as
4149 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4150 tree base = TREE_OPERAND (to, 0);
4151 address_mode = targetm.addr_space.address_mode (as);
4152 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4153 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4154 if (!integer_zerop (TREE_OPERAND (to, 1)))
4156 rtx off
4157 = immed_double_int_const (mem_ref_offset (to), address_mode);
4158 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4160 op0 = memory_address_addr_space (mode, op0, as);
4161 mem = gen_rtx_MEM (mode, op0);
4162 set_mem_attributes (mem, to, 0);
4163 set_mem_addr_space (mem, as);
4165 else if (TREE_CODE (to) == TARGET_MEM_REF)
4167 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4168 struct mem_address addr;
4170 get_address_description (to, &addr);
4171 op0 = addr_for_mem_ref (&addr, as, true);
4172 op0 = memory_address_addr_space (mode, op0, as);
4173 mem = gen_rtx_MEM (mode, op0);
4174 set_mem_attributes (mem, to, 0);
4175 set_mem_addr_space (mem, as);
4177 else
4178 gcc_unreachable ();
4179 if (TREE_THIS_VOLATILE (to))
4180 MEM_VOLATILE_P (mem) = 1;
4182 create_fixed_operand (&ops[0], mem);
4183 create_input_operand (&ops[1], reg, mode);
4184 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4185 silently be omitted. */
4186 expand_insn (icode, 2, ops);
4187 return;
4190 /* Assignment of a structure component needs special treatment
4191 if the structure component's rtx is not simply a MEM.
4192 Assignment of an array element at a constant index, and assignment of
4193 an array element in an unaligned packed structure field, has the same
4194 problem. */
4195 if (handled_component_p (to)
4196 /* ??? We only need to handle MEM_REF here if the access is not
4197 a full access of the base object. */
4198 || (TREE_CODE (to) == MEM_REF
4199 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4200 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4202 enum machine_mode mode1;
4203 HOST_WIDE_INT bitsize, bitpos;
4204 tree offset;
4205 int unsignedp;
4206 int volatilep = 0;
4207 tree tem;
4209 push_temp_slots ();
4210 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4211 &unsignedp, &volatilep, true);
4213 /* If we are going to use store_bit_field and extract_bit_field,
4214 make sure to_rtx will be safe for multiple use. */
4216 to_rtx = expand_normal (tem);
4218 /* If the bitfield is volatile, we want to access it in the
4219 field's mode, not the computed mode.
4220 If a MEM has VOIDmode (external with incomplete type),
4221 use BLKmode for it instead. */
4222 if (MEM_P (to_rtx))
4224 if (volatilep && flag_strict_volatile_bitfields > 0)
4225 to_rtx = adjust_address (to_rtx, mode1, 0);
4226 else if (GET_MODE (to_rtx) == VOIDmode)
4227 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4230 if (offset != 0)
4232 enum machine_mode address_mode;
4233 rtx offset_rtx;
4235 if (!MEM_P (to_rtx))
4237 /* We can get constant negative offsets into arrays with broken
4238 user code. Translate this to a trap instead of ICEing. */
4239 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4240 expand_builtin_trap ();
4241 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4244 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4245 address_mode
4246 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4247 if (GET_MODE (offset_rtx) != address_mode)
4248 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4250 /* A constant address in TO_RTX can have VOIDmode; we must not try
4251 to call force_reg for that case, so avoid it. */
4252 if (MEM_P (to_rtx)
4253 && GET_MODE (to_rtx) == BLKmode
4254 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4255 && bitsize > 0
4256 && (bitpos % bitsize) == 0
4257 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4258 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4260 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4261 bitpos = 0;
4264 to_rtx = offset_address (to_rtx, offset_rtx,
4265 highest_pow2_factor_for_target (to,
4266 offset));
4269 /* No action is needed if the target is not a memory and the field
4270 lies completely outside that target. This can occur if the source
4271 code contains an out-of-bounds access to a small array. */
4272 if (!MEM_P (to_rtx)
4273 && GET_MODE (to_rtx) != BLKmode
4274 && (unsigned HOST_WIDE_INT) bitpos
4275 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4277 expand_normal (from);
4278 result = NULL;
4280 /* Handle expand_expr of a complex value returning a CONCAT. */
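/* Added illustration, not from the original sources (function name
   invented): when TO_RTX is a CONCAT the complex value is held as two
   pieces, so e.g.

     _Complex double
     make_complex (double re, double im)
     {
       _Complex double z = 0;
       __real__ z = re;
       __imag__ z = im;
       return z;
     }

   can store each assignment into just one half, XEXP (to_rtx, 0) or
   XEXP (to_rtx, 1), via the bitpos == 0 and bitpos == mode_bitsize / 2
   cases below.  */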
4281 else if (GET_CODE (to_rtx) == CONCAT)
4283 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4284 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4285 && bitpos == 0
4286 && bitsize == mode_bitsize)
4287 result = store_expr (from, to_rtx, false, nontemporal);
4288 else if (bitsize == mode_bitsize / 2
4289 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4290 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4291 nontemporal);
4292 else if (bitpos + bitsize <= mode_bitsize / 2)
4293 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4294 mode1, from, TREE_TYPE (tem),
4295 get_alias_set (to), nontemporal);
4296 else if (bitpos >= mode_bitsize / 2)
4297 result = store_field (XEXP (to_rtx, 1), bitsize,
4298 bitpos - mode_bitsize / 2, mode1, from,
4299 TREE_TYPE (tem), get_alias_set (to),
4300 nontemporal);
4301 else if (bitpos == 0 && bitsize == mode_bitsize)
4303 rtx from_rtx;
4304 result = expand_normal (from);
4305 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4306 TYPE_MODE (TREE_TYPE (from)), 0);
4307 emit_move_insn (XEXP (to_rtx, 0),
4308 read_complex_part (from_rtx, false));
4309 emit_move_insn (XEXP (to_rtx, 1),
4310 read_complex_part (from_rtx, true));
4312 else
4314 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4315 GET_MODE_SIZE (GET_MODE (to_rtx)),
4316 0);
4317 write_complex_part (temp, XEXP (to_rtx, 0), false);
4318 write_complex_part (temp, XEXP (to_rtx, 1), true);
4319 result = store_field (temp, bitsize, bitpos, mode1, from,
4320 TREE_TYPE (tem), get_alias_set (to),
4321 nontemporal);
4322 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4323 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4326 else
4328 if (MEM_P (to_rtx))
4330 /* If the field is at offset zero, we could have been given the
4331 DECL_RTX of the parent struct. Don't munge it. */
4332 to_rtx = shallow_copy_rtx (to_rtx);
4334 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4336 /* Deal with volatile and readonly fields. The former is only
4337 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4338 if (volatilep)
4339 MEM_VOLATILE_P (to_rtx) = 1;
4340 if (component_uses_parent_alias_set (to))
4341 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4344 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4345 to_rtx, to, from))
4346 result = NULL;
4347 else
4348 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4349 TREE_TYPE (tem), get_alias_set (to),
4350 nontemporal);
4353 if (result)
4354 preserve_temp_slots (result);
4355 free_temp_slots ();
4356 pop_temp_slots ();
4357 return;
4360 /* If the rhs is a function call and its value is not an aggregate,
4361 call the function before we start to compute the lhs.
4362 This is needed for correct code for cases such as
4363 val = setjmp (buf) on machines where reference to val
4364 requires loading up part of an address in a separate insn.
4366 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4367 since it might be a promoted variable where the zero- or sign-extension
4368 needs to be done. Handling this in the normal way is safe because no
4369 computation is done before the call. The same is true for SSA names. */
4370 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4371 && COMPLETE_TYPE_P (TREE_TYPE (from))
4372 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4373 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4374 && REG_P (DECL_RTL (to)))
4375 || TREE_CODE (to) == SSA_NAME))
4377 rtx value;
4379 push_temp_slots ();
4380 value = expand_normal (from);
4381 if (to_rtx == 0)
4382 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4384 /* Handle calls that return values in multiple non-contiguous locations.
4385 The Irix 6 ABI has examples of this. */
4386 if (GET_CODE (to_rtx) == PARALLEL)
4387 emit_group_load (to_rtx, value, TREE_TYPE (from),
4388 int_size_in_bytes (TREE_TYPE (from)));
4389 else if (GET_MODE (to_rtx) == BLKmode)
4390 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4391 else
4393 if (POINTER_TYPE_P (TREE_TYPE (to)))
4394 value = convert_memory_address_addr_space
4395 (GET_MODE (to_rtx), value,
4396 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4398 emit_move_insn (to_rtx, value);
4400 preserve_temp_slots (to_rtx);
4401 free_temp_slots ();
4402 pop_temp_slots ();
4403 return;
4406 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4407 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4409 if (to_rtx == 0)
4410 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4412 /* Don't move directly into a return register. */
4413 if (TREE_CODE (to) == RESULT_DECL
4414 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4416 rtx temp;
4418 push_temp_slots ();
4419 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4421 if (GET_CODE (to_rtx) == PARALLEL)
4422 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4423 int_size_in_bytes (TREE_TYPE (from)));
4424 else
4425 emit_move_insn (to_rtx, temp);
4427 preserve_temp_slots (to_rtx);
4428 free_temp_slots ();
4429 pop_temp_slots ();
4430 return;
4433 /* In case we are returning the contents of an object which overlaps
4434 the place the value is being stored, use a safe function when copying
4435 a value through a pointer into a structure value return block. */
4436 if (TREE_CODE (to) == RESULT_DECL
4437 && TREE_CODE (from) == INDIRECT_REF
4438 && ADDR_SPACE_GENERIC_P
4439 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4440 && refs_may_alias_p (to, from)
4441 && cfun->returns_struct
4442 && !cfun->returns_pcc_struct)
4444 rtx from_rtx, size;
4446 push_temp_slots ();
4447 size = expr_size (from);
4448 from_rtx = expand_normal (from);
4450 emit_library_call (memmove_libfunc, LCT_NORMAL,
4451 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4452 XEXP (from_rtx, 0), Pmode,
4453 convert_to_mode (TYPE_MODE (sizetype),
4454 size, TYPE_UNSIGNED (sizetype)),
4455 TYPE_MODE (sizetype));
4457 preserve_temp_slots (to_rtx);
4458 free_temp_slots ();
4459 pop_temp_slots ();
4460 return;
4463 /* Compute FROM and store the value in the rtx we got. */
4465 push_temp_slots ();
4466 result = store_expr (from, to_rtx, 0, nontemporal);
4467 preserve_temp_slots (result);
4468 free_temp_slots ();
4469 pop_temp_slots ();
4470 return;
4473 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4474 succeeded, false otherwise. */
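/* Added note, not from the original sources: this can only succeed on
   targets that provide an insn for storent_optab in MODE (used for
   streaming/non-temporal stores); callers such as store_expr below fall
   back to an ordinary move when it returns false.  */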
4476 bool
4477 emit_storent_insn (rtx to, rtx from)
4479 struct expand_operand ops[2];
4480 enum machine_mode mode = GET_MODE (to);
4481 enum insn_code code = optab_handler (storent_optab, mode);
4483 if (code == CODE_FOR_nothing)
4484 return false;
4486 create_fixed_operand (&ops[0], to);
4487 create_input_operand (&ops[1], from, mode);
4488 return maybe_expand_insn (code, 2, ops);
4491 /* Generate code for computing expression EXP,
4492 and storing the value into TARGET.
4494 If the mode is BLKmode then we may return TARGET itself.
4495 It turns out that in BLKmode it doesn't cause a problem,
4496 because C has no operators that could combine two different
4497 assignments into the same BLKmode object with different values
4498 with no intervening sequence point. Will other languages need this
4499 to be more thorough?
4501 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4502 stack, and block moves may need to be treated specially.
4504 If NONTEMPORAL is true, try using a nontemporal store instruction. */
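/* Added note, not from the original sources: within this file,
   store_expr is reached from expand_assignment above and from
   store_constructor below; CALL_PARAM_P is set by callers storing
   outgoing call arguments.  The special cases that follow (BLKmode
   COND_EXPR, promoted SUBREG targets, STRING_CST initializers) exist so
   that the value can be produced directly in TARGET rather than through
   a temporary.  */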
4506 rtx
4507 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4509 rtx temp;
4510 rtx alt_rtl = NULL_RTX;
4511 location_t loc = EXPR_LOCATION (exp);
4513 if (VOID_TYPE_P (TREE_TYPE (exp)))
4515 /* C++ can generate ?: expressions with a throw expression in one
4516 branch and an rvalue in the other. Here, we resolve attempts to
4517 store the throw expression's nonexistent result. */
4518 gcc_assert (!call_param_p);
4519 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4520 return NULL_RTX;
4522 if (TREE_CODE (exp) == COMPOUND_EXPR)
4524 /* Perform first part of compound expression, then assign from second
4525 part. */
4526 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4527 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4528 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4529 nontemporal);
4531 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4533 /* For conditional expression, get safe form of the target. Then
4534 test the condition, doing the appropriate assignment on either
4535 side. This avoids the creation of unnecessary temporaries.
4536 For non-BLKmode, it is more efficient not to do this. */
4538 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4540 do_pending_stack_adjust ();
4541 NO_DEFER_POP;
4542 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4543 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4544 nontemporal);
4545 emit_jump_insn (gen_jump (lab2));
4546 emit_barrier ();
4547 emit_label (lab1);
4548 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4549 nontemporal);
4550 emit_label (lab2);
4551 OK_DEFER_POP;
4553 return NULL_RTX;
4555 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4556 /* If this is a scalar in a register that is stored in a wider mode
4557 than the declared mode, compute the result into its declared mode
4558 and then convert to the wider mode. Our value is the computed
4559 expression. */
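/* Added illustration, not from the original sources (function name
   invented): this path is taken when, say, a local `short' is kept in a
   word-sized pseudo on a target that promotes subword modes, e.g.

     short narrow_copy (short x) { short y = 0; y = x; return y; }

   The value is computed in the declared (narrower) mode and then
   extended into the wider register, using the signedness recorded by
   SUBREG_PROMOTED_UNSIGNED_P.  */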
4561 rtx inner_target = 0;
4563 /* We can do the conversion inside EXP, which will often result
4564 in some optimizations. Do the conversion in two steps: first
4565 change the signedness, if needed, then the extend. But don't
4566 do this if the type of EXP is a subtype of something else
4567 since then the conversion might involve more than just
4568 converting modes. */
4569 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4570 && TREE_TYPE (TREE_TYPE (exp)) == 0
4571 && GET_MODE_PRECISION (GET_MODE (target))
4572 == TYPE_PRECISION (TREE_TYPE (exp)))
4574 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4575 != SUBREG_PROMOTED_UNSIGNED_P (target))
4577 /* Some types, e.g. Fortran's logical*4, won't have a signed
4578 version, so use the mode instead. */
4579 tree ntype
4580 = (signed_or_unsigned_type_for
4581 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4582 if (ntype == NULL)
4583 ntype = lang_hooks.types.type_for_mode
4584 (TYPE_MODE (TREE_TYPE (exp)),
4585 SUBREG_PROMOTED_UNSIGNED_P (target));
4587 exp = fold_convert_loc (loc, ntype, exp);
4590 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4591 (GET_MODE (SUBREG_REG (target)),
4592 SUBREG_PROMOTED_UNSIGNED_P (target)),
4593 exp);
4595 inner_target = SUBREG_REG (target);
4598 temp = expand_expr (exp, inner_target, VOIDmode,
4599 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4601 /* If TEMP is a VOIDmode constant, use convert_modes to make
4602 sure that we properly convert it. */
4603 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4605 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4606 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4607 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4608 GET_MODE (target), temp,
4609 SUBREG_PROMOTED_UNSIGNED_P (target));
4612 convert_move (SUBREG_REG (target), temp,
4613 SUBREG_PROMOTED_UNSIGNED_P (target));
4615 return NULL_RTX;
4617 else if ((TREE_CODE (exp) == STRING_CST
4618 || (TREE_CODE (exp) == MEM_REF
4619 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4620 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4621 == STRING_CST
4622 && integer_zerop (TREE_OPERAND (exp, 1))))
4623 && !nontemporal && !call_param_p
4624 && MEM_P (target))
4626 /* Optimize initialization of an array with a STRING_CST. */
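/* Added illustration, not from the original sources: an initialization
   such as

     char buf[64] = "abc";

   can be expanded here by copying the string bytes with store_by_pieces
   and, since EXP_LEN exceeds STR_COPY_LEN, clearing the remaining bytes
   of the array with clear_storage below.  */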
4627 HOST_WIDE_INT exp_len, str_copy_len;
4628 rtx dest_mem;
4629 tree str = TREE_CODE (exp) == STRING_CST
4630 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4632 exp_len = int_expr_size (exp);
4633 if (exp_len <= 0)
4634 goto normal_expr;
4636 if (TREE_STRING_LENGTH (str) <= 0)
4637 goto normal_expr;
4639 str_copy_len = strlen (TREE_STRING_POINTER (str));
4640 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4641 goto normal_expr;
4643 str_copy_len = TREE_STRING_LENGTH (str);
4644 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4645 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4647 str_copy_len += STORE_MAX_PIECES - 1;
4648 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4650 str_copy_len = MIN (str_copy_len, exp_len);
4651 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4652 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4653 MEM_ALIGN (target), false))
4654 goto normal_expr;
4656 dest_mem = target;
4658 dest_mem = store_by_pieces (dest_mem,
4659 str_copy_len, builtin_strncpy_read_str,
4660 CONST_CAST (char *,
4661 TREE_STRING_POINTER (str)),
4662 MEM_ALIGN (target), false,
4663 exp_len > str_copy_len ? 1 : 0);
4664 if (exp_len > str_copy_len)
4665 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4666 GEN_INT (exp_len - str_copy_len),
4667 BLOCK_OP_NORMAL);
4668 return NULL_RTX;
4670 else
4672 rtx tmp_target;
4674 normal_expr:
4675 /* If we want to use a nontemporal store, force the value to
4676 register first. */
4677 tmp_target = nontemporal ? NULL_RTX : target;
4678 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4679 (call_param_p
4680 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4681 &alt_rtl);
4684 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4685 the same as that of TARGET, adjust the constant. This is needed, for
4686 example, in case it is a CONST_DOUBLE and we want only a word-sized
4687 value. */
4688 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4689 && TREE_CODE (exp) != ERROR_MARK
4690 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4691 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4692 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4694 /* If value was not generated in the target, store it there.
4695 Convert the value to TARGET's type first if necessary and emit the
4696 pending incrementations that have been queued when expanding EXP.
4697 Note that we cannot emit the whole queue blindly because this will
4698 effectively disable the POST_INC optimization later.
4700 If TEMP and TARGET compare equal according to rtx_equal_p, but
4701 one or both of them are volatile memory refs, we have to distinguish
4702 two cases:
4703 - expand_expr has used TARGET. In this case, we must not generate
4704 another copy. This can be detected by TEMP being equal to TARGET
4705 according to ==.
4706 - expand_expr has not used TARGET - that means that the source just
4707 happens to have the same RTX form. Since temp will have been created
4708 by expand_expr, it will compare unequal according to == .
4709 We must generate a copy in this case, to reach the correct number
4710 of volatile memory references. */
4712 if ((! rtx_equal_p (temp, target)
4713 || (temp != target && (side_effects_p (temp)
4714 || side_effects_p (target))))
4715 && TREE_CODE (exp) != ERROR_MARK
4716 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4717 but TARGET is not valid memory reference, TEMP will differ
4718 from TARGET although it is really the same location. */
4719 && !(alt_rtl
4720 && rtx_equal_p (alt_rtl, target)
4721 && !side_effects_p (alt_rtl)
4722 && !side_effects_p (target))
4723 /* If there's nothing to copy, don't bother. Don't call
4724 expr_size unless necessary, because the expr_size hook of some
4725 front ends (C++) must not be given objects that are not
4726 supposed to be bit-copied or bit-initialized. */
4727 && expr_size (exp) != const0_rtx)
4729 if (GET_MODE (temp) != GET_MODE (target)
4730 && GET_MODE (temp) != VOIDmode)
4732 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4733 if (GET_MODE (target) == BLKmode
4734 && GET_MODE (temp) == BLKmode)
4735 emit_block_move (target, temp, expr_size (exp),
4736 (call_param_p
4737 ? BLOCK_OP_CALL_PARM
4738 : BLOCK_OP_NORMAL));
4739 else if (GET_MODE (target) == BLKmode)
4740 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4741 0, GET_MODE (temp), temp);
4742 else
4743 convert_move (target, temp, unsignedp);
4746 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4748 /* Handle copying a string constant into an array. The string
4749 constant may be shorter than the array. So copy just the string's
4750 actual length, and clear the rest. First get the size of the data
4751 type of the string, which is actually the size of the target. */
4752 rtx size = expr_size (exp);
4754 if (CONST_INT_P (size)
4755 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4756 emit_block_move (target, temp, size,
4757 (call_param_p
4758 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4759 else
4761 enum machine_mode pointer_mode
4762 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4763 enum machine_mode address_mode
4764 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4766 /* Compute the size of the data to copy from the string. */
4767 tree copy_size
4768 = size_binop_loc (loc, MIN_EXPR,
4769 make_tree (sizetype, size),
4770 size_int (TREE_STRING_LENGTH (exp)));
4771 rtx copy_size_rtx
4772 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4773 (call_param_p
4774 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4775 rtx label = 0;
4777 /* Copy that much. */
4778 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4779 TYPE_UNSIGNED (sizetype));
4780 emit_block_move (target, temp, copy_size_rtx,
4781 (call_param_p
4782 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4784 /* Figure out how much is left in TARGET that we have to clear.
4785 Do all calculations in pointer_mode. */
4786 if (CONST_INT_P (copy_size_rtx))
4788 size = plus_constant (size, -INTVAL (copy_size_rtx));
4789 target = adjust_address (target, BLKmode,
4790 INTVAL (copy_size_rtx));
4792 else
4794 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4795 copy_size_rtx, NULL_RTX, 0,
4796 OPTAB_LIB_WIDEN);
4798 if (GET_MODE (copy_size_rtx) != address_mode)
4799 copy_size_rtx = convert_to_mode (address_mode,
4800 copy_size_rtx,
4801 TYPE_UNSIGNED (sizetype));
4803 target = offset_address (target, copy_size_rtx,
4804 highest_pow2_factor (copy_size));
4805 label = gen_label_rtx ();
4806 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4807 GET_MODE (size), 0, label);
4810 if (size != const0_rtx)
4811 clear_storage (target, size, BLOCK_OP_NORMAL);
4813 if (label)
4814 emit_label (label);
4817 /* Handle calls that return values in multiple non-contiguous locations.
4818 The Irix 6 ABI has examples of this. */
4819 else if (GET_CODE (target) == PARALLEL)
4820 emit_group_load (target, temp, TREE_TYPE (exp),
4821 int_size_in_bytes (TREE_TYPE (exp)));
4822 else if (GET_MODE (temp) == BLKmode)
4823 emit_block_move (target, temp, expr_size (exp),
4824 (call_param_p
4825 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4826 else if (nontemporal
4827 && emit_storent_insn (target, temp))
4828 /* If we managed to emit a nontemporal store, there is nothing else to
4829 do. */
4831 else
4833 temp = force_operand (temp, target);
4834 if (temp != target)
4835 emit_move_insn (target, temp);
4839 return NULL_RTX;
4842 /* Helper for categorize_ctor_elements. Identical interface. */
4844 static bool
4845 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4846 HOST_WIDE_INT *p_elt_count,
4847 bool *p_must_clear)
4849 unsigned HOST_WIDE_INT idx;
4850 HOST_WIDE_INT nz_elts, elt_count;
4851 tree value, purpose;
4853 /* Whether CTOR is a valid constant initializer, in accordance with what
4854 initializer_constant_valid_p does. If inferred from the constructor
4855 elements, true until proven otherwise. */
4856 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4857 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4859 nz_elts = 0;
4860 elt_count = 0;
4862 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4864 HOST_WIDE_INT mult = 1;
4866 if (TREE_CODE (purpose) == RANGE_EXPR)
4868 tree lo_index = TREE_OPERAND (purpose, 0);
4869 tree hi_index = TREE_OPERAND (purpose, 1);
4871 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4872 mult = (tree_low_cst (hi_index, 1)
4873 - tree_low_cst (lo_index, 1) + 1);
4876 switch (TREE_CODE (value))
4878 case CONSTRUCTOR:
4880 HOST_WIDE_INT nz = 0, ic = 0;
4882 bool const_elt_p
4883 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4885 nz_elts += mult * nz;
4886 elt_count += mult * ic;
4888 if (const_from_elts_p && const_p)
4889 const_p = const_elt_p;
4891 break;
4893 case INTEGER_CST:
4894 case REAL_CST:
4895 case FIXED_CST:
4896 if (!initializer_zerop (value))
4897 nz_elts += mult;
4898 elt_count += mult;
4899 break;
4901 case STRING_CST:
4902 nz_elts += mult * TREE_STRING_LENGTH (value);
4903 elt_count += mult * TREE_STRING_LENGTH (value);
4904 break;
4906 case COMPLEX_CST:
4907 if (!initializer_zerop (TREE_REALPART (value)))
4908 nz_elts += mult;
4909 if (!initializer_zerop (TREE_IMAGPART (value)))
4910 nz_elts += mult;
4911 elt_count += mult;
4912 break;
4914 case VECTOR_CST:
4916 tree v;
4917 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4919 if (!initializer_zerop (TREE_VALUE (v)))
4920 nz_elts += mult;
4921 elt_count += mult;
4924 break;
4926 default:
4928 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4929 if (tc < 1)
4930 tc = 1;
4931 nz_elts += mult * tc;
4932 elt_count += mult * tc;
4934 if (const_from_elts_p && const_p)
4935 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4936 != NULL_TREE;
4938 break;
4942 if (!*p_must_clear
4943 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4944 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4946 tree init_sub_type;
4947 bool clear_this = true;
4949 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4951 /* We don't expect more than one element of the union to be
4952 initialized. Not sure what we should do otherwise... */
4953 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4954 == 1);
4956 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4957 CONSTRUCTOR_ELTS (ctor),
4958 0)->value);
4960 /* ??? We could look at each element of the union, and find the
4961 largest element, which would avoid comparing the size of the
4962 initialized element against any tail padding in the union.
4963 Doesn't seem worth the effort... */
4964 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4965 TYPE_SIZE (init_sub_type)) == 1)
4967 /* And now we have to find out if the element itself is fully
4968 constructed. E.g. for union { struct { int a, b; } s; } u
4969 = { .s = { .a = 1 } }. */
4970 if (elt_count == count_type_elements (init_sub_type, false))
4971 clear_this = false;
4975 *p_must_clear = clear_this;
4978 *p_nz_elts += nz_elts;
4979 *p_elt_count += elt_count;
4981 return const_p;
4984 /* Examine CTOR to discover:
4985 * how many scalar fields are set to nonzero values,
4986 and place it in *P_NZ_ELTS;
4987 * how many scalar fields in total are in CTOR,
4988 and place it in *P_ELT_COUNT.
4989 * if a type is a union, and the initializer from the constructor
4990 is not the largest element in the union, then set *p_must_clear.
4992 Return whether or not CTOR is a valid static constant initializer, the same
4993 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4995 bool
4996 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4997 HOST_WIDE_INT *p_elt_count,
4998 bool *p_must_clear)
5000 *p_nz_elts = 0;
5001 *p_elt_count = 0;
5002 *p_must_clear = false;
5004 return
5005 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5008 /* Count the number of scalars in TYPE. Return -1 on overflow or
5009 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
5010 flexible array member at the end of the structure. */
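/* Added illustration, not from the original sources, following the
   switch below:

     struct { int i; double d[3]; }   -> 1 + 3 = 4 scalars
     union  { int i; double d; }      -> -1
     _Complex float                   -> 2  */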
5012 HOST_WIDE_INT
5013 count_type_elements (const_tree type, bool allow_flexarr)
5015 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5016 switch (TREE_CODE (type))
5018 case ARRAY_TYPE:
5020 tree telts = array_type_nelts (type);
5021 if (telts && host_integerp (telts, 1))
5023 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5024 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5025 if (n == 0)
5026 return 0;
5027 else if (max / n > m)
5028 return n * m;
5030 return -1;
5033 case RECORD_TYPE:
5035 HOST_WIDE_INT n = 0, t;
5036 tree f;
5038 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5039 if (TREE_CODE (f) == FIELD_DECL)
5041 t = count_type_elements (TREE_TYPE (f), false);
5042 if (t < 0)
5044 /* Check for structures with flexible array member. */
5045 tree tf = TREE_TYPE (f);
5046 if (allow_flexarr
5047 && DECL_CHAIN (f) == NULL
5048 && TREE_CODE (tf) == ARRAY_TYPE
5049 && TYPE_DOMAIN (tf)
5050 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5051 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5052 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5053 && int_size_in_bytes (type) >= 0)
5054 break;
5056 return -1;
5058 n += t;
5061 return n;
5064 case UNION_TYPE:
5065 case QUAL_UNION_TYPE:
5066 return -1;
5068 case COMPLEX_TYPE:
5069 return 2;
5071 case VECTOR_TYPE:
5072 return TYPE_VECTOR_SUBPARTS (type);
5074 case INTEGER_TYPE:
5075 case REAL_TYPE:
5076 case FIXED_POINT_TYPE:
5077 case ENUMERAL_TYPE:
5078 case BOOLEAN_TYPE:
5079 case POINTER_TYPE:
5080 case OFFSET_TYPE:
5081 case REFERENCE_TYPE:
5082 return 1;
5084 case ERROR_MARK:
5085 return 0;
5087 case VOID_TYPE:
5088 case METHOD_TYPE:
5089 case FUNCTION_TYPE:
5090 case LANG_TYPE:
5091 default:
5092 gcc_unreachable ();
5096 /* Return 1 if EXP contains mostly (3/4) zeros. */
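/* Added illustration, not from the original sources: an initializer like

     int a[8] = { [2] = 5 };

   has one nonzero element out of eight (1 < 8 / 4), so it counts as
   mostly zero; store_constructor will then clear the whole object first
   and store only the nonzero element.  */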
5098 static int
5099 mostly_zeros_p (const_tree exp)
5101 if (TREE_CODE (exp) == CONSTRUCTOR)
5104 HOST_WIDE_INT nz_elts, count, elts;
5105 bool must_clear;
5107 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5108 if (must_clear)
5109 return 1;
5111 elts = count_type_elements (TREE_TYPE (exp), false);
5113 return nz_elts < elts / 4;
5116 return initializer_zerop (exp);
5119 /* Return 1 if EXP contains all zeros. */
5121 static int
5122 all_zeros_p (const_tree exp)
5124 if (TREE_CODE (exp) == CONSTRUCTOR)
5127 HOST_WIDE_INT nz_elts, count;
5128 bool must_clear;
5130 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5131 return nz_elts == 0;
5134 return initializer_zerop (exp);
5137 /* Helper function for store_constructor.
5138 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5139 TYPE is the type of the CONSTRUCTOR, not the element type.
5140 CLEARED is as for store_constructor.
5141 ALIAS_SET is the alias set to use for any stores.
5143 This provides a recursive shortcut back to store_constructor when it isn't
5144 necessary to go through store_field. This is so that we can pass through
5145 the cleared field to let store_constructor know that we may not have to
5146 clear a substructure if the outer structure has already been cleared. */
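/* Added note, not from the original sources (struct names invented):
   e.g. once the outer aggregate of

     struct outer { struct inner { int a, b; } in; int c, d; } o
       = { { 1, 0 } };

   has been cleared, the recursive store_constructor call for O.IN runs
   with CLEARED set and so emits no stores for the zero field B.  */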
5148 static void
5149 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5150 HOST_WIDE_INT bitpos, enum machine_mode mode,
5151 tree exp, tree type, int cleared,
5152 alias_set_type alias_set)
5154 if (TREE_CODE (exp) == CONSTRUCTOR
5155 /* We can only call store_constructor recursively if the size and
5156 bit position are on a byte boundary. */
5157 && bitpos % BITS_PER_UNIT == 0
5158 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5159 /* If we have a nonzero bitpos for a register target, then we just
5160 let store_field do the bitfield handling. This is unlikely to
5161 generate unnecessary clear instructions anyway. */
5162 && (bitpos == 0 || MEM_P (target)))
5164 if (MEM_P (target))
5165 target
5166 = adjust_address (target,
5167 GET_MODE (target) == BLKmode
5168 || 0 != (bitpos
5169 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5170 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5173 /* Update the alias set, if required. */
5174 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5175 && MEM_ALIAS_SET (target) != 0)
5177 target = copy_rtx (target);
5178 set_mem_alias_set (target, alias_set);
5181 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5183 else
5184 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5187 /* Store the value of constructor EXP into the rtx TARGET.
5188 TARGET is either a REG or a MEM; we know it cannot conflict, since
5189 safe_from_p has been called.
5190 CLEARED is true if TARGET is known to have been zero'd.
5191 SIZE is the number of bytes of TARGET we are allowed to modify: this
5192 may not be the same as the size of EXP if we are assigning to a field
5193 which has been packed to exclude padding bits. */
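/* Added illustration, not from the original sources (struct name
   invented): for

     struct S { int a, b, c, d; };
     struct S s = { .b = 1 };

   the RECORD_TYPE case notices that the constructor has fewer elements
   than the structure, clears the whole object, and then stores only
   field B.  */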
5195 static void
5196 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5198 tree type = TREE_TYPE (exp);
5199 #ifdef WORD_REGISTER_OPERATIONS
5200 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5201 #endif
5203 switch (TREE_CODE (type))
5205 case RECORD_TYPE:
5206 case UNION_TYPE:
5207 case QUAL_UNION_TYPE:
5209 unsigned HOST_WIDE_INT idx;
5210 tree field, value;
5212 /* If size is zero or the target is already cleared, do nothing. */
5213 if (size == 0 || cleared)
5214 cleared = 1;
5215 /* We either clear the aggregate or indicate the value is dead. */
5216 else if ((TREE_CODE (type) == UNION_TYPE
5217 || TREE_CODE (type) == QUAL_UNION_TYPE)
5218 && ! CONSTRUCTOR_ELTS (exp))
5219 /* If the constructor is empty, clear the union. */
5221 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5222 cleared = 1;
5225 /* If we are building a static constructor into a register,
5226 set the initial value as zero so we can fold the value into
5227 a constant. But if more than one register is involved,
5228 this probably loses. */
5229 else if (REG_P (target) && TREE_STATIC (exp)
5230 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5232 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5233 cleared = 1;
5236 /* If the constructor has fewer fields than the structure or
5237 if we are initializing the structure to mostly zeros, clear
5238 the whole structure first. Don't do this if TARGET is a
5239 register whose mode size isn't equal to SIZE since
5240 clear_storage can't handle this case. */
5241 else if (size > 0
5242 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5243 != fields_length (type))
5244 || mostly_zeros_p (exp))
5245 && (!REG_P (target)
5246 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5247 == size)))
5249 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5250 cleared = 1;
5253 if (REG_P (target) && !cleared)
5254 emit_clobber (target);
5256 /* Store each element of the constructor into the
5257 corresponding field of TARGET. */
5258 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5260 enum machine_mode mode;
5261 HOST_WIDE_INT bitsize;
5262 HOST_WIDE_INT bitpos = 0;
5263 tree offset;
5264 rtx to_rtx = target;
5266 /* Just ignore missing fields. We cleared the whole
5267 structure, above, if any fields are missing. */
5268 if (field == 0)
5269 continue;
5271 if (cleared && initializer_zerop (value))
5272 continue;
5274 if (host_integerp (DECL_SIZE (field), 1))
5275 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5276 else
5277 bitsize = -1;
5279 mode = DECL_MODE (field);
5280 if (DECL_BIT_FIELD (field))
5281 mode = VOIDmode;
5283 offset = DECL_FIELD_OFFSET (field);
5284 if (host_integerp (offset, 0)
5285 && host_integerp (bit_position (field), 0))
5287 bitpos = int_bit_position (field);
5288 offset = 0;
5290 else
5291 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5293 if (offset)
5295 enum machine_mode address_mode;
5296 rtx offset_rtx;
5298 offset
5299 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5300 make_tree (TREE_TYPE (exp),
5301 target));
5303 offset_rtx = expand_normal (offset);
5304 gcc_assert (MEM_P (to_rtx));
5306 address_mode
5307 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5308 if (GET_MODE (offset_rtx) != address_mode)
5309 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5311 to_rtx = offset_address (to_rtx, offset_rtx,
5312 highest_pow2_factor (offset));
5315 #ifdef WORD_REGISTER_OPERATIONS
5316 /* If this initializes a field that is smaller than a
5317 word, at the start of a word, try to widen it to a full
5318 word. This special case allows us to output C++ member
5319 function initializations in a form that the optimizers
5320 can understand. */
5321 if (REG_P (target)
5322 && bitsize < BITS_PER_WORD
5323 && bitpos % BITS_PER_WORD == 0
5324 && GET_MODE_CLASS (mode) == MODE_INT
5325 && TREE_CODE (value) == INTEGER_CST
5326 && exp_size >= 0
5327 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5329 tree type = TREE_TYPE (value);
5331 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5333 type = lang_hooks.types.type_for_size
5334 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5335 value = fold_convert (type, value);
5338 if (BYTES_BIG_ENDIAN)
5339 value
5340 = fold_build2 (LSHIFT_EXPR, type, value,
5341 build_int_cst (type,
5342 BITS_PER_WORD - bitsize));
5343 bitsize = BITS_PER_WORD;
5344 mode = word_mode;
5346 #endif
5348 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5349 && DECL_NONADDRESSABLE_P (field))
5351 to_rtx = copy_rtx (to_rtx);
5352 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5355 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5356 value, type, cleared,
5357 get_alias_set (TREE_TYPE (field)));
5359 break;
5361 case ARRAY_TYPE:
5363 tree value, index;
5364 unsigned HOST_WIDE_INT i;
5365 int need_to_clear;
5366 tree domain;
5367 tree elttype = TREE_TYPE (type);
5368 int const_bounds_p;
5369 HOST_WIDE_INT minelt = 0;
5370 HOST_WIDE_INT maxelt = 0;
5372 domain = TYPE_DOMAIN (type);
5373 const_bounds_p = (TYPE_MIN_VALUE (domain)
5374 && TYPE_MAX_VALUE (domain)
5375 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5376 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5378 /* If we have constant bounds for the range of the type, get them. */
5379 if (const_bounds_p)
5381 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5382 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5385 /* If the constructor has fewer elements than the array, clear
5386 the whole array first. Similarly if this is a static
5387 constructor of a non-BLKmode object. */
5388 if (cleared)
5389 need_to_clear = 0;
5390 else if (REG_P (target) && TREE_STATIC (exp))
5391 need_to_clear = 1;
5392 else
5394 unsigned HOST_WIDE_INT idx;
5395 tree index, value;
5396 HOST_WIDE_INT count = 0, zero_count = 0;
5397 need_to_clear = ! const_bounds_p;
5399 /* This loop is a more accurate version of the loop in
5400 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5401 is also needed to check for missing elements. */
5402 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5404 HOST_WIDE_INT this_node_count;
5406 if (need_to_clear)
5407 break;
5409 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5411 tree lo_index = TREE_OPERAND (index, 0);
5412 tree hi_index = TREE_OPERAND (index, 1);
5414 if (! host_integerp (lo_index, 1)
5415 || ! host_integerp (hi_index, 1))
5417 need_to_clear = 1;
5418 break;
5421 this_node_count = (tree_low_cst (hi_index, 1)
5422 - tree_low_cst (lo_index, 1) + 1);
5424 else
5425 this_node_count = 1;
5427 count += this_node_count;
5428 if (mostly_zeros_p (value))
5429 zero_count += this_node_count;
5432 /* Clear the entire array first if there are any missing
5433 elements, or if the incidence of zero elements is >=
5434 75%. */
5435 if (! need_to_clear
5436 && (count < maxelt - minelt + 1
5437 || 4 * zero_count >= 3 * count))
5438 need_to_clear = 1;
5441 if (need_to_clear && size > 0)
5443 if (REG_P (target))
5444 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5445 else
5446 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5447 cleared = 1;
5450 if (!cleared && REG_P (target))
5451 /* Inform later passes that the old value is dead. */
5452 emit_clobber (target);
5454 /* Store each element of the constructor into the
5455 corresponding element of TARGET, determined by counting the
5456 elements. */
5457 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5459 enum machine_mode mode;
5460 HOST_WIDE_INT bitsize;
5461 HOST_WIDE_INT bitpos;
5462 rtx xtarget = target;
5464 if (cleared && initializer_zerop (value))
5465 continue;
5467 mode = TYPE_MODE (elttype);
5468 if (mode == BLKmode)
5469 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5470 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5471 : -1);
5472 else
5473 bitsize = GET_MODE_BITSIZE (mode);
5475 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5477 tree lo_index = TREE_OPERAND (index, 0);
5478 tree hi_index = TREE_OPERAND (index, 1);
5479 rtx index_r, pos_rtx;
5480 HOST_WIDE_INT lo, hi, count;
5481 tree position;
5483 /* If the range is constant and "small", unroll the loop. */
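/* Added note, not from the original sources: RANGE_EXPR indices arise
   e.g. from GNU C designated range initializers such as

     int a[16] = { [0 ... 15] = 2 };

   With small constant bounds each element store is emitted inline here;
   otherwise the else branch below builds a run-time loop over the
   range.  */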
5484 if (const_bounds_p
5485 && host_integerp (lo_index, 0)
5486 && host_integerp (hi_index, 0)
5487 && (lo = tree_low_cst (lo_index, 0),
5488 hi = tree_low_cst (hi_index, 0),
5489 count = hi - lo + 1,
5490 (!MEM_P (target)
5491 || count <= 2
5492 || (host_integerp (TYPE_SIZE (elttype), 1)
5493 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5494 <= 40 * 8)))))
5496 lo -= minelt; hi -= minelt;
5497 for (; lo <= hi; lo++)
5499 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5501 if (MEM_P (target)
5502 && !MEM_KEEP_ALIAS_SET_P (target)
5503 && TREE_CODE (type) == ARRAY_TYPE
5504 && TYPE_NONALIASED_COMPONENT (type))
5506 target = copy_rtx (target);
5507 MEM_KEEP_ALIAS_SET_P (target) = 1;
5510 store_constructor_field
5511 (target, bitsize, bitpos, mode, value, type, cleared,
5512 get_alias_set (elttype));
5515 else
5517 rtx loop_start = gen_label_rtx ();
5518 rtx loop_end = gen_label_rtx ();
5519 tree exit_cond;
5521 expand_normal (hi_index);
5523 index = build_decl (EXPR_LOCATION (exp),
5524 VAR_DECL, NULL_TREE, domain);
5525 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5526 SET_DECL_RTL (index, index_r);
5527 store_expr (lo_index, index_r, 0, false);
5529 /* Build the head of the loop. */
5530 do_pending_stack_adjust ();
5531 emit_label (loop_start);
5533 /* Assign value to element index. */
5534 position =
5535 fold_convert (ssizetype,
5536 fold_build2 (MINUS_EXPR,
5537 TREE_TYPE (index),
5538 index,
5539 TYPE_MIN_VALUE (domain)));
5541 position =
5542 size_binop (MULT_EXPR, position,
5543 fold_convert (ssizetype,
5544 TYPE_SIZE_UNIT (elttype)));
5546 pos_rtx = expand_normal (position);
5547 xtarget = offset_address (target, pos_rtx,
5548 highest_pow2_factor (position));
5549 xtarget = adjust_address (xtarget, mode, 0);
5550 if (TREE_CODE (value) == CONSTRUCTOR)
5551 store_constructor (value, xtarget, cleared,
5552 bitsize / BITS_PER_UNIT);
5553 else
5554 store_expr (value, xtarget, 0, false);
5556 /* Generate a conditional jump to exit the loop. */
5557 exit_cond = build2 (LT_EXPR, integer_type_node,
5558 index, hi_index);
5559 jumpif (exit_cond, loop_end, -1);
5561 /* Update the loop counter, and jump to the head of
5562 the loop. */
5563 expand_assignment (index,
5564 build2 (PLUS_EXPR, TREE_TYPE (index),
5565 index, integer_one_node),
5566 false);
5568 emit_jump (loop_start);
5570 /* Build the end of the loop. */
5571 emit_label (loop_end);
5574 else if ((index != 0 && ! host_integerp (index, 0))
5575 || ! host_integerp (TYPE_SIZE (elttype), 1))
5577 tree position;
5579 if (index == 0)
5580 index = ssize_int (1);
5582 if (minelt)
5583 index = fold_convert (ssizetype,
5584 fold_build2 (MINUS_EXPR,
5585 TREE_TYPE (index),
5586 index,
5587 TYPE_MIN_VALUE (domain)));
5589 position =
5590 size_binop (MULT_EXPR, index,
5591 fold_convert (ssizetype,
5592 TYPE_SIZE_UNIT (elttype)));
5593 xtarget = offset_address (target,
5594 expand_normal (position),
5595 highest_pow2_factor (position));
5596 xtarget = adjust_address (xtarget, mode, 0);
5597 store_expr (value, xtarget, 0, false);
5599 else
5601 if (index != 0)
5602 bitpos = ((tree_low_cst (index, 0) - minelt)
5603 * tree_low_cst (TYPE_SIZE (elttype), 1));
5604 else
5605 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5607 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5608 && TREE_CODE (type) == ARRAY_TYPE
5609 && TYPE_NONALIASED_COMPONENT (type))
5611 target = copy_rtx (target);
5612 MEM_KEEP_ALIAS_SET_P (target) = 1;
5614 store_constructor_field (target, bitsize, bitpos, mode, value,
5615 type, cleared, get_alias_set (elttype));
5618 break;
5621 case VECTOR_TYPE:
5623 unsigned HOST_WIDE_INT idx;
5624 constructor_elt *ce;
5625 int i;
5626 int need_to_clear;
5627 int icode = 0;
5628 tree elttype = TREE_TYPE (type);
5629 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5630 enum machine_mode eltmode = TYPE_MODE (elttype);
5631 HOST_WIDE_INT bitsize;
5632 HOST_WIDE_INT bitpos;
5633 rtvec vector = NULL;
5634 unsigned n_elts;
5635 alias_set_type alias;
5637 gcc_assert (eltmode != BLKmode);
5639 n_elts = TYPE_VECTOR_SUBPARTS (type);
5640 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5642 enum machine_mode mode = GET_MODE (target);
5644 icode = (int) optab_handler (vec_init_optab, mode);
5645 if (icode != CODE_FOR_nothing)
5647 unsigned int i;
5649 vector = rtvec_alloc (n_elts);
5650 for (i = 0; i < n_elts; i++)
5651 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5655 /* If the constructor has fewer elements than the vector,
5656 clear the whole vector first. Similarly if this is a static
5657 constructor of a non-BLKmode object. */
5658 if (cleared)
5659 need_to_clear = 0;
5660 else if (REG_P (target) && TREE_STATIC (exp))
5661 need_to_clear = 1;
5662 else
5664 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5665 tree value;
5667 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5669 int n_elts_here = tree_low_cst
5670 (int_const_binop (TRUNC_DIV_EXPR,
5671 TYPE_SIZE (TREE_TYPE (value)),
5672 TYPE_SIZE (elttype), 0), 1);
5674 count += n_elts_here;
5675 if (mostly_zeros_p (value))
5676 zero_count += n_elts_here;
5679 /* Clear the entire vector first if there are any missing elements,
5680 or if the incidence of zero elements is >= 75%. */
5681 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
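	/* Worked example (illustrative): with 8 vector elements of which
	   6 are zero, 4 * 6 >= 3 * 8 holds, so the whole vector is cleared
	   up front and only the two nonzero elements are stored
	   individually.  */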
5684 if (need_to_clear && size > 0 && !vector)
5686 if (REG_P (target))
5687 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5688 else
5689 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5690 cleared = 1;
5693 /* Inform later passes that the old value is dead. */
5694 if (!cleared && !vector && REG_P (target))
5695 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5697 if (MEM_P (target))
5698 alias = MEM_ALIAS_SET (target);
5699 else
5700 alias = get_alias_set (elttype);
5702 /* Store each element of the constructor into the corresponding
5703 element of TARGET, determined by counting the elements. */
5704 for (idx = 0, i = 0;
5705 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5706 idx++, i += bitsize / elt_size)
5708 HOST_WIDE_INT eltpos;
5709 tree value = ce->value;
5711 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5712 if (cleared && initializer_zerop (value))
5713 continue;
5715 if (ce->index)
5716 eltpos = tree_low_cst (ce->index, 1);
5717 else
5718 eltpos = i;
5720 if (vector)
5722 /* Vector CONSTRUCTORs should only be built from smaller
5723 vectors in the case of BLKmode vectors. */
5724 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5725 RTVEC_ELT (vector, eltpos)
5726 = expand_normal (value);
5728 else
5730 enum machine_mode value_mode =
5731 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5732 ? TYPE_MODE (TREE_TYPE (value))
5733 : eltmode;
5734 bitpos = eltpos * elt_size;
5735 store_constructor_field (target, bitsize, bitpos,
5736 value_mode, value, type,
5737 cleared, alias);
5741 if (vector)
5742 emit_insn (GEN_FCN (icode)
5743 (target,
5744 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5745 break;
5748 default:
5749 gcc_unreachable ();
5753 /* Store the value of EXP (an expression tree)
5754 into a subfield of TARGET which has mode MODE and occupies
5755 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5756 If MODE is VOIDmode, it means that we are storing into a bit-field.
5758 Always return const0_rtx unless we have something particular to
5759 return.
5761 TYPE is the type of the underlying object,
5763 ALIAS_SET is the alias set for the destination. This value will
5764 (in general) be different from that for TARGET, since TARGET is a
5765 reference to the containing structure.
5767 If NONTEMPORAL is true, try generating a nontemporal store. */
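/* A minimal usage sketch (illustrative only; TO_RTX, RHS, LHS and the
   field geometry are hypothetical, not taken from a real caller):

     store_field (to_rtx, 9, 3, VOIDmode, rhs, TREE_TYPE (rhs),
		  get_alias_set (lhs), false);

   i.e. store RHS into a 9-bit field starting 3 bits into TO_RTX;
   passing VOIDmode selects the bit-field path described above.  */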
5769 static rtx
5770 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5771 enum machine_mode mode, tree exp, tree type,
5772 alias_set_type alias_set, bool nontemporal)
5774 if (TREE_CODE (exp) == ERROR_MARK)
5775 return const0_rtx;
5777 /* If we have nothing to store, do nothing unless the expression has
5778 side-effects. */
5779 if (bitsize == 0)
5780 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5782 /* If we are storing into an unaligned field of an aligned union that is
5783 in a register, we may have the mode of TARGET being an integer mode but
5784 MODE == BLKmode. In that case, get an aligned object whose size and
5785 alignment are the same as TARGET and store TARGET into it (we can avoid
5786 the store if the field being stored is the entire width of TARGET). Then
5787 call ourselves recursively to store the field into a BLKmode version of
5788 that object. Finally, load from the object into TARGET. This is not
5789 very efficient in general, but should only be slightly more expensive
5790 than the otherwise-required unaligned accesses. Perhaps this can be
5791 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5792 twice, once with emit_move_insn and once via store_field. */
5794 if (mode == BLKmode
5795 && (REG_P (target) || GET_CODE (target) == SUBREG))
5797 rtx object = assign_temp (type, 0, 1, 1);
5798 rtx blk_object = adjust_address (object, BLKmode, 0);
5800 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5801 emit_move_insn (object, target);
5803 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5804 nontemporal);
5806 emit_move_insn (target, object);
5808 /* We want to return the BLKmode version of the data. */
5809 return blk_object;
5812 if (GET_CODE (target) == CONCAT)
5814 /* We're storing into a struct containing a single __complex. */
5816 gcc_assert (!bitpos);
5817 return store_expr (exp, target, 0, nontemporal);
5820 /* If the structure is in a register or if the component
5821 is a bit field, we cannot use addressing to access it.
5822 Use bit-field techniques or SUBREG to store in it. */
5824 if (mode == VOIDmode
5825 || (mode != BLKmode && ! direct_store[(int) mode]
5826 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5827 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5828 || REG_P (target)
5829 || GET_CODE (target) == SUBREG
5830 /* If the field isn't aligned enough to store as an ordinary memref,
5831 store it as a bit field. */
5832 || (mode != BLKmode
5833 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5834 || bitpos % GET_MODE_ALIGNMENT (mode))
5835 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5836 || (bitpos % BITS_PER_UNIT != 0)))
5837 /* If the RHS and field are a constant size and the size of the
5838 RHS isn't the same size as the bitfield, we must use bitfield
5839 operations. */
5840 || (bitsize >= 0
5841 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5842 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5843 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5844 decl we must use bitfield operations. */
5845 || (bitsize >= 0
5846 && TREE_CODE (exp) == MEM_REF
5847 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5848 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5849 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5850 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5852 rtx temp;
5853 gimple nop_def;
5855 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5856 implies a mask operation. If the precision is the same size as
5857 the field we're storing into, that mask is redundant. This is
5858 particularly common with bit field assignments generated by the
5859 C front end. */
5860 nop_def = get_def_for_expr (exp, NOP_EXPR);
5861 if (nop_def)
5863 tree type = TREE_TYPE (exp);
5864 if (INTEGRAL_TYPE_P (type)
5865 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5866 && bitsize == TYPE_PRECISION (type))
5868 tree op = gimple_assign_rhs1 (nop_def);
5869 type = TREE_TYPE (op);
5870 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5871 exp = op;
5875 temp = expand_normal (exp);
5877 /* If BITSIZE is narrower than the size of the type of EXP
5878 we will be narrowing TEMP. Normally, what's wanted are the
5879 low-order bits. However, if EXP's type is a record and this is
5880 a big-endian machine, we want the upper BITSIZE bits.  */
5881 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5882 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5883 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5884 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5885 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5886 - bitsize),
5887 NULL_RTX, 1);
5889 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5890 MODE. */
5891 if (mode != VOIDmode && mode != BLKmode
5892 && mode != TYPE_MODE (TREE_TYPE (exp)))
5893 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5895 /* If the modes of TEMP and TARGET are both BLKmode, both
5896 must be in memory and BITPOS must be aligned on a byte
5897 boundary. If so, we simply do a block copy. Likewise
5898 for a BLKmode-like TARGET. */
5899 if (GET_MODE (temp) == BLKmode
5900 && (GET_MODE (target) == BLKmode
5901 || (MEM_P (target)
5902 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5903 && (bitpos % BITS_PER_UNIT) == 0
5904 && (bitsize % BITS_PER_UNIT) == 0)))
5906 gcc_assert (MEM_P (target) && MEM_P (temp)
5907 && (bitpos % BITS_PER_UNIT) == 0);
5909 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5910 emit_block_move (target, temp,
5911 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5912 / BITS_PER_UNIT),
5913 BLOCK_OP_NORMAL);
5915 return const0_rtx;
5918 /* Store the value in the bitfield. */
5919 store_bit_field (target, bitsize, bitpos, mode, temp);
5921 return const0_rtx;
5923 else
5925 /* Now build a reference to just the desired component. */
5926 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5928 if (to_rtx == target)
5929 to_rtx = copy_rtx (to_rtx);
5931 if (!MEM_SCALAR_P (to_rtx))
5932 MEM_IN_STRUCT_P (to_rtx) = 1;
5933 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5934 set_mem_alias_set (to_rtx, alias_set);
5936 return store_expr (exp, to_rtx, 0, nontemporal);
5940 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5941 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5942 codes and find the ultimate containing object, which we return.
5944 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5945 bit position, and *PUNSIGNEDP to the signedness of the field.
5946 If the position of the field is variable, we store a tree
5947 giving the variable offset (in units) in *POFFSET.
5948 This offset is in addition to the bit position.
5949 If the position is not variable, we store 0 in *POFFSET.
5951 If any of the extraction expressions is volatile,
5952 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5954 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5955 Otherwise, it is a mode that can be used to access the field.
5957 If the field describes a variable-sized object, *PMODE is set to
5958 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5959 this case, but the address of the object can be found.
5961 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5962 look through nodes that serve as markers of a greater alignment than
5963 the one that can be deduced from the expression. These nodes make it
5964 possible for front-ends to prevent temporaries from being created by
5965 the middle-end on alignment considerations. For that purpose, the
5966 normal operating mode at high-level is to always pass FALSE so that
5967 the ultimate containing object is really returned; moreover, the
5968 associated predicate handled_component_p will always return TRUE
5969 on these nodes, thus indicating that they are essentially handled
5970 by get_inner_reference. TRUE should only be passed when the caller
5971 is scanning the expression in order to build another representation
5972 and specifically knows how to handle these nodes; as such, this is
5973 the normal operating mode in the RTL expanders. */
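/* A sketch of the usual calling pattern (variable names here are
   illustrative, not taken from a specific caller):

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep,
				      false);

   For a reference such as x.f, BASE is the object x, BITPOS and OFFSET
   locate the field f within it, and BITSIZE is its size in bits.  */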
5975 tree
5976 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5977 HOST_WIDE_INT *pbitpos, tree *poffset,
5978 enum machine_mode *pmode, int *punsignedp,
5979 int *pvolatilep, bool keep_aligning)
5981 tree size_tree = 0;
5982 enum machine_mode mode = VOIDmode;
5983 bool blkmode_bitfield = false;
5984 tree offset = size_zero_node;
5985 double_int bit_offset = double_int_zero;
5987 /* First get the mode, signedness, and size. We do this from just the
5988 outermost expression. */
5989 *pbitsize = -1;
5990 if (TREE_CODE (exp) == COMPONENT_REF)
5992 tree field = TREE_OPERAND (exp, 1);
5993 size_tree = DECL_SIZE (field);
5994 if (!DECL_BIT_FIELD (field))
5995 mode = DECL_MODE (field);
5996 else if (DECL_MODE (field) == BLKmode)
5997 blkmode_bitfield = true;
5998 else if (TREE_THIS_VOLATILE (exp)
5999 && flag_strict_volatile_bitfields > 0)
6000 /* Volatile bitfields should be accessed in the mode of the
6001 field's type, not the mode computed based on the bit
6002 size. */
6003 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6005 *punsignedp = DECL_UNSIGNED (field);
6007 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6009 size_tree = TREE_OPERAND (exp, 1);
6010 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6011 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6013 /* For vector types, with the correct size of access, use the mode of
6014 inner type. */
6015 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6016 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6017 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6018 mode = TYPE_MODE (TREE_TYPE (exp));
6020 else
6022 mode = TYPE_MODE (TREE_TYPE (exp));
6023 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6025 if (mode == BLKmode)
6026 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6027 else
6028 *pbitsize = GET_MODE_BITSIZE (mode);
6031 if (size_tree != 0)
6033 if (! host_integerp (size_tree, 1))
6034 mode = BLKmode, *pbitsize = -1;
6035 else
6036 *pbitsize = tree_low_cst (size_tree, 1);
6039 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6040 and find the ultimate containing object. */
6041 while (1)
6043 switch (TREE_CODE (exp))
6045 case BIT_FIELD_REF:
6046 bit_offset
6047 = double_int_add (bit_offset,
6048 tree_to_double_int (TREE_OPERAND (exp, 2)));
6049 break;
6051 case COMPONENT_REF:
6053 tree field = TREE_OPERAND (exp, 1);
6054 tree this_offset = component_ref_field_offset (exp);
6056 /* If this field hasn't been filled in yet, don't go past it.
6057 This should only happen when folding expressions made during
6058 type construction. */
6059 if (this_offset == 0)
6060 break;
6062 offset = size_binop (PLUS_EXPR, offset, this_offset);
6063 bit_offset = double_int_add (bit_offset,
6064 tree_to_double_int
6065 (DECL_FIELD_BIT_OFFSET (field)));
6067 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6069 break;
6071 case ARRAY_REF:
6072 case ARRAY_RANGE_REF:
6074 tree index = TREE_OPERAND (exp, 1);
6075 tree low_bound = array_ref_low_bound (exp);
6076 tree unit_size = array_ref_element_size (exp);
6078 /* We assume all arrays have sizes that are a multiple of a byte.
6079 First subtract the lower bound, if any, in the type of the
6080 index, then convert to sizetype and multiply by the size of
6081 the array element. */
6082 if (! integer_zerop (low_bound))
6083 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6084 index, low_bound);
6086 offset = size_binop (PLUS_EXPR, offset,
6087 size_binop (MULT_EXPR,
6088 fold_convert (sizetype, index),
6089 unit_size));
6091 break;
6093 case REALPART_EXPR:
6094 break;
6096 case IMAGPART_EXPR:
6097 bit_offset = double_int_add (bit_offset,
6098 uhwi_to_double_int (*pbitsize));
6099 break;
6101 case VIEW_CONVERT_EXPR:
6102 if (keep_aligning && STRICT_ALIGNMENT
6103 && (TYPE_ALIGN (TREE_TYPE (exp))
6104 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6105 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6106 < BIGGEST_ALIGNMENT)
6107 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6108 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6109 goto done;
6110 break;
6112 case MEM_REF:
6113 /* Hand back the decl for MEM[&decl, off]. */
6114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6116 tree off = TREE_OPERAND (exp, 1);
6117 if (!integer_zerop (off))
6119 double_int boff, coff = mem_ref_offset (exp);
6120 boff = double_int_lshift (coff,
6121 BITS_PER_UNIT == 8
6122 ? 3 : exact_log2 (BITS_PER_UNIT),
6123 HOST_BITS_PER_DOUBLE_INT, true);
6124 bit_offset = double_int_add (bit_offset, boff);
6126 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6128 goto done;
6130 default:
6131 goto done;
6134 /* If any reference in the chain is volatile, the effect is volatile. */
6135 if (TREE_THIS_VOLATILE (exp))
6136 *pvolatilep = 1;
6138 exp = TREE_OPERAND (exp, 0);
6140 done:
6142 /* If OFFSET is constant, see if we can return the whole thing as a
6143 constant bit position. Make sure to handle overflow during
6144 this conversion. */
6145 if (host_integerp (offset, 0))
6147 double_int tem = double_int_lshift (tree_to_double_int (offset),
6148 BITS_PER_UNIT == 8
6149 ? 3 : exact_log2 (BITS_PER_UNIT),
6150 HOST_BITS_PER_DOUBLE_INT, true);
6151 tem = double_int_add (tem, bit_offset);
6152 if (double_int_fits_in_shwi_p (tem))
6154 *pbitpos = double_int_to_shwi (tem);
6155 *poffset = offset = NULL_TREE;
6159 /* Otherwise, split it up. */
6160 if (offset)
6162 *pbitpos = double_int_to_shwi (bit_offset);
6163 *poffset = offset;
6166 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6167 if (mode == VOIDmode
6168 && blkmode_bitfield
6169 && (*pbitpos % BITS_PER_UNIT) == 0
6170 && (*pbitsize % BITS_PER_UNIT) == 0)
6171 *pmode = BLKmode;
6172 else
6173 *pmode = mode;
6175 return exp;
6178 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6179 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6180 EXP is marked as PACKED. */
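/* For example (illustrative): given

     struct __attribute__ ((packed)) s { char c; int i; } *p;

   the C front end marks the field I as DECL_PACKED, so this function
   returns true for the COMPONENT_REF p->i but false for a plain
   dereference such as *p.  */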
6182 bool
6183 contains_packed_reference (const_tree exp)
6185 bool packed_p = false;
6187 while (1)
6189 switch (TREE_CODE (exp))
6191 case COMPONENT_REF:
6193 tree field = TREE_OPERAND (exp, 1);
6194 packed_p = DECL_PACKED (field)
6195 || TYPE_PACKED (TREE_TYPE (field))
6196 || TYPE_PACKED (TREE_TYPE (exp));
6197 if (packed_p)
6198 goto done;
6200 break;
6202 case BIT_FIELD_REF:
6203 case ARRAY_REF:
6204 case ARRAY_RANGE_REF:
6205 case REALPART_EXPR:
6206 case IMAGPART_EXPR:
6207 case VIEW_CONVERT_EXPR:
6208 break;
6210 default:
6211 goto done;
6213 exp = TREE_OPERAND (exp, 0);
6215 done:
6216 return packed_p;
6219 /* Return a tree of sizetype representing the size, in bytes, of the element
6220 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
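/* For a fixed-size element type the result is just a constant; e.g.
   (illustrative, assuming a target where int is 4 bytes wide) for an
   ARRAY_REF a[i] with A of type int[10], operand 3 is absent and the
   function returns TYPE_SIZE_UNIT of int, i.e. a sizetype constant 4.  */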
6222 tree
6223 array_ref_element_size (tree exp)
6225 tree aligned_size = TREE_OPERAND (exp, 3);
6226 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6227 location_t loc = EXPR_LOCATION (exp);
6229 /* If a size was specified in the ARRAY_REF, it's the size measured
6230 in alignment units of the element type. So multiply by that value. */
6231 if (aligned_size)
6233 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6234 sizetype from another type of the same width and signedness. */
6235 if (TREE_TYPE (aligned_size) != sizetype)
6236 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6237 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6238 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6241 /* Otherwise, take the size from that of the element type. Substitute
6242 any PLACEHOLDER_EXPR that we have. */
6243 else
6244 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6247 /* Return a tree representing the lower bound of the array mentioned in
6248 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6250 tree
6251 array_ref_low_bound (tree exp)
6253 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6255 /* If a lower bound is specified in EXP, use it. */
6256 if (TREE_OPERAND (exp, 2))
6257 return TREE_OPERAND (exp, 2);
6259 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6260 substituting for a PLACEHOLDER_EXPR as needed. */
6261 if (domain_type && TYPE_MIN_VALUE (domain_type))
6262 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6264 /* Otherwise, return a zero of the appropriate type. */
6265 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6268 /* Return a tree representing the upper bound of the array mentioned in
6269 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6271 tree
6272 array_ref_up_bound (tree exp)
6274 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6276 /* If there is a domain type and it has an upper bound, use it, substituting
6277 for a PLACEHOLDER_EXPR as needed. */
6278 if (domain_type && TYPE_MAX_VALUE (domain_type))
6279 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6281 /* Otherwise fail. */
6282 return NULL_TREE;
6285 /* Return a tree representing the offset, in bytes, of the field referenced
6286 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6288 tree
6289 component_ref_field_offset (tree exp)
6291 tree aligned_offset = TREE_OPERAND (exp, 2);
6292 tree field = TREE_OPERAND (exp, 1);
6293 location_t loc = EXPR_LOCATION (exp);
6295 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6296 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6297 value. */
6298 if (aligned_offset)
6300 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6301 sizetype from another type of the same width and signedness. */
6302 if (TREE_TYPE (aligned_offset) != sizetype)
6303 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6304 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6305 size_int (DECL_OFFSET_ALIGN (field)
6306 / BITS_PER_UNIT));
6309 /* Otherwise, take the offset from that of the field. Substitute
6310 any PLACEHOLDER_EXPR that we have. */
6311 else
6312 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6315 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6317 static unsigned HOST_WIDE_INT
6318 target_align (const_tree target)
6320 /* We might have a chain of nested references with intermediate misaligning
6321 bitfield components, so we need to recurse to find out.  */
6323 unsigned HOST_WIDE_INT this_align, outer_align;
6325 switch (TREE_CODE (target))
6327 case BIT_FIELD_REF:
6328 return 1;
6330 case COMPONENT_REF:
6331 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6332 outer_align = target_align (TREE_OPERAND (target, 0));
6333 return MIN (this_align, outer_align);
6335 case ARRAY_REF:
6336 case ARRAY_RANGE_REF:
6337 this_align = TYPE_ALIGN (TREE_TYPE (target));
6338 outer_align = target_align (TREE_OPERAND (target, 0));
6339 return MIN (this_align, outer_align);
6341 CASE_CONVERT:
6342 case NON_LVALUE_EXPR:
6343 case VIEW_CONVERT_EXPR:
6344 this_align = TYPE_ALIGN (TREE_TYPE (target));
6345 outer_align = target_align (TREE_OPERAND (target, 0));
6346 return MAX (this_align, outer_align);
6348 default:
6349 return TYPE_ALIGN (TREE_TYPE (target));
6354 /* Given an rtx VALUE that may contain additions and multiplications, return
6355 an equivalent value that just refers to a register, memory, or constant.
6356 This is done by generating instructions to perform the arithmetic and
6357 returning a pseudo-register containing the value.
6359 The returned value may be a REG, SUBREG, MEM or constant. */
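/* A minimal sketch of a typical use (illustrative): given an address
   rtx such as (plus (reg A) (mult (reg B) (const_int 4))),

     addr = force_operand (addr, NULL_RTX);

   emits insns for the multiply and add and returns a pseudo holding
   the computed address, suitable for wrapping in a MEM.  */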
6362 force_operand (rtx value, rtx target)
6364 rtx op1, op2;
6365 /* Use subtarget as the target for operand 0 of a binary operation. */
6366 rtx subtarget = get_subtarget (target);
6367 enum rtx_code code = GET_CODE (value);
6369 /* Check for subreg applied to an expression produced by loop optimizer. */
6370 if (code == SUBREG
6371 && !REG_P (SUBREG_REG (value))
6372 && !MEM_P (SUBREG_REG (value)))
6374 value
6375 = simplify_gen_subreg (GET_MODE (value),
6376 force_reg (GET_MODE (SUBREG_REG (value)),
6377 force_operand (SUBREG_REG (value),
6378 NULL_RTX)),
6379 GET_MODE (SUBREG_REG (value)),
6380 SUBREG_BYTE (value));
6381 code = GET_CODE (value);
6384 /* Check for a PIC address load. */
6385 if ((code == PLUS || code == MINUS)
6386 && XEXP (value, 0) == pic_offset_table_rtx
6387 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6388 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6389 || GET_CODE (XEXP (value, 1)) == CONST))
6391 if (!subtarget)
6392 subtarget = gen_reg_rtx (GET_MODE (value));
6393 emit_move_insn (subtarget, value);
6394 return subtarget;
6397 if (ARITHMETIC_P (value))
6399 op2 = XEXP (value, 1);
6400 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6401 subtarget = 0;
6402 if (code == MINUS && CONST_INT_P (op2))
6404 code = PLUS;
6405 op2 = negate_rtx (GET_MODE (value), op2);
6408 /* Check for an addition with OP2 a constant integer and our first
6409 operand a PLUS of a virtual register and something else. In that
6410 case, we want to emit the sum of the virtual register and the
6411 constant first and then add the other value. This allows virtual
6412 register instantiation to simply modify the constant rather than
6413 creating another one around this addition. */
6414 if (code == PLUS && CONST_INT_P (op2)
6415 && GET_CODE (XEXP (value, 0)) == PLUS
6416 && REG_P (XEXP (XEXP (value, 0), 0))
6417 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6418 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6420 rtx temp = expand_simple_binop (GET_MODE (value), code,
6421 XEXP (XEXP (value, 0), 0), op2,
6422 subtarget, 0, OPTAB_LIB_WIDEN);
6423 return expand_simple_binop (GET_MODE (value), code, temp,
6424 force_operand (XEXP (XEXP (value,
6425 0), 1), 0),
6426 target, 0, OPTAB_LIB_WIDEN);
6429 op1 = force_operand (XEXP (value, 0), subtarget);
6430 op2 = force_operand (op2, NULL_RTX);
6431 switch (code)
6433 case MULT:
6434 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6435 case DIV:
6436 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6437 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6438 target, 1, OPTAB_LIB_WIDEN);
6439 else
6440 return expand_divmod (0,
6441 FLOAT_MODE_P (GET_MODE (value))
6442 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6443 GET_MODE (value), op1, op2, target, 0);
6444 case MOD:
6445 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6446 target, 0);
6447 case UDIV:
6448 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6449 target, 1);
6450 case UMOD:
6451 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6452 target, 1);
6453 case ASHIFTRT:
6454 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6455 target, 0, OPTAB_LIB_WIDEN);
6456 default:
6457 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6458 target, 1, OPTAB_LIB_WIDEN);
6461 if (UNARY_P (value))
6463 if (!target)
6464 target = gen_reg_rtx (GET_MODE (value));
6465 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6466 switch (code)
6468 case ZERO_EXTEND:
6469 case SIGN_EXTEND:
6470 case TRUNCATE:
6471 case FLOAT_EXTEND:
6472 case FLOAT_TRUNCATE:
6473 convert_move (target, op1, code == ZERO_EXTEND);
6474 return target;
6476 case FIX:
6477 case UNSIGNED_FIX:
6478 expand_fix (target, op1, code == UNSIGNED_FIX);
6479 return target;
6481 case FLOAT:
6482 case UNSIGNED_FLOAT:
6483 expand_float (target, op1, code == UNSIGNED_FLOAT);
6484 return target;
6486 default:
6487 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6491 #ifdef INSN_SCHEDULING
6492 /* On machines that have insn scheduling, we want all memory references to be
6493 explicit, so we need to deal with such paradoxical SUBREGs. */
6494 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6495 && (GET_MODE_SIZE (GET_MODE (value))
6496 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6497 value
6498 = simplify_gen_subreg (GET_MODE (value),
6499 force_reg (GET_MODE (SUBREG_REG (value)),
6500 force_operand (SUBREG_REG (value),
6501 NULL_RTX)),
6502 GET_MODE (SUBREG_REG (value)),
6503 SUBREG_BYTE (value));
6504 #endif
6506 return value;
6509 /* Subroutine of expand_expr: return nonzero iff there is no way that
6510 EXP can reference X, which is being modified. TOP_P is nonzero if this
6511 call is going to be used to determine whether we need a temporary
6512 for EXP, as opposed to a recursive call to this function.
6514 It is always safe for this routine to return zero since it merely
6515 searches for optimization opportunities. */
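/* The canonical use is the one in expand_operands below: before
   expanding EXP1, check whether the suggested TARGET is safe to keep,

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. drop TARGET if EXP1 might still need to read it.  */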
6518 safe_from_p (const_rtx x, tree exp, int top_p)
6520 rtx exp_rtl = 0;
6521 int i, nops;
6523 if (x == 0
6524 /* If EXP has varying size, we MUST use a target since we currently
6525 have no way of allocating temporaries of variable size
6526 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6527 So we assume here that something at a higher level has prevented a
6528 clash. This is somewhat bogus, but the best we can do. Only
6529 do this when X is BLKmode and when we are at the top level. */
6530 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6532 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6533 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6534 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6535 != INTEGER_CST)
6536 && GET_MODE (x) == BLKmode)
6537 /* If X is in the outgoing argument area, it is always safe. */
6538 || (MEM_P (x)
6539 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6540 || (GET_CODE (XEXP (x, 0)) == PLUS
6541 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6542 return 1;
6544 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6545 find the underlying pseudo. */
6546 if (GET_CODE (x) == SUBREG)
6548 x = SUBREG_REG (x);
6549 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6550 return 0;
6553 /* Now look at our tree code and possibly recurse. */
6554 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6556 case tcc_declaration:
6557 exp_rtl = DECL_RTL_IF_SET (exp);
6558 break;
6560 case tcc_constant:
6561 return 1;
6563 case tcc_exceptional:
6564 if (TREE_CODE (exp) == TREE_LIST)
6566 while (1)
6568 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6569 return 0;
6570 exp = TREE_CHAIN (exp);
6571 if (!exp)
6572 return 1;
6573 if (TREE_CODE (exp) != TREE_LIST)
6574 return safe_from_p (x, exp, 0);
6577 else if (TREE_CODE (exp) == CONSTRUCTOR)
6579 constructor_elt *ce;
6580 unsigned HOST_WIDE_INT idx;
6582 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6583 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6584 || !safe_from_p (x, ce->value, 0))
6585 return 0;
6586 return 1;
6588 else if (TREE_CODE (exp) == ERROR_MARK)
6589 return 1; /* An already-visited SAVE_EXPR? */
6590 else
6591 return 0;
6593 case tcc_statement:
6594 /* The only case we look at here is the DECL_INITIAL inside a
6595 DECL_EXPR. */
6596 return (TREE_CODE (exp) != DECL_EXPR
6597 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6598 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6599 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6601 case tcc_binary:
6602 case tcc_comparison:
6603 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6604 return 0;
6605 /* Fall through. */
6607 case tcc_unary:
6608 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6610 case tcc_expression:
6611 case tcc_reference:
6612 case tcc_vl_exp:
6613 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6614 the expression. If it is set, we conflict iff we are that rtx or
6615 both are in memory. Otherwise, we check all operands of the
6616 expression recursively. */
6618 switch (TREE_CODE (exp))
6620 case ADDR_EXPR:
6621 /* If the operand is static or we are static, we can't conflict.
6622 Likewise if we don't conflict with the operand at all. */
6623 if (staticp (TREE_OPERAND (exp, 0))
6624 || TREE_STATIC (exp)
6625 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6626 return 1;
6628 /* Otherwise, the only way this can conflict is if we are taking
6629 the address of a DECL whose address is part of X, which is
6630 very rare. */
6631 exp = TREE_OPERAND (exp, 0);
6632 if (DECL_P (exp))
6634 if (!DECL_RTL_SET_P (exp)
6635 || !MEM_P (DECL_RTL (exp)))
6636 return 0;
6637 else
6638 exp_rtl = XEXP (DECL_RTL (exp), 0);
6640 break;
6642 case MEM_REF:
6643 if (MEM_P (x)
6644 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6645 get_alias_set (exp)))
6646 return 0;
6647 break;
6649 case CALL_EXPR:
6650 /* Assume that the call will clobber all hard registers and
6651 all of memory. */
6652 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6653 || MEM_P (x))
6654 return 0;
6655 break;
6657 case WITH_CLEANUP_EXPR:
6658 case CLEANUP_POINT_EXPR:
6659 /* Lowered by gimplify.c. */
6660 gcc_unreachable ();
6662 case SAVE_EXPR:
6663 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6665 default:
6666 break;
6669 /* If we have an rtx, we do not need to scan our operands. */
6670 if (exp_rtl)
6671 break;
6673 nops = TREE_OPERAND_LENGTH (exp);
6674 for (i = 0; i < nops; i++)
6675 if (TREE_OPERAND (exp, i) != 0
6676 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6677 return 0;
6679 break;
6681 case tcc_type:
6682 /* Should never get a type here. */
6683 gcc_unreachable ();
6686 /* If we have an rtl, find any enclosed object. Then see if we conflict
6687 with it. */
6688 if (exp_rtl)
6690 if (GET_CODE (exp_rtl) == SUBREG)
6692 exp_rtl = SUBREG_REG (exp_rtl);
6693 if (REG_P (exp_rtl)
6694 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6695 return 0;
6698 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6699 are memory and they conflict. */
6700 return ! (rtx_equal_p (x, exp_rtl)
6701 || (MEM_P (x) && MEM_P (exp_rtl)
6702 && true_dependence (exp_rtl, VOIDmode, x,
6703 rtx_addr_varies_p)));
6706 /* If we reach here, it is safe. */
6707 return 1;
6711 /* Return the highest power of two that EXP is known to be a multiple of.
6712 This is used in updating alignment of MEMs in array references. */
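/* Worked example (illustrative): for the expression i * 12, the
   MULT_EXPR case multiplies the factors of the two operands; a bare
   variable such as I contributes 1 and the constant 12 contributes 4
   (its lowest set bit), so the result is 4: I * 12 is known to be a
   multiple of 4.  */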
6714 unsigned HOST_WIDE_INT
6715 highest_pow2_factor (const_tree exp)
6717 unsigned HOST_WIDE_INT c0, c1;
6719 switch (TREE_CODE (exp))
6721 case INTEGER_CST:
6722 /* We can find the lowest bit that's a one. If the low
6723 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6724 We need to handle this case since we can find it in a COND_EXPR,
6725 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6726 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6727 later ICE. */
6728 if (TREE_OVERFLOW (exp))
6729 return BIGGEST_ALIGNMENT;
6730 else
6732 /* Note: tree_low_cst is intentionally not used here,
6733 we don't care about the upper bits. */
6734 c0 = TREE_INT_CST_LOW (exp);
6735 c0 &= -c0;
6736 return c0 ? c0 : BIGGEST_ALIGNMENT;
6738 break;
6740 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6741 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6742 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6743 return MIN (c0, c1);
6745 case MULT_EXPR:
6746 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6747 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6748 return c0 * c1;
6750 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6751 case CEIL_DIV_EXPR:
6752 if (integer_pow2p (TREE_OPERAND (exp, 1))
6753 && host_integerp (TREE_OPERAND (exp, 1), 1))
6755 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6756 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6757 return MAX (1, c0 / c1);
6759 break;
6761 case BIT_AND_EXPR:
6762 /* The highest power of two of a bit-and expression is the maximum of
6763 that of its operands. We typically get here for a complex LHS and
6764 a constant negative power of two on the RHS to force an explicit
6765 alignment, so don't bother looking at the LHS. */
6766 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6768 CASE_CONVERT:
6769 case SAVE_EXPR:
6770 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6772 case COMPOUND_EXPR:
6773 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6775 case COND_EXPR:
6776 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6777 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6778 return MIN (c0, c1);
6780 default:
6781 break;
6784 return 1;
6787 /* Similar, except that the alignment requirements of TARGET are
6788 taken into account. Assume it is at least as aligned as its
6789 type, unless it is a COMPONENT_REF in which case the layout of
6790 the structure gives the alignment. */
6792 static unsigned HOST_WIDE_INT
6793 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6795 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6796 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6798 return MAX (factor, talign);
6801 /* Subroutine of expand_expr. Expand the two operands of a binary
6802 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6803 The value may be stored in TARGET if TARGET is nonzero. The
6804 MODIFIER argument is as documented by expand_expr. */
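/* A typical call looks like this (sketch; OP0 and OP1 are locals of
   the caller):

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
		      EXPAND_NORMAL);

   after which OP0 and OP1 hold the expanded operands of a binary
   expression, with TARGET reused only when that is safe.  */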
6806 static void
6807 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6808 enum expand_modifier modifier)
6810 if (! safe_from_p (target, exp1, 1))
6811 target = 0;
6812 if (operand_equal_p (exp0, exp1, 0))
6814 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6815 *op1 = copy_rtx (*op0);
6817 else
6819 /* If we need to preserve evaluation order, copy exp0 into its own
6820 temporary variable so that it can't be clobbered by exp1. */
6821 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6822 exp0 = save_expr (exp0);
6823 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6824 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6829 /* Return a MEM that contains constant EXP. DEFER is as for
6830 output_constant_def and MODIFIER is as for expand_expr. */
6832 static rtx
6833 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6835 rtx mem;
6837 mem = output_constant_def (exp, defer);
6838 if (modifier != EXPAND_INITIALIZER)
6839 mem = use_anchored_address (mem);
6840 return mem;
6843 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6844 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6846 static rtx
6847 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6848 enum expand_modifier modifier, addr_space_t as)
6850 rtx result, subtarget;
6851 tree inner, offset;
6852 HOST_WIDE_INT bitsize, bitpos;
6853 int volatilep, unsignedp;
6854 enum machine_mode mode1;
6856 /* If we are taking the address of a constant and are at the top level,
6857 we have to use output_constant_def since we can't call force_const_mem
6858 at top level. */
6859 /* ??? This should be considered a front-end bug. We should not be
6860 generating ADDR_EXPR of something that isn't an LVALUE. The only
6861 exception here is STRING_CST. */
6862 if (CONSTANT_CLASS_P (exp))
6863 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6865 /* Everything must be something allowed by is_gimple_addressable. */
6866 switch (TREE_CODE (exp))
6868 case INDIRECT_REF:
6869 /* This case will happen via recursion for &a->b. */
6870 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6872 case MEM_REF:
6874 tree tem = TREE_OPERAND (exp, 0);
6875 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6876 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6877 tem,
6878 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6879 return expand_expr (tem, target, tmode, modifier);
6882 case CONST_DECL:
6883 /* Expand the initializer like constants above. */
6884 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6886 case REALPART_EXPR:
6887 /* The real part of the complex number is always first, therefore
6888 the address is the same as the address of the parent object. */
6889 offset = 0;
6890 bitpos = 0;
6891 inner = TREE_OPERAND (exp, 0);
6892 break;
6894 case IMAGPART_EXPR:
6895 /* The imaginary part of the complex number is always second.
6896 The expression is therefore always offset by the size of the
6897 scalar type. */
6898 offset = 0;
6899 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6900 inner = TREE_OPERAND (exp, 0);
6901 break;
6903 default:
6904 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6905 expand_expr, as that can have various side effects; LABEL_DECLs, for
6906 example, may not have their DECL_RTL set yet. Expand the rtl of
6907 CONSTRUCTORs too, which should yield a memory reference for the
6908 constructor's contents. Assume language specific tree nodes can
6909 be expanded in some interesting way. */
6910 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6911 if (DECL_P (exp)
6912 || TREE_CODE (exp) == CONSTRUCTOR
6913 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6915 result = expand_expr (exp, target, tmode,
6916 modifier == EXPAND_INITIALIZER
6917 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6919 /* If the DECL isn't in memory, then the DECL wasn't properly
6920 marked TREE_ADDRESSABLE, which will be either a front-end
6921 or a tree optimizer bug. */
6922 gcc_assert (MEM_P (result));
6923 result = XEXP (result, 0);
6925 /* ??? Is this needed anymore? */
6926 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6928 assemble_external (exp);
6929 TREE_USED (exp) = 1;
6932 if (modifier != EXPAND_INITIALIZER
6933 && modifier != EXPAND_CONST_ADDRESS)
6934 result = force_operand (result, target);
6935 return result;
6938 /* Pass FALSE as the last argument to get_inner_reference although
6939 we are expanding to RTL. The rationale is that we know how to
6940 handle "aligning nodes" here: we can just bypass them because
6941 they won't change the final object whose address will be returned
6942 (they actually exist only for that purpose). */
6943 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6944 &mode1, &unsignedp, &volatilep, false);
6945 break;
6948 /* We must have made progress. */
6949 gcc_assert (inner != exp);
6951 subtarget = offset || bitpos ? NULL_RTX : target;
6952 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6953 inner alignment, force the inner to be sufficiently aligned. */
6954 if (CONSTANT_CLASS_P (inner)
6955 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6957 inner = copy_node (inner);
6958 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6959 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6960 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6962 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6964 if (offset)
6966 rtx tmp;
6968 if (modifier != EXPAND_NORMAL)
6969 result = force_operand (result, NULL);
6970 tmp = expand_expr (offset, NULL_RTX, tmode,
6971 modifier == EXPAND_INITIALIZER
6972 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6974 result = convert_memory_address_addr_space (tmode, result, as);
6975 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6977 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6978 result = simplify_gen_binary (PLUS, tmode, result, tmp);
6979 else
6981 subtarget = bitpos ? NULL_RTX : target;
6982 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6983 1, OPTAB_LIB_WIDEN);
6987 if (bitpos)
6989 /* Someone beforehand should have rejected taking the address
6990 of such an object. */
6991 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6993 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6994 if (modifier < EXPAND_SUM)
6995 result = force_operand (result, target);
6998 return result;
7001 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7002 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7004 static rtx
7005 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7006 enum expand_modifier modifier)
7008 addr_space_t as = ADDR_SPACE_GENERIC;
7009 enum machine_mode address_mode = Pmode;
7010 enum machine_mode pointer_mode = ptr_mode;
7011 enum machine_mode rmode;
7012 rtx result;
7014 /* Target mode of VOIDmode says "whatever's natural". */
7015 if (tmode == VOIDmode)
7016 tmode = TYPE_MODE (TREE_TYPE (exp));
7018 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7020 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7021 address_mode = targetm.addr_space.address_mode (as);
7022 pointer_mode = targetm.addr_space.pointer_mode (as);
7025 /* We can get called with some Weird Things if the user does silliness
7026 like "(short) &a". In that case, convert_memory_address won't do
7027 the right thing, so ignore the given target mode. */
7028 if (tmode != address_mode && tmode != pointer_mode)
7029 tmode = address_mode;
7031 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7032 tmode, modifier, as);
7034 /* Despite expand_expr's claims about ignoring TMODE when not
7035 strictly convenient, stuff breaks if we don't honor it. Note
7036 that combined with the above, we only do this for pointer modes. */
7037 rmode = GET_MODE (result);
7038 if (rmode == VOIDmode)
7039 rmode = tmode;
7040 if (rmode != tmode)
7041 result = convert_memory_address_addr_space (tmode, result, as);
7043 return result;
7046 /* Generate code for computing CONSTRUCTOR EXP.
7047 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7048 is TRUE, instead of creating a temporary variable in memory
7049 NULL is returned and the caller needs to handle it differently. */
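/* Caller-side sketch (illustrative): a caller that wants to avoid a
   memory temporary first tries

     rtx tmp = expand_constructor (exp, target, modifier, true);

   and, if TMP comes back as NULL_RTX, falls back to some other
   strategy (for instance retrying with AVOID_TEMP_MEM false), which
   is the contract described above.  */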
7051 static rtx
7052 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7053 bool avoid_temp_mem)
7055 tree type = TREE_TYPE (exp);
7056 enum machine_mode mode = TYPE_MODE (type);
7058 /* Try to avoid creating a temporary at all. This is possible
7059 if all of the initializer is zero.
7060 FIXME: try to handle all [0..255] initializers we can handle
7061 with memset. */
7062 if (TREE_STATIC (exp)
7063 && !TREE_ADDRESSABLE (exp)
7064 && target != 0 && mode == BLKmode
7065 && all_zeros_p (exp))
7067 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7068 return target;
7071 /* All elts simple constants => refer to a constant in memory. But
7072 if this is a non-BLKmode mode, let it store a field at a time
7073 since that should make a CONST_INT or CONST_DOUBLE when we
7074 fold. Likewise, if we have a target we can use, it is best to
7075 store directly into the target unless the type is large enough
7076 that memcpy will be used. If we are making an initializer and
7077 all operands are constant, put it in memory as well.
7079 FIXME: Avoid trying to fill vector constructors piece-meal.
7080 Output them with output_constant_def below unless we're sure
7081 they're zeros. This should go away when vector initializers
7082 are treated like VECTOR_CST instead of arrays. */
7083 if ((TREE_STATIC (exp)
7084 && ((mode == BLKmode
7085 && ! (target != 0 && safe_from_p (target, exp, 1)))
7086 || TREE_ADDRESSABLE (exp)
7087 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7088 && (! MOVE_BY_PIECES_P
7089 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7090 TYPE_ALIGN (type)))
7091 && ! mostly_zeros_p (exp))))
7092 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7093 && TREE_CONSTANT (exp)))
7095 rtx constructor;
7097 if (avoid_temp_mem)
7098 return NULL_RTX;
7100 constructor = expand_expr_constant (exp, 1, modifier);
7102 if (modifier != EXPAND_CONST_ADDRESS
7103 && modifier != EXPAND_INITIALIZER
7104 && modifier != EXPAND_SUM)
7105 constructor = validize_mem (constructor);
7107 return constructor;
7110 /* Handle calls that pass values in multiple non-contiguous
7111 locations. The Irix 6 ABI has examples of this. */
7112 if (target == 0 || ! safe_from_p (target, exp, 1)
7113 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7115 if (avoid_temp_mem)
7116 return NULL_RTX;
7118 target
7119 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7120 | (TREE_READONLY (exp)
7121 * TYPE_QUAL_CONST))),
7122 0, TREE_ADDRESSABLE (exp), 1);
7125 store_constructor (exp, target, 0, int_expr_size (exp));
7126 return target;
7130 /* expand_expr: generate code for computing expression EXP.
7131 An rtx for the computed value is returned. The value is never null.
7132 In the case of a void EXP, const0_rtx is returned.
7134 The value may be stored in TARGET if TARGET is nonzero.
7135 TARGET is just a suggestion; callers must assume that
7136 the rtx returned may not be the same as TARGET.
7138 If TARGET is CONST0_RTX, it means that the value will be ignored.
7140 If TMODE is not VOIDmode, it suggests generating the
7141 result in mode TMODE. But this is done only when convenient.
7142 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7143 TMODE is just a suggestion; callers must assume that
7144 the rtx returned may not have mode TMODE.
7146 Note that TARGET may have neither TMODE nor MODE. In that case, it
7147 probably will not be used.
7149 If MODIFIER is EXPAND_SUM then when EXP is an addition
7150 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7151 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7152 products as above, or REG or MEM, or constant.
7153 Ordinarily in such cases we would output mul or add instructions
7154 and then return a pseudo reg containing the sum.
7156 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7157 it also marks a label as absolutely required (it can't be dead).
7158 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7159 This is used for outputting expressions used in initializers.
7161 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7162 with a constant address even if that address is not normally legitimate.
7163 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7165 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7166 a call parameter. Such targets require special care as we haven't yet
7167 marked TARGET so that it's safe from being trashed by libcalls. We
7168 don't want to use TARGET for anything but the final result;
7169 intermediate values must go elsewhere. Additionally, calls to
7170 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7172 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7173 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7174 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7175 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7176 recursively. */
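/* A minimal calling sketch (illustrative): most code reaches this
   function through the expand_expr/expand_normal wrappers, e.g.

     rtx r = expand_expr_real (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL,
			       NULL);

   which is roughly what expand_normal (exp) amounts to.  */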
7179 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7180 enum expand_modifier modifier, rtx *alt_rtl)
7182 rtx ret;
7184 /* Handle ERROR_MARK before anybody tries to access its type. */
7185 if (TREE_CODE (exp) == ERROR_MARK
7186 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7188 ret = CONST0_RTX (tmode);
7189 return ret ? ret : const0_rtx;
7192 /* If this is an expression of some kind and it has an associated line
7193 number, then emit the line number before expanding the expression.
7195 We need to save and restore the file and line information so that
7196 errors discovered during expansion are emitted with the right
7197 information. It would be better if the diagnostic routines
7198 used the file/line information embedded in the tree nodes rather
7199 than globals. */
7200 if (cfun && EXPR_HAS_LOCATION (exp))
7202 location_t saved_location = input_location;
7203 location_t saved_curr_loc = get_curr_insn_source_location ();
7204 tree saved_block = get_curr_insn_block ();
7205 input_location = EXPR_LOCATION (exp);
7206 set_curr_insn_source_location (input_location);
7208 /* Record where the insns produced belong. */
7209 set_curr_insn_block (TREE_BLOCK (exp));
7211 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7213 input_location = saved_location;
7214 set_curr_insn_block (saved_block);
7215 set_curr_insn_source_location (saved_curr_loc);
7217 else
7219 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7222 return ret;
7226 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7227 enum expand_modifier modifier)
7229 rtx op0, op1, op2, temp;
7230 tree type;
7231 int unsignedp;
7232 enum machine_mode mode;
7233 enum tree_code code = ops->code;
7234 optab this_optab;
7235 rtx subtarget, original_target;
7236 int ignore;
7237 bool reduce_bit_field;
7238 location_t loc = ops->location;
7239 tree treeop0, treeop1, treeop2;
7240 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7241 ? reduce_to_bit_field_precision ((expr), \
7242 target, \
7243 type) \
7244 : (expr))
7246 type = ops->type;
7247 mode = TYPE_MODE (type);
7248 unsignedp = TYPE_UNSIGNED (type);
7250 treeop0 = ops->op0;
7251 treeop1 = ops->op1;
7252 treeop2 = ops->op2;
7254 /* We should be called only on simple (binary or unary) expressions,
7255 exactly those that are valid in gimple expressions that aren't
7256 GIMPLE_SINGLE_RHS (or invalid). */
7257 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7258 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7259 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7261 ignore = (target == const0_rtx
7262 || ((CONVERT_EXPR_CODE_P (code)
7263 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7264 && TREE_CODE (type) == VOID_TYPE));
7266 /* We should be called only if we need the result. */
7267 gcc_assert (!ignore);
7269 /* An operation in what may be a bit-field type needs the
7270 result to be reduced to the precision of the bit-field type,
7271 which is narrower than that of the type's mode. */
7272 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7273 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
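  /* Example (illustrative): a C bit-field of width 3 yields an
     INTEGER_TYPE with TYPE_PRECISION 3 but TYPE_MODE QImode, whose
     precision is 8, so arithmetic results must be brought back to
     3 bits by REDUCE_BIT_FIELD.  */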
7275 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7276 target = 0;
7278 /* Use subtarget as the target for operand 0 of a binary operation. */
7279 subtarget = get_subtarget (target);
7280 original_target = target;
7282 switch (code)
7284 case NON_LVALUE_EXPR:
7285 case PAREN_EXPR:
7286 CASE_CONVERT:
7287 if (treeop0 == error_mark_node)
7288 return const0_rtx;
7290 if (TREE_CODE (type) == UNION_TYPE)
7292 tree valtype = TREE_TYPE (treeop0);
7294 /* If both input and output are BLKmode, this conversion isn't doing
7295 anything except possibly changing memory attribute. */
7296 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7298 rtx result = expand_expr (treeop0, target, tmode,
7299 modifier);
7301 result = copy_rtx (result);
7302 set_mem_attributes (result, type, 0);
7303 return result;
7306 if (target == 0)
7308 if (TYPE_MODE (type) != BLKmode)
7309 target = gen_reg_rtx (TYPE_MODE (type));
7310 else
7311 target = assign_temp (type, 0, 1, 1);
7314 if (MEM_P (target))
7315 /* Store data into beginning of memory target. */
7316 store_expr (treeop0,
7317 adjust_address (target, TYPE_MODE (valtype), 0),
7318 modifier == EXPAND_STACK_PARM,
7319 false);
7321 else
7323 gcc_assert (REG_P (target));
7325 /* Store this field into a union of the proper type. */
7326 store_field (target,
7327 MIN ((int_size_in_bytes (TREE_TYPE
7328 (treeop0))
7329 * BITS_PER_UNIT),
7330 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7331 0, TYPE_MODE (valtype), treeop0,
7332 type, 0, false);
7335 /* Return the entire union. */
7336 return target;
7339 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7341 op0 = expand_expr (treeop0, target, VOIDmode,
7342 modifier);
7344 /* If the signedness of the conversion differs and OP0 is
7345 a promoted SUBREG, clear that indication since we now
7346 have to do the proper extension. */
7347 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7348 && GET_CODE (op0) == SUBREG)
7349 SUBREG_PROMOTED_VAR_P (op0) = 0;
7351 return REDUCE_BIT_FIELD (op0);
7354 op0 = expand_expr (treeop0, NULL_RTX, mode,
7355 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7356 if (GET_MODE (op0) == mode)
7359 /* If OP0 is a constant, just convert it into the proper mode. */
7360 else if (CONSTANT_P (op0))
7362 tree inner_type = TREE_TYPE (treeop0);
7363 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7365 if (modifier == EXPAND_INITIALIZER)
7366 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7367 subreg_lowpart_offset (mode,
7368 inner_mode));
7369 else
7370 op0 = convert_modes (mode, inner_mode, op0,
7371 TYPE_UNSIGNED (inner_type));
7374 else if (modifier == EXPAND_INITIALIZER)
7375 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7377 else if (target == 0)
7378 op0 = convert_to_mode (mode, op0,
7379 TYPE_UNSIGNED (TREE_TYPE
7380 (treeop0)));
7381 else
7383 convert_move (target, op0,
7384 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7385 op0 = target;
7388 return REDUCE_BIT_FIELD (op0);
7390 case ADDR_SPACE_CONVERT_EXPR:
7392 tree treeop0_type = TREE_TYPE (treeop0);
7393 addr_space_t as_to;
7394 addr_space_t as_from;
7396 gcc_assert (POINTER_TYPE_P (type));
7397 gcc_assert (POINTER_TYPE_P (treeop0_type));
7399 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7400 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7402 /* Conversions between pointers to the same address space should
7403 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7404 gcc_assert (as_to != as_from);
7406 /* Ask target code to handle conversion between pointers
7407 to overlapping address spaces. */
7408 if (targetm.addr_space.subset_p (as_to, as_from)
7409 || targetm.addr_space.subset_p (as_from, as_to))
7411 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7412 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7413 gcc_assert (op0);
7414 return op0;
7417 /* For disjoint address spaces, converting anything but
7418 a null pointer invokes undefined behaviour. We simply
7419 always return a null pointer here. */
7420 return CONST0_RTX (mode);
7423 case POINTER_PLUS_EXPR:
7424 /* Even though the sizetype mode and the pointer's mode can be different,
7425 expand is able to handle this correctly and get the correct result out
7426 of the PLUS_EXPR code. */
7427 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7428 if sizetype precision is smaller than pointer precision. */
7429 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7430 treeop1 = fold_convert_loc (loc, type,
7431 fold_convert_loc (loc, ssizetype,
7432 treeop1));
7433 case PLUS_EXPR:
7434 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7435 something else, make sure we add the register to the constant and
7436 then to the other thing. This case can occur during strength
7437 reduction and doing it this way will produce better code if the
7438 frame pointer or argument pointer is eliminated.
7440 fold-const.c will ensure that the constant is always in the inner
7441 PLUS_EXPR, so the only case we need to do anything about is if
7442 sp, ap, or fp is our second argument, in which case we must swap
7443 the innermost first argument and our second argument. */
7445 if (TREE_CODE (treeop0) == PLUS_EXPR
7446 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7447 && TREE_CODE (treeop1) == VAR_DECL
7448 && (DECL_RTL (treeop1) == frame_pointer_rtx
7449 || DECL_RTL (treeop1) == stack_pointer_rtx
7450 || DECL_RTL (treeop1) == arg_pointer_rtx))
7452 tree t = treeop1;
7454 treeop1 = TREE_OPERAND (treeop0, 0);
7455 TREE_OPERAND (treeop0, 0) = t;
7458 /* If the result is to be ptr_mode and we are adding an integer to
7459 something, we might be forming a constant. So try to use
7460 plus_constant. If it produces a sum and we can't accept it,
7461 use force_operand. This allows P = &ARR[const] to generate
7462 efficient code on machines where a SYMBOL_REF is not a valid
7463 address.
7465 If this is an EXPAND_SUM call, always return the sum. */
7466 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7467 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7469 if (modifier == EXPAND_STACK_PARM)
7470 target = 0;
7471 if (TREE_CODE (treeop0) == INTEGER_CST
7472 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7473 && TREE_CONSTANT (treeop1))
7475 rtx constant_part;
7477 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7478 EXPAND_SUM);
7479 /* Use immed_double_const to ensure that the constant is
7480 truncated according to the mode of OP1, then sign extended
7481 to a HOST_WIDE_INT. Using the constant directly can result
7482 in non-canonical RTL in a 64x32 cross compile. */
7483 constant_part
7484 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7485 (HOST_WIDE_INT) 0,
7486 TYPE_MODE (TREE_TYPE (treeop1)));
7487 op1 = plus_constant (op1, INTVAL (constant_part));
7488 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7489 op1 = force_operand (op1, target);
7490 return REDUCE_BIT_FIELD (op1);
7493 else if (TREE_CODE (treeop1) == INTEGER_CST
7494 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7495 && TREE_CONSTANT (treeop0))
7497 rtx constant_part;
7499 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7500 (modifier == EXPAND_INITIALIZER
7501 ? EXPAND_INITIALIZER : EXPAND_SUM));
7502 if (! CONSTANT_P (op0))
7504 op1 = expand_expr (treeop1, NULL_RTX,
7505 VOIDmode, modifier);
7506 /* Return a PLUS if modifier says it's OK. */
7507 if (modifier == EXPAND_SUM
7508 || modifier == EXPAND_INITIALIZER)
7509 return simplify_gen_binary (PLUS, mode, op0, op1);
7510 goto binop2;
7512 /* Use immed_double_const to ensure that the constant is
7513 truncated according to the mode of OP0, then sign extended
7514 to a HOST_WIDE_INT. Using the constant directly can result
7515 in non-canonical RTL in a 64x32 cross compile. */
7516 constant_part
7517 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7518 (HOST_WIDE_INT) 0,
7519 TYPE_MODE (TREE_TYPE (treeop0)));
7520 op0 = plus_constant (op0, INTVAL (constant_part));
7521 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7522 op0 = force_operand (op0, target);
7523 return REDUCE_BIT_FIELD (op0);
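/* Purely as an illustration of the address folding above (the array name is
   hypothetical): because the integer term is folded into the constant part
   of the sum, taking the address of a fixed element of a global needs no
   run-time addition at all.  */
extern int sketch_array[16];
static int *
sketch_element_addr (void)
{
  /* Ideally a single symbol-plus-constant address (symbol+20 with 4-byte
     int), not a SYMBOL_REF followed by an add.  */
  return &sketch_array[5];
}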
7527 /* Use TER to expand pointer addition of a negated value
7528 as pointer subtraction. */
7529 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7530 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7531 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7532 && TREE_CODE (treeop1) == SSA_NAME
7533 && TYPE_MODE (TREE_TYPE (treeop0))
7534 == TYPE_MODE (TREE_TYPE (treeop1)))
7536 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7537 if (def)
7539 treeop1 = gimple_assign_rhs1 (def);
7540 code = MINUS_EXPR;
7541 goto do_minus;
7545 /* No sense saving up arithmetic to be done
7546 if it's all in the wrong mode to form part of an address.
7547 And force_operand won't know whether to sign-extend or
7548 zero-extend. */
7549 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7550 || mode != ptr_mode)
7552 expand_operands (treeop0, treeop1,
7553 subtarget, &op0, &op1, EXPAND_NORMAL);
7554 if (op0 == const0_rtx)
7555 return op1;
7556 if (op1 == const0_rtx)
7557 return op0;
7558 goto binop2;
7561 expand_operands (treeop0, treeop1,
7562 subtarget, &op0, &op1, modifier);
7563 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7565 case MINUS_EXPR:
7566 do_minus:
7567 /* For initializers, we are allowed to return a MINUS of two
7568 symbolic constants. Here we handle all cases when both operands
7569 are constant. */
7570 /* Handle difference of two symbolic constants,
7571 for the sake of an initializer. */
7572 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7573 && really_constant_p (treeop0)
7574 && really_constant_p (treeop1))
7576 expand_operands (treeop0, treeop1,
7577 NULL_RTX, &op0, &op1, modifier);
7579 /* If the last operand is a CONST_INT, use plus_constant of
7580 the negated constant. Else make the MINUS. */
7581 if (CONST_INT_P (op1))
7582 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7583 else
7584 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7587 /* No sense saving up arithmetic to be done
7588 if it's all in the wrong mode to form part of an address.
7589 And force_operand won't know whether to sign-extend or
7590 zero-extend. */
7591 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7592 || mode != ptr_mode)
7593 goto binop;
7595 expand_operands (treeop0, treeop1,
7596 subtarget, &op0, &op1, modifier);
7598 /* Convert A - const to A + (-const). */
7599 if (CONST_INT_P (op1))
7601 op1 = negate_rtx (mode, op1);
7602 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7605 goto binop2;
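/* The A - const => A + (-const) rewrite above, seen from C (a sketch only):
   the negation is done in the operation's mode, so both forms wrap
   identically, and the PLUS form exposes the constant to further address
   arithmetic.  */
static unsigned int
sketch_minus_as_plus (unsigned int a)
{
  return a + (0u - 16u);          /* same value, bit for bit, as a - 16 */
}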
7607 case WIDEN_MULT_PLUS_EXPR:
7608 case WIDEN_MULT_MINUS_EXPR:
7609 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7610 op2 = expand_normal (treeop2);
7611 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7612 target, unsignedp);
7613 return target;
7615 case WIDEN_MULT_EXPR:
7616 /* If first operand is constant, swap them.
7617 Thus the following special case checks need only
7618 check the second operand. */
7619 if (TREE_CODE (treeop0) == INTEGER_CST)
7621 tree t1 = treeop0;
7622 treeop0 = treeop1;
7623 treeop1 = t1;
7626 /* First, check if we have a multiplication of one signed and one
7627 unsigned operand. */
7628 if (TREE_CODE (treeop1) != INTEGER_CST
7629 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7630 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7632 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7633 this_optab = usmul_widen_optab;
7634 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7636 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7638 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7639 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7640 EXPAND_NORMAL);
7641 else
7642 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7643 EXPAND_NORMAL);
7644 goto binop3;
7648 /* Check for a multiplication with matching signedness. */
7649 else if ((TREE_CODE (treeop1) == INTEGER_CST
7650 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7651 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7652 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7654 tree op0type = TREE_TYPE (treeop0);
7655 enum machine_mode innermode = TYPE_MODE (op0type);
7656 bool zextend_p = TYPE_UNSIGNED (op0type);
7657 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7658 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7660 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7661 && TREE_CODE (treeop0) != INTEGER_CST)
7663 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7665 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7666 EXPAND_NORMAL);
7667 temp = expand_widening_mult (mode, op0, op1, target,
7668 unsignedp, this_optab);
7669 return REDUCE_BIT_FIELD (temp);
7671 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7672 && innermode == word_mode)
7674 rtx htem, hipart;
7675 op0 = expand_normal (treeop0);
7676 if (TREE_CODE (treeop1) == INTEGER_CST)
7677 op1 = convert_modes (innermode, mode,
7678 expand_normal (treeop1), unsignedp);
7679 else
7680 op1 = expand_normal (treeop1);
7681 temp = expand_binop (mode, other_optab, op0, op1, target,
7682 unsignedp, OPTAB_LIB_WIDEN);
7683 hipart = gen_highpart (innermode, temp);
7684 htem = expand_mult_highpart_adjust (innermode, hipart,
7685 op0, op1, hipart,
7686 zextend_p);
7687 if (htem != hipart)
7688 emit_move_insn (hipart, htem);
7689 return REDUCE_BIT_FIELD (temp);
7693 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7694 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7695 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7696 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
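/* What WIDEN_MULT_EXPR corresponds to in source terms (a sketch, not the
   RTL that is emitted): both operands have the narrow type and only the
   product is wide, so a target with an NxN->2N multiply pattern can avoid
   the explicit extensions and the full wide multiply.  */
static long long
sketch_widening_mult (int a, int b)
{
  return (long long) a * b;       /* one widening multiply, ideally */
}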
7698 case FMA_EXPR:
7700 optab opt = fma_optab;
7701 gimple def0, def2;
7703 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
7704 call. */
7705 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7707 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7708 tree call_expr;
7710 gcc_assert (fn != NULL_TREE);
7711 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7712 return expand_builtin (call_expr, target, subtarget, mode, false);
7715 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7716 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7718 op0 = op2 = NULL;
7720 if (def0 && def2
7721 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7723 opt = fnms_optab;
7724 op0 = expand_normal (gimple_assign_rhs1 (def0));
7725 op2 = expand_normal (gimple_assign_rhs1 (def2));
7727 else if (def0
7728 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7730 opt = fnma_optab;
7731 op0 = expand_normal (gimple_assign_rhs1 (def0));
7733 else if (def2
7734 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7736 opt = fms_optab;
7737 op2 = expand_normal (gimple_assign_rhs1 (def2));
7740 if (op0 == NULL)
7741 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7742 if (op2 == NULL)
7743 op2 = expand_normal (treeop2);
7744 op1 = expand_normal (treeop1);
7746 return expand_ternary_op (TYPE_MODE (type), opt,
7747 op0, op1, op2, target, 0);
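/* Shape of the fallback taken when no fma insn exists (illustrative; the
   prototype is written out here only to keep the sketch self-contained,
   it is the standard C99 fma from <math.h>): the whole FMA_EXPR collapses
   back into a single library call.  */
extern double fma (double, double, double);
static double
sketch_fma_fallback (double a, double b, double c)
{
  return fma (a, b, c);           /* a*b+c with a single rounding */
}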
7750 case MULT_EXPR:
7751 /* If this is a fixed-point operation, then we cannot use the code
7752 below because "expand_mult" doesn't support sat/no-sat fixed-point
7753 multiplications. */
7754 if (ALL_FIXED_POINT_MODE_P (mode))
7755 goto binop;
7757 /* If first operand is constant, swap them.
7758 Thus the following special case checks need only
7759 check the second operand. */
7760 if (TREE_CODE (treeop0) == INTEGER_CST)
7762 tree t1 = treeop0;
7763 treeop0 = treeop1;
7764 treeop1 = t1;
7767 /* Attempt to return something suitable for generating an
7768 indexed address, for machines that support that. */
7770 if (modifier == EXPAND_SUM && mode == ptr_mode
7771 && host_integerp (treeop1, 0))
7773 tree exp1 = treeop1;
7775 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7776 EXPAND_SUM);
7778 if (!REG_P (op0))
7779 op0 = force_operand (op0, NULL_RTX);
7780 if (!REG_P (op0))
7781 op0 = copy_to_mode_reg (mode, op0);
7783 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7784 gen_int_mode (tree_low_cst (exp1, 0),
7785 TYPE_MODE (TREE_TYPE (exp1)))));
7788 if (modifier == EXPAND_STACK_PARM)
7789 target = 0;
7791 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7792 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7794 case TRUNC_DIV_EXPR:
7795 case FLOOR_DIV_EXPR:
7796 case CEIL_DIV_EXPR:
7797 case ROUND_DIV_EXPR:
7798 case EXACT_DIV_EXPR:
7799 /* If this is a fixed-point operation, then we cannot use the code
7800 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7801 divisions. */
7802 if (ALL_FIXED_POINT_MODE_P (mode))
7803 goto binop;
7805 if (modifier == EXPAND_STACK_PARM)
7806 target = 0;
7807 /* Possible optimization: compute the dividend with EXPAND_SUM
7808 then if the divisor is constant can optimize the case
7809 where some terms of the dividend have coeffs divisible by it. */
7810 expand_operands (treeop0, treeop1,
7811 subtarget, &op0, &op1, EXPAND_NORMAL);
7812 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7814 case RDIV_EXPR:
7815 goto binop;
7817 case TRUNC_MOD_EXPR:
7818 case FLOOR_MOD_EXPR:
7819 case CEIL_MOD_EXPR:
7820 case ROUND_MOD_EXPR:
7821 if (modifier == EXPAND_STACK_PARM)
7822 target = 0;
7823 expand_operands (treeop0, treeop1,
7824 subtarget, &op0, &op1, EXPAND_NORMAL);
7825 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7827 case FIXED_CONVERT_EXPR:
7828 op0 = expand_normal (treeop0);
7829 if (target == 0 || modifier == EXPAND_STACK_PARM)
7830 target = gen_reg_rtx (mode);
7832 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7833 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7834 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7835 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7836 else
7837 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7838 return target;
7840 case FIX_TRUNC_EXPR:
7841 op0 = expand_normal (treeop0);
7842 if (target == 0 || modifier == EXPAND_STACK_PARM)
7843 target = gen_reg_rtx (mode);
7844 expand_fix (target, op0, unsignedp);
7845 return target;
7847 case FLOAT_EXPR:
7848 op0 = expand_normal (treeop0);
7849 if (target == 0 || modifier == EXPAND_STACK_PARM)
7850 target = gen_reg_rtx (mode);
7851 /* expand_float can't figure out what to do if FROM has VOIDmode.
7852 So give it the correct mode. With -O, cse will optimize this. */
7853 if (GET_MODE (op0) == VOIDmode)
7854 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7855 op0);
7856 expand_float (target, op0,
7857 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7858 return target;
7860 case NEGATE_EXPR:
7861 op0 = expand_expr (treeop0, subtarget,
7862 VOIDmode, EXPAND_NORMAL);
7863 if (modifier == EXPAND_STACK_PARM)
7864 target = 0;
7865 temp = expand_unop (mode,
7866 optab_for_tree_code (NEGATE_EXPR, type,
7867 optab_default),
7868 op0, target, 0);
7869 gcc_assert (temp);
7870 return REDUCE_BIT_FIELD (temp);
7872 case ABS_EXPR:
7873 op0 = expand_expr (treeop0, subtarget,
7874 VOIDmode, EXPAND_NORMAL);
7875 if (modifier == EXPAND_STACK_PARM)
7876 target = 0;
7878 /* ABS_EXPR is not valid for complex arguments. */
7879 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7880 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7882 /* Unsigned abs is simply the operand. Testing here means we don't
7883 risk generating incorrect code below. */
7884 if (TYPE_UNSIGNED (type))
7885 return op0;
7887 return expand_abs (mode, op0, target, unsignedp,
7888 safe_from_p (target, treeop0, 1));
7890 case MAX_EXPR:
7891 case MIN_EXPR:
7892 target = original_target;
7893 if (target == 0
7894 || modifier == EXPAND_STACK_PARM
7895 || (MEM_P (target) && MEM_VOLATILE_P (target))
7896 || GET_MODE (target) != mode
7897 || (REG_P (target)
7898 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7899 target = gen_reg_rtx (mode);
7900 expand_operands (treeop0, treeop1,
7901 target, &op0, &op1, EXPAND_NORMAL);
7903 /* First try to do it with a special MIN or MAX instruction.
7904 If that does not win, use a conditional jump to select the proper
7905 value. */
7906 this_optab = optab_for_tree_code (code, type, optab_default);
7907 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7908 OPTAB_WIDEN);
7909 if (temp != 0)
7910 return temp;
7912 /* At this point, a MEM target is no longer useful; we will get better
7913 code without it. */
7915 if (! REG_P (target))
7916 target = gen_reg_rtx (mode);
7918 /* If op1 was placed in target, swap op0 and op1. */
7919 if (target != op0 && target == op1)
7921 temp = op0;
7922 op0 = op1;
7923 op1 = temp;
7926 /* We generate better code and avoid problems with op1 mentioning
7927 target by forcing op1 into a pseudo if it isn't a constant. */
7928 if (! CONSTANT_P (op1))
7929 op1 = force_reg (mode, op1);
7932 enum rtx_code comparison_code;
7933 rtx cmpop1 = op1;
7935 if (code == MAX_EXPR)
7936 comparison_code = unsignedp ? GEU : GE;
7937 else
7938 comparison_code = unsignedp ? LEU : LE;
7940 /* Canonicalize to comparisons against 0. */
7941 if (op1 == const1_rtx)
7943 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7944 or (a != 0 ? a : 1) for unsigned.
7945 For MIN we are safe converting (a <= 1 ? a : 1)
7946 into (a <= 0 ? a : 1) */
7947 cmpop1 = const0_rtx;
7948 if (code == MAX_EXPR)
7949 comparison_code = unsignedp ? NE : GT;
7951 if (op1 == constm1_rtx && !unsignedp)
7953 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7954 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7955 cmpop1 = const0_rtx;
7956 if (code == MIN_EXPR)
7957 comparison_code = LT;
7959 #ifdef HAVE_conditional_move
7960 /* Use a conditional move if possible. */
7961 if (can_conditionally_move_p (mode))
7963 rtx insn;
7965 /* ??? Same problem as in expmed.c: emit_conditional_move
7966 forces a stack adjustment via compare_from_rtx, and we
7967 lose the stack adjustment if the sequence we are about
7968 to create is discarded. */
7969 do_pending_stack_adjust ();
7971 start_sequence ();
7973 /* Try to emit the conditional move. */
7974 insn = emit_conditional_move (target, comparison_code,
7975 op0, cmpop1, mode,
7976 op0, op1, mode,
7977 unsignedp);
7979 /* If we could do the conditional move, emit the sequence,
7980 and return. */
7981 if (insn)
7983 rtx seq = get_insns ();
7984 end_sequence ();
7985 emit_insn (seq);
7986 return target;
7989 /* Otherwise discard the sequence and fall back to code with
7990 branches. */
7991 end_sequence ();
7993 #endif
7994 if (target != op0)
7995 emit_move_insn (target, op0);
7997 temp = gen_label_rtx ();
7998 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7999 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8000 -1);
8002 emit_move_insn (target, op1);
8003 emit_label (temp);
8004 return target;
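/* The branch-based fallback just emitted, written out in C (sketch only):
   TARGET starts as op0 and is overwritten by op1 unless the MAX comparison
   already holds; the conditional-move path simply replaces the jump with a
   cmove of op1 into TARGET.  */
static int
sketch_signed_max (int op0, int op1)
{
  int target = op0;
  if (!(target >= op1))           /* do_compare_rtx_and_jump with GE */
    target = op1;
  return target;
}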
8006 case BIT_NOT_EXPR:
8007 op0 = expand_expr (treeop0, subtarget,
8008 VOIDmode, EXPAND_NORMAL);
8009 if (modifier == EXPAND_STACK_PARM)
8010 target = 0;
8011 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8012 gcc_assert (temp);
8013 return temp;
8015 /* ??? Can optimize bitwise operations with one arg constant.
8016 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8017 and (a bitwise1 b) bitwise2 b (etc)
8018 but that is probably not worthwhile. */
8020 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8021 boolean values when we want in all cases to compute both of them. In
8022 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8023 as actual zero-or-1 values and then bitwise anding. In cases where
8024 there cannot be any side effects, better code would be made by
8025 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8026 how to recognize those cases. */
8028 case TRUTH_AND_EXPR:
8029 code = BIT_AND_EXPR;
8030 case BIT_AND_EXPR:
8031 goto binop;
8033 case TRUTH_OR_EXPR:
8034 code = BIT_IOR_EXPR;
8035 case BIT_IOR_EXPR:
8036 goto binop;
8038 case TRUTH_XOR_EXPR:
8039 code = BIT_XOR_EXPR;
8040 case BIT_XOR_EXPR:
8041 goto binop;
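/* The TRUTH_*_EXPR vs. TRUTH_*IF_EXPR distinction described in the comment
   above, as C (illustrative): both operands are reduced to 0 or 1 and
   combined with the bitwise operator, so the second operand is always
   evaluated, unlike the short-circuiting ANDIF/ORIF forms.  */
static int
sketch_truth_and (int a, int b)
{
  int op0 = (a != 0);
  int op1 = (b != 0);             /* evaluated even when op0 is 0 */
  return op0 & op1;
}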
8043 case LROTATE_EXPR:
8044 case RROTATE_EXPR:
8045 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8046 || (GET_MODE_PRECISION (TYPE_MODE (type))
8047 == TYPE_PRECISION (type)));
8048 /* fall through */
8050 case LSHIFT_EXPR:
8051 case RSHIFT_EXPR:
8052 /* If this is a fixed-point operation, then we cannot use the code
8053 below because "expand_shift" doesn't support sat/no-sat fixed-point
8054 shifts. */
8055 if (ALL_FIXED_POINT_MODE_P (mode))
8056 goto binop;
8058 if (! safe_from_p (subtarget, treeop1, 1))
8059 subtarget = 0;
8060 if (modifier == EXPAND_STACK_PARM)
8061 target = 0;
8062 op0 = expand_expr (treeop0, subtarget,
8063 VOIDmode, EXPAND_NORMAL);
8064 temp = expand_shift (code, mode, op0, treeop1, target,
8065 unsignedp);
8066 if (code == LSHIFT_EXPR)
8067 temp = REDUCE_BIT_FIELD (temp);
8068 return temp;
8070 /* Could determine the answer when only additive constants differ. Also,
8071 the addition of one can be handled by changing the condition. */
8072 case LT_EXPR:
8073 case LE_EXPR:
8074 case GT_EXPR:
8075 case GE_EXPR:
8076 case EQ_EXPR:
8077 case NE_EXPR:
8078 case UNORDERED_EXPR:
8079 case ORDERED_EXPR:
8080 case UNLT_EXPR:
8081 case UNLE_EXPR:
8082 case UNGT_EXPR:
8083 case UNGE_EXPR:
8084 case UNEQ_EXPR:
8085 case LTGT_EXPR:
8086 temp = do_store_flag (ops,
8087 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8088 tmode != VOIDmode ? tmode : mode);
8089 if (temp)
8090 return temp;
8092 /* Use a compare and a jump for BLKmode comparisons, or for function
8093 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8095 if ((target == 0
8096 || modifier == EXPAND_STACK_PARM
8097 || ! safe_from_p (target, treeop0, 1)
8098 || ! safe_from_p (target, treeop1, 1)
8099 /* Make sure we don't have a hard reg (such as function's return
8100 value) live across basic blocks, if not optimizing. */
8101 || (!optimize && REG_P (target)
8102 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8103 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8105 emit_move_insn (target, const0_rtx);
8107 op1 = gen_label_rtx ();
8108 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8110 emit_move_insn (target, const1_rtx);
8112 emit_label (op1);
8113 return target;
8115 case TRUTH_NOT_EXPR:
8116 if (modifier == EXPAND_STACK_PARM)
8117 target = 0;
8118 op0 = expand_expr (treeop0, target,
8119 VOIDmode, EXPAND_NORMAL);
8120 /* The parser is careful to generate TRUTH_NOT_EXPR
8121 only with operands that are always zero or one. */
8122 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8123 target, 1, OPTAB_LIB_WIDEN);
8124 gcc_assert (temp);
8125 return temp;
8127 case COMPLEX_EXPR:
8128 /* Get the rtx code of the operands. */
8129 op0 = expand_normal (treeop0);
8130 op1 = expand_normal (treeop1);
8132 if (!target)
8133 target = gen_reg_rtx (TYPE_MODE (type));
8135 /* Move the real (op0) and imaginary (op1) parts to their location. */
8136 write_complex_part (target, op0, false);
8137 write_complex_part (target, op1, true);
8139 return target;
8141 case WIDEN_SUM_EXPR:
8143 tree oprnd0 = treeop0;
8144 tree oprnd1 = treeop1;
8146 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8147 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8148 target, unsignedp);
8149 return target;
8152 case REDUC_MAX_EXPR:
8153 case REDUC_MIN_EXPR:
8154 case REDUC_PLUS_EXPR:
8156 op0 = expand_normal (treeop0);
8157 this_optab = optab_for_tree_code (code, type, optab_default);
8158 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8159 gcc_assert (temp);
8160 return temp;
8163 case VEC_EXTRACT_EVEN_EXPR:
8164 case VEC_EXTRACT_ODD_EXPR:
8166 expand_operands (treeop0, treeop1,
8167 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8168 this_optab = optab_for_tree_code (code, type, optab_default);
8169 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8170 OPTAB_WIDEN);
8171 gcc_assert (temp);
8172 return temp;
8175 case VEC_INTERLEAVE_HIGH_EXPR:
8176 case VEC_INTERLEAVE_LOW_EXPR:
8178 expand_operands (treeop0, treeop1,
8179 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8180 this_optab = optab_for_tree_code (code, type, optab_default);
8181 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8182 OPTAB_WIDEN);
8183 gcc_assert (temp);
8184 return temp;
8187 case VEC_LSHIFT_EXPR:
8188 case VEC_RSHIFT_EXPR:
8190 target = expand_vec_shift_expr (ops, target);
8191 return target;
8194 case VEC_UNPACK_HI_EXPR:
8195 case VEC_UNPACK_LO_EXPR:
8197 op0 = expand_normal (treeop0);
8198 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8199 target, unsignedp);
8200 gcc_assert (temp);
8201 return temp;
8204 case VEC_UNPACK_FLOAT_HI_EXPR:
8205 case VEC_UNPACK_FLOAT_LO_EXPR:
8207 op0 = expand_normal (treeop0);
8208 /* The signedness is determined from input operand. */
8209 temp = expand_widen_pattern_expr
8210 (ops, op0, NULL_RTX, NULL_RTX,
8211 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8213 gcc_assert (temp);
8214 return temp;
8217 case VEC_WIDEN_MULT_HI_EXPR:
8218 case VEC_WIDEN_MULT_LO_EXPR:
8220 tree oprnd0 = treeop0;
8221 tree oprnd1 = treeop1;
8223 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8224 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8225 target, unsignedp);
8226 gcc_assert (target);
8227 return target;
8230 case VEC_PACK_TRUNC_EXPR:
8231 case VEC_PACK_SAT_EXPR:
8232 case VEC_PACK_FIX_TRUNC_EXPR:
8233 mode = TYPE_MODE (TREE_TYPE (treeop0));
8234 goto binop;
8236 case DOT_PROD_EXPR:
8238 tree oprnd0 = treeop0;
8239 tree oprnd1 = treeop1;
8240 tree oprnd2 = treeop2;
8241 rtx op2;
8243 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8244 op2 = expand_normal (oprnd2);
8245 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8246 target, unsignedp);
8247 return target;
8250 case REALIGN_LOAD_EXPR:
8252 tree oprnd0 = treeop0;
8253 tree oprnd1 = treeop1;
8254 tree oprnd2 = treeop2;
8255 rtx op2;
8257 this_optab = optab_for_tree_code (code, type, optab_default);
8258 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8259 op2 = expand_normal (oprnd2);
8260 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8261 target, unsignedp);
8262 gcc_assert (temp);
8263 return temp;
8266 default:
8267 gcc_unreachable ();
8270 /* Here to do an ordinary binary operator. */
8271 binop:
8272 expand_operands (treeop0, treeop1,
8273 subtarget, &op0, &op1, EXPAND_NORMAL);
8274 binop2:
8275 this_optab = optab_for_tree_code (code, type, optab_default);
8276 binop3:
8277 if (modifier == EXPAND_STACK_PARM)
8278 target = 0;
8279 temp = expand_binop (mode, this_optab, op0, op1, target,
8280 unsignedp, OPTAB_LIB_WIDEN);
8281 gcc_assert (temp);
8282 return REDUCE_BIT_FIELD (temp);
8284 #undef REDUCE_BIT_FIELD
8286 static rtx
8287 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8288 enum expand_modifier modifier, rtx *alt_rtl)
8290 rtx op0, op1, temp, decl_rtl;
8291 tree type;
8292 int unsignedp;
8293 enum machine_mode mode;
8294 enum tree_code code = TREE_CODE (exp);
8295 rtx subtarget, original_target;
8296 int ignore;
8297 tree context;
8298 bool reduce_bit_field;
8299 location_t loc = EXPR_LOCATION (exp);
8300 struct separate_ops ops;
8301 tree treeop0, treeop1, treeop2;
8302 tree ssa_name = NULL_TREE;
8303 gimple g;
8305 type = TREE_TYPE (exp);
8306 mode = TYPE_MODE (type);
8307 unsignedp = TYPE_UNSIGNED (type);
8309 treeop0 = treeop1 = treeop2 = NULL_TREE;
8310 if (!VL_EXP_CLASS_P (exp))
8311 switch (TREE_CODE_LENGTH (code))
8313 default:
8314 case 3: treeop2 = TREE_OPERAND (exp, 2);
8315 case 2: treeop1 = TREE_OPERAND (exp, 1);
8316 case 1: treeop0 = TREE_OPERAND (exp, 0);
8317 case 0: break;
8319 ops.code = code;
8320 ops.type = type;
8321 ops.op0 = treeop0;
8322 ops.op1 = treeop1;
8323 ops.op2 = treeop2;
8324 ops.location = loc;
8326 ignore = (target == const0_rtx
8327 || ((CONVERT_EXPR_CODE_P (code)
8328 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8329 && TREE_CODE (type) == VOID_TYPE));
8331 /* An operation in what may be a bit-field type needs the
8332 result to be reduced to the precision of the bit-field type,
8333 which is narrower than that of the type's mode. */
8334 reduce_bit_field = (!ignore
8335 && TREE_CODE (type) == INTEGER_TYPE
8336 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8338 /* If we are going to ignore this result, we need only do something
8339 if there is a side-effect somewhere in the expression. If there
8340 is, short-circuit the most common cases here. Note that we must
8341 not call expand_expr with anything but const0_rtx in case this
8342 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8344 if (ignore)
8346 if (! TREE_SIDE_EFFECTS (exp))
8347 return const0_rtx;
8349 /* Ensure we reference a volatile object even if value is ignored, but
8350 don't do this if all we are doing is taking its address. */
8351 if (TREE_THIS_VOLATILE (exp)
8352 && TREE_CODE (exp) != FUNCTION_DECL
8353 && mode != VOIDmode && mode != BLKmode
8354 && modifier != EXPAND_CONST_ADDRESS)
8356 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8357 if (MEM_P (temp))
8358 copy_to_reg (temp);
8359 return const0_rtx;
8362 if (TREE_CODE_CLASS (code) == tcc_unary
8363 || code == COMPONENT_REF || code == INDIRECT_REF)
8364 return expand_expr (treeop0, const0_rtx, VOIDmode,
8365 modifier);
8367 else if (TREE_CODE_CLASS (code) == tcc_binary
8368 || TREE_CODE_CLASS (code) == tcc_comparison
8369 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8371 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8372 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8373 return const0_rtx;
8375 else if (code == BIT_FIELD_REF)
8377 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8378 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8379 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8380 return const0_rtx;
8383 target = 0;
8386 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8387 target = 0;
8389 /* Use subtarget as the target for operand 0 of a binary operation. */
8390 subtarget = get_subtarget (target);
8391 original_target = target;
8393 switch (code)
8395 case LABEL_DECL:
8397 tree function = decl_function_context (exp);
8399 temp = label_rtx (exp);
8400 temp = gen_rtx_LABEL_REF (Pmode, temp);
8402 if (function != current_function_decl
8403 && function != 0)
8404 LABEL_REF_NONLOCAL_P (temp) = 1;
8406 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8407 return temp;
8410 case SSA_NAME:
8411 /* ??? ivopts calls expander, without any preparation from
8412 out-of-ssa. So fake instructions as if this was an access to the
8413 base variable. This unnecessarily allocates a pseudo, see how we can
8414 reuse it, if partition base vars have it set already. */
8415 if (!currently_expanding_to_rtl)
8416 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8417 NULL);
8419 g = get_gimple_for_ssa_name (exp);
8420 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8421 if (g == NULL
8422 && modifier == EXPAND_INITIALIZER
8423 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8424 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8425 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8426 g = SSA_NAME_DEF_STMT (exp);
8427 if (g)
8428 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8429 modifier, NULL);
8431 ssa_name = exp;
8432 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8433 exp = SSA_NAME_VAR (ssa_name);
8434 goto expand_decl_rtl;
8436 case PARM_DECL:
8437 case VAR_DECL:
8438 /* If a static var's type was incomplete when the decl was written,
8439 but the type is complete now, lay out the decl now. */
8440 if (DECL_SIZE (exp) == 0
8441 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8442 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8443 layout_decl (exp, 0);
8445 /* ... fall through ... */
8447 case FUNCTION_DECL:
8448 case RESULT_DECL:
8449 decl_rtl = DECL_RTL (exp);
8450 expand_decl_rtl:
8451 gcc_assert (decl_rtl);
8452 decl_rtl = copy_rtx (decl_rtl);
8453 /* Record writes to register variables. */
8454 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8455 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8457 int i = REGNO (decl_rtl);
8458 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8459 while (nregs)
8461 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8462 i++;
8463 nregs--;
8467 /* Ensure variable marked as used even if it doesn't go through
8468 a parser. If it hasn't been used yet, write out an external
8469 definition. */
8470 if (! TREE_USED (exp))
8472 assemble_external (exp);
8473 TREE_USED (exp) = 1;
8476 /* Show we haven't gotten RTL for this yet. */
8477 temp = 0;
8479 /* Variables inherited from containing functions should have
8480 been lowered by this point. */
8481 context = decl_function_context (exp);
8482 gcc_assert (!context
8483 || context == current_function_decl
8484 || TREE_STATIC (exp)
8485 || DECL_EXTERNAL (exp)
8486 /* ??? C++ creates functions that are not TREE_STATIC. */
8487 || TREE_CODE (exp) == FUNCTION_DECL);
8489 /* This is the case of an array whose size is to be determined
8490 from its initializer, while the initializer is still being parsed.
8491 See expand_decl. */
8493 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8494 temp = validize_mem (decl_rtl);
8496 /* If DECL_RTL is memory, we are in the normal case; if the
8497 address is not valid, get the address into a register. */
8499 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8501 if (alt_rtl)
8502 *alt_rtl = decl_rtl;
8503 decl_rtl = use_anchored_address (decl_rtl);
8504 if (modifier != EXPAND_CONST_ADDRESS
8505 && modifier != EXPAND_SUM
8506 && !memory_address_addr_space_p (DECL_MODE (exp),
8507 XEXP (decl_rtl, 0),
8508 MEM_ADDR_SPACE (decl_rtl)))
8509 temp = replace_equiv_address (decl_rtl,
8510 copy_rtx (XEXP (decl_rtl, 0)));
8513 /* If we got something, return it. But first, set the alignment
8514 if the address is a register. */
8515 if (temp != 0)
8517 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8518 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8520 return temp;
8523 /* If the mode of DECL_RTL does not match that of the decl, it
8524 must be a promoted value. We return a SUBREG of the wanted mode,
8525 but mark it so that we know that it was already extended. */
8526 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8528 enum machine_mode pmode;
8530 /* Get the signedness to be used for this variable. Ensure we get
8531 the same mode we got when the variable was declared. */
8532 if (code == SSA_NAME
8533 && (g = SSA_NAME_DEF_STMT (ssa_name))
8534 && gimple_code (g) == GIMPLE_CALL)
8535 pmode = promote_function_mode (type, mode, &unsignedp,
8536 gimple_call_fntype (g),
8537 2);
8538 else
8539 pmode = promote_decl_mode (exp, &unsignedp);
8540 gcc_assert (GET_MODE (decl_rtl) == pmode);
8542 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8543 SUBREG_PROMOTED_VAR_P (temp) = 1;
8544 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8545 return temp;
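/* The promotion case above, seen from the source side (a sketch; whether it
   applies depends on the target's promotion rules as queried through
   promote_decl_mode): a narrow variable is kept word-sized and already
   extended in its register, so marking the SUBREG as promoted lets a later
   widening use skip a redundant extend.  */
static int
sketch_promoted_use (signed char c)
{
  return c + 1;                   /* no extra sign-extension needed when the
                                     register is known to hold C extended */
}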
8548 return decl_rtl;
8550 case INTEGER_CST:
8551 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8552 TREE_INT_CST_HIGH (exp), mode);
8554 return temp;
8556 case VECTOR_CST:
8558 tree tmp = NULL_TREE;
8559 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8560 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8561 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8562 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8563 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8564 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8565 return const_vector_from_tree (exp);
8566 if (GET_MODE_CLASS (mode) == MODE_INT)
8568 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8569 if (type_for_mode)
8570 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8572 if (!tmp)
8573 tmp = build_constructor_from_list (type,
8574 TREE_VECTOR_CST_ELTS (exp));
8575 return expand_expr (tmp, ignore ? const0_rtx : target,
8576 tmode, modifier);
8579 case CONST_DECL:
8580 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8582 case REAL_CST:
8583 /* If optimized, generate immediate CONST_DOUBLE
8584 which will be turned into memory by reload if necessary.
8586 We used to force a register so that loop.c could see it. But
8587 this does not allow gen_* patterns to perform optimizations with
8588 the constants. It also produces two insns in cases like "x = 1.0;".
8589 On most machines, floating-point constants are not permitted in
8590 many insns, so we'd end up copying it to a register in any case.
8592 Now, we do the copying in expand_binop, if appropriate. */
8593 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8594 TYPE_MODE (TREE_TYPE (exp)));
8596 case FIXED_CST:
8597 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8598 TYPE_MODE (TREE_TYPE (exp)));
8600 case COMPLEX_CST:
8601 /* Handle evaluating a complex constant in a CONCAT target. */
8602 if (original_target && GET_CODE (original_target) == CONCAT)
8604 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8605 rtx rtarg, itarg;
8607 rtarg = XEXP (original_target, 0);
8608 itarg = XEXP (original_target, 1);
8610 /* Move the real and imaginary parts separately. */
8611 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8612 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8614 if (op0 != rtarg)
8615 emit_move_insn (rtarg, op0);
8616 if (op1 != itarg)
8617 emit_move_insn (itarg, op1);
8619 return original_target;
8622 /* ... fall through ... */
8624 case STRING_CST:
8625 temp = expand_expr_constant (exp, 1, modifier);
8627 /* temp contains a constant address.
8628 On RISC machines where a constant address isn't valid,
8629 make some insns to get that address into a register. */
8630 if (modifier != EXPAND_CONST_ADDRESS
8631 && modifier != EXPAND_INITIALIZER
8632 && modifier != EXPAND_SUM
8633 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8634 MEM_ADDR_SPACE (temp)))
8635 return replace_equiv_address (temp,
8636 copy_rtx (XEXP (temp, 0)));
8637 return temp;
8639 case SAVE_EXPR:
8641 tree val = treeop0;
8642 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8644 if (!SAVE_EXPR_RESOLVED_P (exp))
8646 /* We can indeed still hit this case, typically via builtin
8647 expanders calling save_expr immediately before expanding
8648 something. Assume this means that we only have to deal
8649 with non-BLKmode values. */
8650 gcc_assert (GET_MODE (ret) != BLKmode);
8652 val = build_decl (EXPR_LOCATION (exp),
8653 VAR_DECL, NULL, TREE_TYPE (exp));
8654 DECL_ARTIFICIAL (val) = 1;
8655 DECL_IGNORED_P (val) = 1;
8656 treeop0 = val;
8657 TREE_OPERAND (exp, 0) = treeop0;
8658 SAVE_EXPR_RESOLVED_P (exp) = 1;
8660 if (!CONSTANT_P (ret))
8661 ret = copy_to_reg (ret);
8662 SET_DECL_RTL (val, ret);
8665 return ret;
8669 case CONSTRUCTOR:
8670 /* If we don't need the result, just ensure we evaluate any
8671 subexpressions. */
8672 if (ignore)
8674 unsigned HOST_WIDE_INT idx;
8675 tree value;
8677 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8678 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8680 return const0_rtx;
8683 return expand_constructor (exp, target, modifier, false);
8685 case TARGET_MEM_REF:
8687 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8688 struct mem_address addr;
8689 int icode, align;
8691 get_address_description (exp, &addr);
8692 op0 = addr_for_mem_ref (&addr, as, true);
8693 op0 = memory_address_addr_space (mode, op0, as);
8694 temp = gen_rtx_MEM (mode, op0);
8695 set_mem_attributes (temp, exp, 0);
8696 set_mem_addr_space (temp, as);
8697 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8698 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8699 if (mode != BLKmode
8700 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8701 /* If the target does not have special handling for unaligned
8702 loads of mode then it can use regular moves for them. */
8703 && ((icode = optab_handler (movmisalign_optab, mode))
8704 != CODE_FOR_nothing))
8706 rtx reg, insn;
8708 /* We've already validated the memory, and we're creating a
8709 new pseudo destination. The predicates really can't fail. */
8710 reg = gen_reg_rtx (mode);
8712 /* Nor can the insn generator. */
8713 insn = GEN_FCN (icode) (reg, temp);
8714 gcc_assert (insn != NULL_RTX);
8715 emit_insn (insn);
8717 return reg;
8719 return temp;
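/* What the movmisalign path spares us (illustrative): in portable C an
   access through a possibly under-aligned pointer has to be done byte by
   byte (or via memcpy); a target with a misaligned-move pattern instead
   loads the whole mode in one go into a fresh pseudo, as above.  */
static int
sketch_unaligned_load (const unsigned char *p)
{
  int v;
  unsigned char *d = (unsigned char *) &v;
  unsigned i;
  for (i = 0; i < sizeof v; i++)
    d[i] = p[i];                  /* alignment-safe byte copy */
  return v;
}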
8722 case MEM_REF:
8724 addr_space_t as
8725 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8726 enum machine_mode address_mode;
8727 tree base = TREE_OPERAND (exp, 0);
8728 gimple def_stmt;
8729 int icode, align;
8730 /* Handle expansion of non-aliased memory with non-BLKmode. That
8731 might end up in a register. */
8732 if (TREE_CODE (base) == ADDR_EXPR)
8734 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8735 tree bit_offset;
8736 base = TREE_OPERAND (base, 0);
8737 if (!DECL_P (base))
8739 HOST_WIDE_INT off;
8740 base = get_addr_base_and_unit_offset (base, &off);
8741 gcc_assert (base);
8742 offset += off;
8744 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8745 decl we must use bitfield operations. */
8746 if (DECL_P (base)
8747 && !TREE_ADDRESSABLE (base)
8748 && DECL_MODE (base) != BLKmode
8749 && DECL_RTL_SET_P (base)
8750 && !MEM_P (DECL_RTL (base)))
8752 tree bftype;
8753 if (offset == 0
8754 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8755 && (GET_MODE_BITSIZE (DECL_MODE (base))
8756 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8757 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8758 TREE_TYPE (exp), base),
8759 target, tmode, modifier);
8760 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8761 bftype = TREE_TYPE (base);
8762 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8763 bftype = TREE_TYPE (exp);
8764 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8765 base,
8766 TYPE_SIZE (TREE_TYPE (exp)),
8767 bit_offset),
8768 target, tmode, modifier);
8771 address_mode = targetm.addr_space.address_mode (as);
8772 base = TREE_OPERAND (exp, 0);
8773 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8775 tree mask = gimple_assign_rhs2 (def_stmt);
8776 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8777 gimple_assign_rhs1 (def_stmt), mask);
8778 TREE_OPERAND (exp, 0) = base;
8780 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8781 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8782 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8783 op0 = memory_address_addr_space (address_mode, op0, as);
8784 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8786 rtx off
8787 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8788 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8790 op0 = memory_address_addr_space (mode, op0, as);
8791 temp = gen_rtx_MEM (mode, op0);
8792 set_mem_attributes (temp, exp, 0);
8793 set_mem_addr_space (temp, as);
8794 if (TREE_THIS_VOLATILE (exp))
8795 MEM_VOLATILE_P (temp) = 1;
8796 if (mode != BLKmode
8797 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8798 /* If the target does not have special handling for unaligned
8799 loads of mode then it can use regular moves for them. */
8800 && ((icode = optab_handler (movmisalign_optab, mode))
8801 != CODE_FOR_nothing))
8803 rtx reg, insn;
8805 /* We've already validated the memory, and we're creating a
8806 new pseudo destination. The predicates really can't fail. */
8807 reg = gen_reg_rtx (mode);
8809 /* Nor can the insn generator. */
8810 insn = GEN_FCN (icode) (reg, temp);
8811 emit_insn (insn);
8813 return reg;
8815 return temp;
8818 case ARRAY_REF:
8821 tree array = treeop0;
8822 tree index = treeop1;
8824 /* Fold an expression like: "foo"[2].
8825 This is not done in fold so it won't happen inside &.
8826 Don't fold if this is for wide characters since it's too
8827 difficult to do correctly and this is a very rare case. */
8829 if (modifier != EXPAND_CONST_ADDRESS
8830 && modifier != EXPAND_INITIALIZER
8831 && modifier != EXPAND_MEMORY)
8833 tree t = fold_read_from_constant_string (exp);
8835 if (t)
8836 return expand_expr (t, target, tmode, modifier);
8839 /* If this is a constant index into a constant array,
8840 just get the value from the array. Handle both the cases when
8841 we have an explicit constructor and when our operand is a variable
8842 that was declared const. */
8844 if (modifier != EXPAND_CONST_ADDRESS
8845 && modifier != EXPAND_INITIALIZER
8846 && modifier != EXPAND_MEMORY
8847 && TREE_CODE (array) == CONSTRUCTOR
8848 && ! TREE_SIDE_EFFECTS (array)
8849 && TREE_CODE (index) == INTEGER_CST)
8851 unsigned HOST_WIDE_INT ix;
8852 tree field, value;
8854 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8855 field, value)
8856 if (tree_int_cst_equal (field, index))
8858 if (!TREE_SIDE_EFFECTS (value))
8859 return expand_expr (fold (value), target, tmode, modifier);
8860 break;
8864 else if (optimize >= 1
8865 && modifier != EXPAND_CONST_ADDRESS
8866 && modifier != EXPAND_INITIALIZER
8867 && modifier != EXPAND_MEMORY
8868 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8869 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8870 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8871 && const_value_known_p (array))
8873 if (TREE_CODE (index) == INTEGER_CST)
8875 tree init = DECL_INITIAL (array);
8877 if (TREE_CODE (init) == CONSTRUCTOR)
8879 unsigned HOST_WIDE_INT ix;
8880 tree field, value;
8882 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8883 field, value)
8884 if (tree_int_cst_equal (field, index))
8886 if (TREE_SIDE_EFFECTS (value))
8887 break;
8889 if (TREE_CODE (value) == CONSTRUCTOR)
8891 /* If VALUE is a CONSTRUCTOR, this
8892 optimization is only useful if
8893 this doesn't store the CONSTRUCTOR
8894 into memory. If it does, it is more
8895 efficient to just load the data from
8896 the array directly. */
8897 rtx ret = expand_constructor (value, target,
8898 modifier, true);
8899 if (ret == NULL_RTX)
8900 break;
8903 return expand_expr (fold (value), target, tmode,
8904 modifier);
8907 else if (TREE_CODE (init) == STRING_CST)
8909 tree index1 = index;
8910 tree low_bound = array_ref_low_bound (exp);
8911 index1 = fold_convert_loc (loc, sizetype,
8912 treeop1);
8914 /* Optimize the special-case of a zero lower bound.
8916 We convert the low_bound to sizetype to avoid some problems
8917 with constant folding. (E.g. suppose the lower bound is 1,
8918 and its mode is QI. Without the conversion, (ARRAY
8919 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8920 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8922 if (! integer_zerop (low_bound))
8923 index1 = size_diffop_loc (loc, index1,
8924 fold_convert_loc (loc, sizetype,
8925 low_bound));
8927 if (0 > compare_tree_int (index1,
8928 TREE_STRING_LENGTH (init)))
8930 tree type = TREE_TYPE (TREE_TYPE (init));
8931 enum machine_mode mode = TYPE_MODE (type);
8933 if (GET_MODE_CLASS (mode) == MODE_INT
8934 && GET_MODE_SIZE (mode) == 1)
8935 return gen_int_mode (TREE_STRING_POINTER (init)
8936 [TREE_INT_CST_LOW (index1)],
8937 mode);
8943 goto normal_inner_ref;
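/* The constant-string folds handled above, in source form (illustration
   only): a constant index into a string literal, or into a const array
   whose STRING_CST initializer is known, is read straight out of the
   string, so no load from an array object is emitted.  */
static int
sketch_string_index (void)
{
  return "foo"[2];                /* folds to the character constant 'o' */
}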
8945 case COMPONENT_REF:
8946 /* If the operand is a CONSTRUCTOR, we can just extract the
8947 appropriate field if it is present. */
8948 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8950 unsigned HOST_WIDE_INT idx;
8951 tree field, value;
8953 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8954 idx, field, value)
8955 if (field == treeop1
8956 /* We can normally use the value of the field in the
8957 CONSTRUCTOR. However, if this is a bitfield in
8958 an integral mode that we can fit in a HOST_WIDE_INT,
8959 we must mask only the number of bits in the bitfield,
8960 since this is done implicitly by the constructor. If
8961 the bitfield does not meet either of those conditions,
8962 we can't do this optimization. */
8963 && (! DECL_BIT_FIELD (field)
8964 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8965 && (GET_MODE_BITSIZE (DECL_MODE (field))
8966 <= HOST_BITS_PER_WIDE_INT))))
8968 if (DECL_BIT_FIELD (field)
8969 && modifier == EXPAND_STACK_PARM)
8970 target = 0;
8971 op0 = expand_expr (value, target, tmode, modifier);
8972 if (DECL_BIT_FIELD (field))
8974 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8975 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8977 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8979 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8980 op0 = expand_and (imode, op0, op1, target);
8982 else
8984 tree count
8985 = build_int_cst (NULL_TREE,
8986 GET_MODE_BITSIZE (imode) - bitsize);
8988 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8989 target, 0);
8990 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8991 target, 0);
8995 return op0;
8998 goto normal_inner_ref;
9000 case BIT_FIELD_REF:
9001 case ARRAY_RANGE_REF:
9002 normal_inner_ref:
9004 enum machine_mode mode1, mode2;
9005 HOST_WIDE_INT bitsize, bitpos;
9006 tree offset;
9007 int volatilep = 0, must_force_mem;
9008 bool packedp = false;
9009 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9010 &mode1, &unsignedp, &volatilep, true);
9011 rtx orig_op0, memloc;
9013 /* If we got back the original object, something is wrong. Perhaps
9014 we are evaluating an expression too early. In any event, don't
9015 infinitely recurse. */
9016 gcc_assert (tem != exp);
9018 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9019 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9020 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9021 packedp = true;
9023 /* If TEM's type is a union of variable size, pass TARGET to the inner
9024 computation, since it will need a temporary and TARGET is known
9025 to have to do. This occurs in unchecked conversion in Ada. */
9026 orig_op0 = op0
9027 = expand_expr (tem,
9028 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9029 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9030 != INTEGER_CST)
9031 && modifier != EXPAND_STACK_PARM
9032 ? target : NULL_RTX),
9033 VOIDmode,
9034 (modifier == EXPAND_INITIALIZER
9035 || modifier == EXPAND_CONST_ADDRESS
9036 || modifier == EXPAND_STACK_PARM)
9037 ? modifier : EXPAND_NORMAL);
9040 /* If the bitfield is volatile, we want to access it in the
9041 field's mode, not the computed mode.
9042 If a MEM has VOIDmode (external with incomplete type),
9043 use BLKmode for it instead. */
9044 if (MEM_P (op0))
9046 if (volatilep && flag_strict_volatile_bitfields > 0)
9047 op0 = adjust_address (op0, mode1, 0);
9048 else if (GET_MODE (op0) == VOIDmode)
9049 op0 = adjust_address (op0, BLKmode, 0);
9052 mode2
9053 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9055 /* If we have either an offset, a BLKmode result, or a reference
9056 outside the underlying object, we must force it to memory.
9057 Such a case can occur in Ada if we have unchecked conversion
9058 of an expression from a scalar type to an aggregate type or
9059 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9060 passed a partially uninitialized object or a view-conversion
9061 to a larger size. */
9062 must_force_mem = (offset
9063 || mode1 == BLKmode
9064 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9066 /* Handle CONCAT first. */
9067 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9069 if (bitpos == 0
9070 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9071 return op0;
9072 if (bitpos == 0
9073 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9074 && bitsize)
9076 op0 = XEXP (op0, 0);
9077 mode2 = GET_MODE (op0);
9079 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9080 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9081 && bitpos
9082 && bitsize)
9084 op0 = XEXP (op0, 1);
9085 bitpos = 0;
9086 mode2 = GET_MODE (op0);
9088 else
9089 /* Otherwise force into memory. */
9090 must_force_mem = 1;
9093 /* If this is a constant, put it in a register if it is a legitimate
9094 constant and we don't need a memory reference. */
9095 if (CONSTANT_P (op0)
9096 && mode2 != BLKmode
9097 && LEGITIMATE_CONSTANT_P (op0)
9098 && !must_force_mem)
9099 op0 = force_reg (mode2, op0);
9101 /* Otherwise, if this is a constant, try to force it to the constant
9102 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9103 is a legitimate constant. */
9104 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9105 op0 = validize_mem (memloc);
9107 /* Otherwise, if this is a constant or the object is not in memory
9108 and need be, put it there. */
9109 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9111 tree nt = build_qualified_type (TREE_TYPE (tem),
9112 (TYPE_QUALS (TREE_TYPE (tem))
9113 | TYPE_QUAL_CONST));
9114 memloc = assign_temp (nt, 1, 1, 1);
9115 emit_move_insn (memloc, op0);
9116 op0 = memloc;
9119 if (offset)
9121 enum machine_mode address_mode;
9122 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9123 EXPAND_SUM);
9125 gcc_assert (MEM_P (op0));
9127 address_mode
9128 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9129 if (GET_MODE (offset_rtx) != address_mode)
9130 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9132 if (GET_MODE (op0) == BLKmode
9133 /* A constant address in OP0 can have VOIDmode, we must
9134 not try to call force_reg in that case. */
9135 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9136 && bitsize != 0
9137 && (bitpos % bitsize) == 0
9138 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9139 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9141 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9142 bitpos = 0;
9145 op0 = offset_address (op0, offset_rtx,
9146 highest_pow2_factor (offset));
9149 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9150 record its alignment as BIGGEST_ALIGNMENT. */
9151 if (MEM_P (op0) && bitpos == 0 && offset != 0
9152 && is_aligning_offset (offset, tem))
9153 set_mem_align (op0, BIGGEST_ALIGNMENT);
9155 /* Don't forget about volatility even if this is a bitfield. */
9156 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9158 if (op0 == orig_op0)
9159 op0 = copy_rtx (op0);
9161 MEM_VOLATILE_P (op0) = 1;
9164 /* In cases where an aligned union has an unaligned object
9165 as a field, we might be extracting a BLKmode value from
9166 an integer-mode (e.g., SImode) object. Handle this case
9167 by doing the extract into an object as wide as the field
9168 (which we know to be the width of a basic mode), then
9169 storing into memory, and changing the mode to BLKmode. */
9170 if (mode1 == VOIDmode
9171 || REG_P (op0) || GET_CODE (op0) == SUBREG
9172 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9173 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9174 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9175 && modifier != EXPAND_CONST_ADDRESS
9176 && modifier != EXPAND_INITIALIZER)
9177 /* If the field is volatile, we always want an aligned
9178 access. Only do this if the access is not already naturally
9179 aligned, otherwise "normal" (non-bitfield) volatile fields
9180 become non-addressable. */
9181 || (volatilep && flag_strict_volatile_bitfields > 0
9182 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9183 /* If the field isn't aligned enough to fetch as a memref,
9184 fetch it as a bit field. */
9185 || (mode1 != BLKmode
9186 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9187 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9188 || (MEM_P (op0)
9189 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9190 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9191 && ((modifier == EXPAND_CONST_ADDRESS
9192 || modifier == EXPAND_INITIALIZER)
9193 ? STRICT_ALIGNMENT
9194 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9195 || (bitpos % BITS_PER_UNIT != 0)))
9196 /* If the type and the field are a constant size and the
9197 size of the type isn't the same size as the bitfield,
9198 we must use bitfield operations. */
9199 || (bitsize >= 0
9200 && TYPE_SIZE (TREE_TYPE (exp))
9201 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9202 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9203 bitsize)))
9205 enum machine_mode ext_mode = mode;
9207 if (ext_mode == BLKmode
9208 && ! (target != 0 && MEM_P (op0)
9209 && MEM_P (target)
9210 && bitpos % BITS_PER_UNIT == 0))
9211 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9213 if (ext_mode == BLKmode)
9215 if (target == 0)
9216 target = assign_temp (type, 0, 1, 1);
9218 if (bitsize == 0)
9219 return target;
9221 /* In this case, BITPOS must start at a byte boundary and
9222 TARGET, if specified, must be a MEM. */
9223 gcc_assert (MEM_P (op0)
9224 && (!target || MEM_P (target))
9225 && !(bitpos % BITS_PER_UNIT));
9227 emit_block_move (target,
9228 adjust_address (op0, VOIDmode,
9229 bitpos / BITS_PER_UNIT),
9230 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9231 / BITS_PER_UNIT),
9232 (modifier == EXPAND_STACK_PARM
9233 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9235 return target;
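/* Illustrative sketch: the GEN_INT argument above is the usual round-up
conversion from a bit count to a byte count, i.e. with hypothetical
plain-C names:

  bytes = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

so a 9-bit field with 8-bit units is moved as 2 bytes.  */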
9238 op0 = validize_mem (op0);
9240 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9241 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9243 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9244 (modifier == EXPAND_STACK_PARM
9245 ? NULL_RTX : target),
9246 ext_mode, ext_mode);
9248 /* If the result is a record type and BITSIZE is narrower than
9249 the mode of OP0, an integral mode, and this is a big endian
9250 machine, we must put the field into the high-order bits. */
9251 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9252 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9253 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9254 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9255 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9256 - bitsize),
9257 op0, 1);
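/* Illustrative sketch: the shift above places an extracted BITSIZE-bit
field into the high-order bits of op0's integer mode, which is where a
big-endian record layout expects it.  With hypothetical plain-C names,
for a 32-bit word:

  field <<= 32 - bitsize;
*/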
9259 /* If the result type is BLKmode, store the data into a temporary
9260 of the appropriate type, but with the mode corresponding to the
9261 mode for the data we have (op0's mode). It's tempting to make
9262 this a constant type, since we know it's only being stored once,
9263 but that can cause problems if we are taking the address of this
9264 COMPONENT_REF because the MEM of any reference via that address
9265 will have flags corresponding to the type, which will not
9266 necessarily be constant. */
9267 if (mode == BLKmode)
9269 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9270 rtx new_rtx;
9272 /* If the reference doesn't use the alias set of its type,
9273 we cannot create the temporary using that type. */
9274 if (component_uses_parent_alias_set (exp))
9276 new_rtx = assign_stack_local (ext_mode, size, 0);
9277 set_mem_alias_set (new_rtx, get_alias_set (exp));
9279 else
9280 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9282 emit_move_insn (new_rtx, op0);
9283 op0 = copy_rtx (new_rtx);
9284 PUT_MODE (op0, BLKmode);
9285 set_mem_attributes (op0, exp, 1);
9288 return op0;
9291 /* If the result is BLKmode, use that to access the object
9292 now as well. */
9293 if (mode == BLKmode)
9294 mode1 = BLKmode;
9296 /* Get a reference to just this component. */
9297 if (modifier == EXPAND_CONST_ADDRESS
9298 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9299 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9300 else
9301 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9303 if (op0 == orig_op0)
9304 op0 = copy_rtx (op0);
9306 set_mem_attributes (op0, exp, 0);
9307 if (REG_P (XEXP (op0, 0)))
9308 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9310 MEM_VOLATILE_P (op0) |= volatilep;
9311 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9312 || modifier == EXPAND_CONST_ADDRESS
9313 || modifier == EXPAND_INITIALIZER)
9314 return op0;
9315 else if (target == 0)
9316 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9318 convert_move (target, op0, unsignedp);
9319 return target;
9322 case OBJ_TYPE_REF:
9323 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9325 case CALL_EXPR:
9326 /* All valid uses of __builtin_va_arg_pack () are removed during
9327 inlining. */
9328 if (CALL_EXPR_VA_ARG_PACK (exp))
9329 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9331 tree fndecl = get_callee_fndecl (exp), attr;
9333 if (fndecl
9334 && (attr = lookup_attribute ("error",
9335 DECL_ATTRIBUTES (fndecl))) != NULL)
9336 error ("%Kcall to %qs declared with attribute error: %s",
9337 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9338 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9339 if (fndecl
9340 && (attr = lookup_attribute ("warning",
9341 DECL_ATTRIBUTES (fndecl))) != NULL)
9342 warning_at (tree_nonartificial_location (exp),
9343 0, "%Kcall to %qs declared with attribute warning: %s",
9344 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9345 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9347 /* Check for a built-in function. */
9348 if (fndecl && DECL_BUILT_IN (fndecl))
9350 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9351 return expand_builtin (exp, target, subtarget, tmode, ignore);
9354 return expand_call (exp, target, ignore);
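/* Illustrative sketch: the attribute checks above fire for declarations
of the following (hypothetical) form; a call that survives to expansion
is then reported with the attached message:

  extern void sketch_never_call (void)
    __attribute__ ((error ("this call must be optimized away")));

The "warning" attribute is handled the same way but only warns.  */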
9356 case VIEW_CONVERT_EXPR:
9357 op0 = NULL_RTX;
9359 /* If we are converting to BLKmode, try to avoid an intermediate
9360 temporary by fetching an inner memory reference. */
9361 if (mode == BLKmode
9362 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9363 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9364 && handled_component_p (treeop0))
9366 enum machine_mode mode1;
9367 HOST_WIDE_INT bitsize, bitpos;
9368 tree offset;
9369 int unsignedp;
9370 int volatilep = 0;
9371 tree tem
9372 = get_inner_reference (treeop0, &bitsize, &bitpos,
9373 &offset, &mode1, &unsignedp, &volatilep,
9374 true);
9375 rtx orig_op0;
9377 /* ??? We should work harder and deal with non-zero offsets. */
9378 if (!offset
9379 && (bitpos % BITS_PER_UNIT) == 0
9380 && bitsize >= 0
9381 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9383 /* See the normal_inner_ref case for the rationale. */
9384 orig_op0
9385 = expand_expr (tem,
9386 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9387 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9388 != INTEGER_CST)
9389 && modifier != EXPAND_STACK_PARM
9390 ? target : NULL_RTX),
9391 VOIDmode,
9392 (modifier == EXPAND_INITIALIZER
9393 || modifier == EXPAND_CONST_ADDRESS
9394 || modifier == EXPAND_STACK_PARM)
9395 ? modifier : EXPAND_NORMAL);
9397 if (MEM_P (orig_op0))
9399 op0 = orig_op0;
9401 /* Get a reference to just this component. */
9402 if (modifier == EXPAND_CONST_ADDRESS
9403 || modifier == EXPAND_SUM
9404 || modifier == EXPAND_INITIALIZER)
9405 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9406 else
9407 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9409 if (op0 == orig_op0)
9410 op0 = copy_rtx (op0);
9412 set_mem_attributes (op0, treeop0, 0);
9413 if (REG_P (XEXP (op0, 0)))
9414 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9416 MEM_VOLATILE_P (op0) |= volatilep;
9421 if (!op0)
9422 op0 = expand_expr (treeop0,
9423 NULL_RTX, VOIDmode, modifier);
9425 /* If the input and output modes are both the same, we are done. */
9426 if (mode == GET_MODE (op0))
9428 /* If neither mode is BLKmode, and both modes are the same size
9429 then we can use gen_lowpart. */
9430 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9431 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9432 && !COMPLEX_MODE_P (GET_MODE (op0)))
9434 if (GET_CODE (op0) == SUBREG)
9435 op0 = force_reg (GET_MODE (op0), op0);
9436 temp = gen_lowpart_common (mode, op0);
9437 if (temp)
9438 op0 = temp;
9439 else
9441 if (!REG_P (op0) && !MEM_P (op0))
9442 op0 = force_reg (GET_MODE (op0), op0);
9443 op0 = gen_lowpart (mode, op0);
9446 /* If both types are integral, convert from one mode to the other. */
9447 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9448 op0 = convert_modes (mode, GET_MODE (op0), op0,
9449 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9450 /* As a last resort, spill op0 to memory, and reload it in a
9451 different mode. */
9452 else if (!MEM_P (op0))
9454 /* If the operand is not a MEM, force it into memory. Since we
9455 are going to be changing the mode of the MEM, don't call
9456 force_const_mem for constants because we don't allow pool
9457 constants to change mode. */
9458 tree inner_type = TREE_TYPE (treeop0);
9460 gcc_assert (!TREE_ADDRESSABLE (exp));
9462 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9463 target
9464 = assign_stack_temp_for_type
9465 (TYPE_MODE (inner_type),
9466 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9468 emit_move_insn (target, op0);
9469 op0 = target;
9472 /* At this point, OP0 is in the correct mode. If the output type is
9473 such that the operand is known to be aligned, indicate that it is.
9474 Otherwise, we need only be concerned about alignment for non-BLKmode
9475 results. */
9476 if (MEM_P (op0))
9478 op0 = copy_rtx (op0);
9480 if (TYPE_ALIGN_OK (type))
9481 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9482 else if (STRICT_ALIGNMENT
9483 && mode != BLKmode
9484 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9486 tree inner_type = TREE_TYPE (treeop0);
9487 HOST_WIDE_INT temp_size
9488 = MAX (int_size_in_bytes (inner_type),
9489 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9490 rtx new_rtx
9491 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9492 rtx new_with_op0_mode
9493 = adjust_address (new_rtx, GET_MODE (op0), 0);
9495 gcc_assert (!TREE_ADDRESSABLE (exp));
9497 if (GET_MODE (op0) == BLKmode)
9498 emit_block_move (new_with_op0_mode, op0,
9499 GEN_INT (GET_MODE_SIZE (mode)),
9500 (modifier == EXPAND_STACK_PARM
9501 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9502 else
9503 emit_move_insn (new_with_op0_mode, op0);
9505 op0 = new_rtx;
9508 op0 = adjust_address (op0, mode, 0);
9511 return op0;
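/* Illustrative sketch: the last-resort path above, which spills the
operand to a stack temporary and reloads it in the new mode, is the
expansion-level equivalent of a type pun through memory.  With a
hypothetical helper, assuming float and unsigned int have the same size
on the target, the store goes out in the original mode and the load
comes back in the requested one:

  static unsigned int
  sketch_float_bits (float f)
  {
    union { float f; unsigned int i; } u;
    u.f = f;
    return u.i;
  }
*/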
9513 /* Use a compare and a jump for BLKmode comparisons, or for function
9514 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9516 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9517 are occasionally created by folding during expansion. */
9518 case TRUTH_ANDIF_EXPR:
9519 case TRUTH_ORIF_EXPR:
9520 if (! ignore
9521 && (target == 0
9522 || modifier == EXPAND_STACK_PARM
9523 || ! safe_from_p (target, treeop0, 1)
9524 || ! safe_from_p (target, treeop1, 1)
9525 /* Make sure we don't have a hard reg (such as function's return
9526 value) live across basic blocks, if not optimizing. */
9527 || (!optimize && REG_P (target)
9528 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9529 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9531 if (target)
9532 emit_move_insn (target, const0_rtx);
9534 op1 = gen_label_rtx ();
9535 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9537 if (target)
9538 emit_move_insn (target, const1_rtx);
9540 emit_label (op1);
9541 return ignore ? const0_rtx : target;
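/* Illustrative sketch: the target is cleared, jumpifnot_1 skips the
store of 1 when the condition is false, and the join label op1 ends
both paths.  With hypothetical names the emitted sequence behaves like

  int target = 0;
  if (a && b)
    target = 1;
  return target;

and TRUTH_ORIF_EXPR is the same with a || b.  */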
9543 case STATEMENT_LIST:
9545 tree_stmt_iterator iter;
9547 gcc_assert (ignore);
9549 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9550 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9552 return const0_rtx;
9554 case COND_EXPR:
9555 /* A COND_EXPR with its type being VOID_TYPE represents a
9556 conditional jump and is handled in
9557 expand_gimple_cond_expr. */
9558 gcc_assert (!VOID_TYPE_P (type));
9560 /* Note that COND_EXPRs whose type is a structure or union
9561 are required to be constructed to contain assignments of
9562 a temporary variable, so that we can evaluate them here
9563 for side effect only. If type is void, we must do likewise. */
9565 gcc_assert (!TREE_ADDRESSABLE (type)
9566 && !ignore
9567 && TREE_TYPE (treeop1) != void_type_node
9568 && TREE_TYPE (treeop2) != void_type_node);
9570 /* If we are not to produce a result, we have no target. Otherwise,
9571 if a target was specified, use it; it will not be used as an
9572 intermediate target unless it is safe. If no target, use a
9573 temporary. */
9575 if (modifier != EXPAND_STACK_PARM
9576 && original_target
9577 && safe_from_p (original_target, treeop0, 1)
9578 && GET_MODE (original_target) == mode
9579 #ifdef HAVE_conditional_move
9580 && (! can_conditionally_move_p (mode)
9581 || REG_P (original_target))
9582 #endif
9583 && !MEM_P (original_target))
9584 temp = original_target;
9585 else
9586 temp = assign_temp (type, 0, 0, 1);
9588 do_pending_stack_adjust ();
9589 NO_DEFER_POP;
9590 op0 = gen_label_rtx ();
9591 op1 = gen_label_rtx ();
9592 jumpifnot (treeop0, op0, -1);
9593 store_expr (treeop1, temp,
9594 modifier == EXPAND_STACK_PARM,
9595 false);
9597 emit_jump_insn (gen_jump (op1));
9598 emit_barrier ();
9599 emit_label (op0);
9600 store_expr (treeop2, temp,
9601 modifier == EXPAND_STACK_PARM,
9602 false);
9604 emit_label (op1);
9605 OK_DEFER_POP;
9606 return temp;
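/* Illustrative sketch: the two labels and two store_expr calls above
produce ordinary if/else control flow into a shared temporary.  With
hypothetical names:

  if (cond)
    temp = then_value;
  else
    temp = else_value;
  return temp;

where op0 is the else label and op1 the join label.  */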
9608 case VEC_COND_EXPR:
9609 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9610 return target;
9612 case MODIFY_EXPR:
9614 tree lhs = treeop0;
9615 tree rhs = treeop1;
9616 gcc_assert (ignore);
9618 /* Check for |= or &= of a bitfield of size one into another bitfield
9619 of size 1. In this case, (unless we need the result of the
9620 assignment) we can do this more efficiently with a
9621 test followed by an assignment, if necessary.
9623 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9624 things change so we do, this code should be enhanced to
9625 support it. */
9626 if (TREE_CODE (lhs) == COMPONENT_REF
9627 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9628 || TREE_CODE (rhs) == BIT_AND_EXPR)
9629 && TREE_OPERAND (rhs, 0) == lhs
9630 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9631 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9632 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9634 rtx label = gen_label_rtx ();
9635 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9636 do_jump (TREE_OPERAND (rhs, 1),
9637 value ? label : 0,
9638 value ? 0 : label, -1);
9639 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9640 MOVE_NONTEMPORAL (exp));
9641 do_pending_stack_adjust ();
9642 emit_label (label);
9643 return const0_rtx;
9646 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9647 return const0_rtx;
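/* Illustrative sketch: for the one-bit-field special case above, with a
hypothetical struct s having one-bit fields a and b,

  s.a |= s.b;   becomes   if (s.b) s.a = 1;
  s.a &= s.b;   becomes   if (!s.b) s.a = 0;

i.e. a jump on the tested bit followed by a store of the known
constant, instead of a read-modify-write of the destination.  */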
9650 case ADDR_EXPR:
9651 return expand_expr_addr_expr (exp, target, tmode, modifier);
9653 case REALPART_EXPR:
9654 op0 = expand_normal (treeop0);
9655 return read_complex_part (op0, false);
9657 case IMAGPART_EXPR:
9658 op0 = expand_normal (treeop0);
9659 return read_complex_part (op0, true);
9661 case RETURN_EXPR:
9662 case LABEL_EXPR:
9663 case GOTO_EXPR:
9664 case SWITCH_EXPR:
9665 case ASM_EXPR:
9666 /* Expanded in cfgexpand.c. */
9667 gcc_unreachable ();
9669 case TRY_CATCH_EXPR:
9670 case CATCH_EXPR:
9671 case EH_FILTER_EXPR:
9672 case TRY_FINALLY_EXPR:
9673 /* Lowered by tree-eh.c. */
9674 gcc_unreachable ();
9676 case WITH_CLEANUP_EXPR:
9677 case CLEANUP_POINT_EXPR:
9678 case TARGET_EXPR:
9679 case CASE_LABEL_EXPR:
9680 case VA_ARG_EXPR:
9681 case BIND_EXPR:
9682 case INIT_EXPR:
9683 case CONJ_EXPR:
9684 case COMPOUND_EXPR:
9685 case PREINCREMENT_EXPR:
9686 case PREDECREMENT_EXPR:
9687 case POSTINCREMENT_EXPR:
9688 case POSTDECREMENT_EXPR:
9689 case LOOP_EXPR:
9690 case EXIT_EXPR:
9691 /* Lowered by gimplify.c. */
9692 gcc_unreachable ();
9694 case FDESC_EXPR:
9695 /* Function descriptors are not valid except as
9696 initialization constants, and should not be expanded. */
9697 gcc_unreachable ();
9699 case WITH_SIZE_EXPR:
9700 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9701 have pulled out the size to use in whatever context it needed. */
9702 return expand_expr_real (treeop0, original_target, tmode,
9703 modifier, alt_rtl);
9705 case COMPOUND_LITERAL_EXPR:
9707 /* Initialize the anonymous variable declared in the compound
9708 literal, then return the variable. */
9709 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9711 /* Create RTL for this variable. */
9712 if (!DECL_RTL_SET_P (decl))
9714 if (DECL_HARD_REGISTER (decl))
9715 /* The user specified an assembler name for this variable.
9716 Set that up now. */
9717 rest_of_decl_compilation (decl, 0, 0);
9718 else
9719 expand_decl (decl);
9722 return expand_expr_real (decl, original_target, tmode,
9723 modifier, alt_rtl);
9726 default:
9727 return expand_expr_real_2 (&ops, target, tmode, modifier);
9731 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9732 signedness of TYPE), possibly returning the result in TARGET. */
9733 static rtx
9734 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9736 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9737 if (target && GET_MODE (target) != GET_MODE (exp))
9738 target = 0;
9739 /* For constant values, reduce using build_int_cst_type. */
9740 if (CONST_INT_P (exp))
9742 HOST_WIDE_INT value = INTVAL (exp);
9743 tree t = build_int_cst_type (type, value);
9744 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9746 else if (TYPE_UNSIGNED (type))
9748 rtx mask = immed_double_int_const (double_int_mask (prec),
9749 GET_MODE (exp));
9750 return expand_and (GET_MODE (exp), exp, mask, target);
9752 else
9754 tree count = build_int_cst (NULL_TREE,
9755 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9756 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9757 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
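/* Illustrative sketch of the two non-constant paths above, for a value
already held in a 32-bit integer.  The helpers and their parameters are
hypothetical and assume 0 < prec < 32 and a target where right shifts
of negative values are arithmetic (matching the signed expand_shift
above).  Unsigned values are masked down to PREC bits; signed values
are sign-extended from bit PREC - 1 by a shift pair.  */

static unsigned int
sketch_reduce_unsigned (unsigned int value, int prec)
{
  return value & ((1u << prec) - 1);
}

static int
sketch_reduce_signed (int value, int prec)
{
  int shift = 32 - prec;
  return (int) ((unsigned int) value << shift) >> shift;
}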
9761 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9762 when applied to the address of EXP produces an address known to be
9763 aligned more than BIGGEST_ALIGNMENT. */
9765 static int
9766 is_aligning_offset (const_tree offset, const_tree exp)
9768 /* Strip off any conversions. */
9769 while (CONVERT_EXPR_P (offset))
9770 offset = TREE_OPERAND (offset, 0);
9772 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9773 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9774 if (TREE_CODE (offset) != BIT_AND_EXPR
9775 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9776 || compare_tree_int (TREE_OPERAND (offset, 1),
9777 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9778 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9779 return 0;
9781 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9782 It must be NEGATE_EXPR. Then strip any more conversions. */
9783 offset = TREE_OPERAND (offset, 0);
9784 while (CONVERT_EXPR_P (offset))
9785 offset = TREE_OPERAND (offset, 0);
9787 if (TREE_CODE (offset) != NEGATE_EXPR)
9788 return 0;
9790 offset = TREE_OPERAND (offset, 0);
9791 while (CONVERT_EXPR_P (offset))
9792 offset = TREE_OPERAND (offset, 0);
9794 /* This must now be the address of EXP. */
9795 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
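/* Illustrative sketch of the offset shape recognized above: the number
of bytes that must be added to an address to round it up to a
power-of-two alignment, computed as (-address) & (alignment - 1).  The
helper and its unsigned long parameters are hypothetical.  */

static unsigned long
sketch_aligning_offset (unsigned long address, unsigned long align)
{
  /* address + result is a multiple of align (align a power of two).  */
  return (0ul - address) & (align - 1);
}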
9798 /* Return the tree node if ARG corresponds to a string constant or zero
9799 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9800 in bytes within the string that ARG is accessing. The type of the
9801 offset will be `sizetype'. */
9803 tree
9804 string_constant (tree arg, tree *ptr_offset)
9806 tree array, offset, lower_bound;
9807 STRIP_NOPS (arg);
9809 if (TREE_CODE (arg) == ADDR_EXPR)
9811 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9813 *ptr_offset = size_zero_node;
9814 return TREE_OPERAND (arg, 0);
9816 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9818 array = TREE_OPERAND (arg, 0);
9819 offset = size_zero_node;
9821 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9823 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9824 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9825 if (TREE_CODE (array) != STRING_CST
9826 && TREE_CODE (array) != VAR_DECL)
9827 return 0;
9829 /* Check if the array has a nonzero lower bound. */
9830 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9831 if (!integer_zerop (lower_bound))
9833 /* If the offset and base aren't both constants, return 0. */
9834 if (TREE_CODE (lower_bound) != INTEGER_CST)
9835 return 0;
9836 if (TREE_CODE (offset) != INTEGER_CST)
9837 return 0;
9838 /* Adjust offset by the lower bound. */
9839 offset = size_diffop (fold_convert (sizetype, offset),
9840 fold_convert (sizetype, lower_bound));
9843 else
9844 return 0;
9846 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9848 tree arg0 = TREE_OPERAND (arg, 0);
9849 tree arg1 = TREE_OPERAND (arg, 1);
9851 STRIP_NOPS (arg0);
9852 STRIP_NOPS (arg1);
9854 if (TREE_CODE (arg0) == ADDR_EXPR
9855 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9856 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9858 array = TREE_OPERAND (arg0, 0);
9859 offset = arg1;
9861 else if (TREE_CODE (arg1) == ADDR_EXPR
9862 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9863 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9865 array = TREE_OPERAND (arg1, 0);
9866 offset = arg0;
9868 else
9869 return 0;
9871 else
9872 return 0;
9874 if (TREE_CODE (array) == STRING_CST)
9876 *ptr_offset = fold_convert (sizetype, offset);
9877 return array;
9879 else if (TREE_CODE (array) == VAR_DECL
9880 || TREE_CODE (array) == CONST_DECL)
9882 int length;
9884 /* Variables initialized to string literals can be handled too. */
9885 if (!const_value_known_p (array)
9886 || !DECL_INITIAL (array)
9887 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9888 return 0;
9890 /* Avoid const char foo[4] = "abcde"; */
9891 if (DECL_SIZE_UNIT (array) == NULL_TREE
9892 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9893 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9894 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9895 return 0;
9897 /* If the variable is bigger than the string literal, OFFSET must be constant
9898 and inside the bounds of the string literal. */
9899 offset = fold_convert (sizetype, offset);
9900 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9901 && (! host_integerp (offset, 1)
9902 || compare_tree_int (offset, length) >= 0))
9903 return 0;
9905 *ptr_offset = offset;
9906 return DECL_INITIAL (array);
9909 return 0;
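/* Illustrative examples of the address shapes string_constant above
recognizes.  The declarations are hypothetical and only show the forms:
a string-initialized variable (whose DECL_INITIAL supplies the string),
the address of a STRING_CST, such an address plus a constant offset,
and the address of an ARRAY_REF of the variable.  */

static const char sketch_lit[] = "hello";
static const char *const sketch_p0 = "hello";
static const char *const sketch_p1 = "hello" + 2;
static const char *const sketch_p2 = &sketch_lit[3];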
9912 /* Generate code to calculate OPS, an exploded comparison expression,
9913 using a store-flag instruction and return an rtx for the result.
9914 OPS reflects a comparison.
9916 If TARGET is nonzero, store the result there if convenient.
9918 Return zero if there is no suitable set-flag instruction
9919 available on this machine.
9921 Once expand_expr has been called on the arguments of the comparison,
9922 we are committed to doing the store flag, since it is not safe to
9923 re-evaluate the expression. We emit the store-flag insn by calling
9924 emit_store_flag, but only expand the arguments if we have a reason
9925 to believe that emit_store_flag will be successful. If we think that
9926 it will, but it isn't, we have to simulate the store-flag with a
9927 set/jump/set sequence. */
9929 static rtx
9930 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9932 enum rtx_code code;
9933 tree arg0, arg1, type;
9934 tree tem;
9935 enum machine_mode operand_mode;
9936 int unsignedp;
9937 rtx op0, op1;
9938 rtx subtarget = target;
9939 location_t loc = ops->location;
9941 arg0 = ops->op0;
9942 arg1 = ops->op1;
9944 /* Don't crash if the comparison was erroneous. */
9945 if (arg0 == error_mark_node || arg1 == error_mark_node)
9946 return const0_rtx;
9948 type = TREE_TYPE (arg0);
9949 operand_mode = TYPE_MODE (type);
9950 unsignedp = TYPE_UNSIGNED (type);
9952 /* We won't bother with BLKmode store-flag operations because it would mean
9953 passing a lot of information to emit_store_flag. */
9954 if (operand_mode == BLKmode)
9955 return 0;
9957 /* We won't bother with store-flag operations involving function pointers
9958 when function pointers must be canonicalized before comparisons. */
9959 #ifdef HAVE_canonicalize_funcptr_for_compare
9960 if (HAVE_canonicalize_funcptr_for_compare
9961 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9962 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9963 == FUNCTION_TYPE))
9964 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9965 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9966 == FUNCTION_TYPE))))
9967 return 0;
9968 #endif
9970 STRIP_NOPS (arg0);
9971 STRIP_NOPS (arg1);
9973 /* Get the rtx comparison code to use. We know that EXP is a comparison
9974 operation of some type. Some comparisons against 1 and -1 can be
9975 converted to comparisons with zero. Do so here so that the tests
9976 below will be aware that we have a comparison with zero. These
9977 tests will not catch constants in the first operand, but constants
9978 are rarely passed as the first operand. */
9980 switch (ops->code)
9982 case EQ_EXPR:
9983 code = EQ;
9984 break;
9985 case NE_EXPR:
9986 code = NE;
9987 break;
9988 case LT_EXPR:
9989 if (integer_onep (arg1))
9990 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9991 else
9992 code = unsignedp ? LTU : LT;
9993 break;
9994 case LE_EXPR:
9995 if (! unsignedp && integer_all_onesp (arg1))
9996 arg1 = integer_zero_node, code = LT;
9997 else
9998 code = unsignedp ? LEU : LE;
9999 break;
10000 case GT_EXPR:
10001 if (! unsignedp && integer_all_onesp (arg1))
10002 arg1 = integer_zero_node, code = GE;
10003 else
10004 code = unsignedp ? GTU : GT;
10005 break;
10006 case GE_EXPR:
10007 if (integer_onep (arg1))
10008 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10009 else
10010 code = unsignedp ? GEU : GE;
10011 break;
10013 case UNORDERED_EXPR:
10014 code = UNORDERED;
10015 break;
10016 case ORDERED_EXPR:
10017 code = ORDERED;
10018 break;
10019 case UNLT_EXPR:
10020 code = UNLT;
10021 break;
10022 case UNLE_EXPR:
10023 code = UNLE;
10024 break;
10025 case UNGT_EXPR:
10026 code = UNGT;
10027 break;
10028 case UNGE_EXPR:
10029 code = UNGE;
10030 break;
10031 case UNEQ_EXPR:
10032 code = UNEQ;
10033 break;
10034 case LTGT_EXPR:
10035 code = LTGT;
10036 break;
10038 default:
10039 gcc_unreachable ();
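/* Illustrative sketch: for a signed operand x the rewrites above amount
to

  x <  1    ==>   x <= 0
  x >= 1    ==>   x >  0
  x <= -1   ==>   x <  0
  x >  -1   ==>   x >= 0

and the unsigned < 1 / >= 1 cases likewise become comparisons against
zero, so the later tests only need to recognize zero.  */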
10042 /* Put a constant second. */
10043 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10044 || TREE_CODE (arg0) == FIXED_CST)
10046 tem = arg0; arg0 = arg1; arg1 = tem;
10047 code = swap_condition (code);
10050 /* If this is an equality or inequality test of a single bit, we can
10051 do this by shifting the bit being tested to the low-order bit and
10052 masking the result with the constant 1. If the condition was EQ,
10053 we xor it with 1. This does not require an scc insn and is faster
10054 than an scc insn even if we have it.
10056 The code to make this transformation was moved into fold_single_bit_test,
10057 so we just call into the folder and expand its result. */
10059 if ((code == NE || code == EQ)
10060 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10061 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10063 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10064 return expand_expr (fold_single_bit_test (loc,
10065 code == NE ? NE_EXPR : EQ_EXPR,
10066 arg0, arg1, type),
10067 target, VOIDmode, EXPAND_NORMAL);
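/* Illustrative sketch: with hypothetical names, the fold above turns

  (x & (1 << k)) != 0    into    (x >> k) & 1

and the == 0 form additionally XORs the result with 1, so no scc
(store-condition-code) instruction is needed.  */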
10070 if (! get_subtarget (target)
10071 || GET_MODE (subtarget) != operand_mode)
10072 subtarget = 0;
10074 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10076 if (target == 0)
10077 target = gen_reg_rtx (mode);
10079 /* Try a cstore if possible. */
10080 return emit_store_flag_force (target, code, op0, op1,
10081 operand_mode, unsignedp, 1);
10085 /* Stubs in case we haven't got a casesi insn. */
10086 #ifndef HAVE_casesi
10087 # define HAVE_casesi 0
10088 # define gen_casesi(a, b, c, d, e) (0)
10089 # define CODE_FOR_casesi CODE_FOR_nothing
10090 #endif
10092 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10093 0 otherwise (i.e. if there is no casesi instruction). */
10094 int
10095 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10096 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10097 rtx fallback_label ATTRIBUTE_UNUSED)
10099 struct expand_operand ops[5];
10100 enum machine_mode index_mode = SImode;
10101 int index_bits = GET_MODE_BITSIZE (index_mode);
10102 rtx op1, op2, index;
10104 if (! HAVE_casesi)
10105 return 0;
10107 /* Convert the index to SImode. */
10108 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10110 enum machine_mode omode = TYPE_MODE (index_type);
10111 rtx rangertx = expand_normal (range);
10113 /* We must handle the endpoints in the original mode. */
10114 index_expr = build2 (MINUS_EXPR, index_type,
10115 index_expr, minval);
10116 minval = integer_zero_node;
10117 index = expand_normal (index_expr);
10118 if (default_label)
10119 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10120 omode, 1, default_label);
10121 /* Now we can safely truncate. */
10122 index = convert_to_mode (index_mode, index, 0);
10124 else
10126 if (TYPE_MODE (index_type) != index_mode)
10128 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10129 index_expr = fold_convert (index_type, index_expr);
10132 index = expand_normal (index_expr);
10135 do_pending_stack_adjust ();
10137 op1 = expand_normal (minval);
10138 op2 = expand_normal (range);
10140 create_input_operand (&ops[0], index, index_mode);
10141 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10142 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10143 create_fixed_operand (&ops[3], table_label);
10144 create_fixed_operand (&ops[4], (default_label
10145 ? default_label
10146 : fallback_label));
10147 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10148 return 1;
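/* Illustrative sketch of the wide-index path in try_casesi above: the
minimum value is subtracted in the original (wider) mode, the range
check is done there, and only then is the index truncated to the SImode
operand the casesi pattern expects.  The helper and its parameters are
hypothetical; 'long long' stands in for the wider mode and 'int' for
SImode.  */

static int
sketch_narrow_case_index (long long index, long long minval,
                          unsigned long long range, int *narrowed)
{
  unsigned long long rel
    = (unsigned long long) index - (unsigned long long) minval;
  if (rel > range)
    return 0;               /* Out of range: use the default label.  */
  *narrowed = (int) rel;    /* Now safe to truncate.  */
  return 1;
}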
10151 /* Attempt to generate a tablejump instruction; same concept. */
10152 #ifndef HAVE_tablejump
10153 #define HAVE_tablejump 0
10154 #define gen_tablejump(x, y) (0)
10155 #endif
10157 /* Subroutine of the next function.
10159 INDEX is the value being switched on, with the lowest value
10160 in the table already subtracted.
10161 MODE is its expected mode (needed if INDEX is constant).
10162 RANGE is the length of the jump table.
10163 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10165 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10166 index value is out of range. */
10168 static void
10169 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10170 rtx default_label)
10172 rtx temp, vector;
10174 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10175 cfun->cfg->max_jumptable_ents = INTVAL (range);
10177 /* Do an unsigned comparison (in the proper mode) between the index
10178 expression and the value which represents the length of the range.
10179 Since we just finished subtracting the lower bound of the range
10180 from the index expression, this comparison allows us to simultaneously
10181 check that the original index expression value is both greater than
10182 or equal to the minimum value of the range and less than or equal to
10183 the maximum value of the range. */
10185 if (default_label)
10186 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10187 default_label);
10189 /* If index is in range, it must fit in Pmode.
10190 Convert to Pmode so we can index with it. */
10191 if (mode != Pmode)
10192 index = convert_to_mode (Pmode, index, 1);
10194 /* Don't let a MEM slip through, because then INDEX that comes
10195 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10196 and break_out_memory_refs will go to work on it and mess it up. */
10197 #ifdef PIC_CASE_VECTOR_ADDRESS
10198 if (flag_pic && !REG_P (index))
10199 index = copy_to_mode_reg (Pmode, index);
10200 #endif
10202 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10203 GET_MODE_SIZE, because this indicates how large insns are. The other
10204 uses should all be Pmode, because they are addresses. This code
10205 could fail if addresses and insns are not the same size. */
10206 index = gen_rtx_PLUS (Pmode,
10207 gen_rtx_MULT (Pmode, index,
10208 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10209 gen_rtx_LABEL_REF (Pmode, table_label));
10210 #ifdef PIC_CASE_VECTOR_ADDRESS
10211 if (flag_pic)
10212 index = PIC_CASE_VECTOR_ADDRESS (index);
10213 else
10214 #endif
10215 index = memory_address (CASE_VECTOR_MODE, index);
10216 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10217 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10218 convert_move (temp, vector, 0);
10220 emit_jump_insn (gen_tablejump (temp, table_label));
10222 /* If we are generating PIC code or if the table is PC-relative, the
10223 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10224 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10225 emit_barrier ();
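/* Illustrative sketch of the bounds check described in do_tablejump
above: once the lowest case value has been subtracted, one unsigned
comparison against the table length checks both ends of the range.  The
helper and its parameters are hypothetical.  */

static int
sketch_case_in_range (long index, long low, long high)
{
  /* Equivalent to low <= index && index <= high for in-range values.  */
  return ((unsigned long) index - (unsigned long) low
          <= (unsigned long) high - (unsigned long) low);
}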
10228 int
10229 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10230 rtx table_label, rtx default_label)
10232 rtx index;
10234 if (! HAVE_tablejump)
10235 return 0;
10237 index_expr = fold_build2 (MINUS_EXPR, index_type,
10238 fold_convert (index_type, index_expr),
10239 fold_convert (index_type, minval));
10240 index = expand_normal (index_expr);
10241 do_pending_stack_adjust ();
10243 do_tablejump (index, TYPE_MODE (index_type),
10244 convert_modes (TYPE_MODE (index_type),
10245 TYPE_MODE (TREE_TYPE (range)),
10246 expand_normal (range),
10247 TYPE_UNSIGNED (TREE_TYPE (range))),
10248 table_label, default_label);
10249 return 1;
10252 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10253 static rtx
10254 const_vector_from_tree (tree exp)
10256 rtvec v;
10257 int units, i;
10258 tree link, elt;
10259 enum machine_mode inner, mode;
10261 mode = TYPE_MODE (TREE_TYPE (exp));
10263 if (initializer_zerop (exp))
10264 return CONST0_RTX (mode);
10266 units = GET_MODE_NUNITS (mode);
10267 inner = GET_MODE_INNER (mode);
10269 v = rtvec_alloc (units);
10271 link = TREE_VECTOR_CST_ELTS (exp);
10272 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10274 elt = TREE_VALUE (link);
10276 if (TREE_CODE (elt) == REAL_CST)
10277 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10278 inner);
10279 else if (TREE_CODE (elt) == FIXED_CST)
10280 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10281 inner);
10282 else
10283 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10284 inner);
10287 /* Initialize remaining elements to 0. */
10288 for (; i < units; ++i)
10289 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10291 return gen_rtx_CONST_VECTOR (mode, v);
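/* Illustrative sketch of the element loop above: the vector constant is
filled from the element list and any remaining lanes are zeroed.  The
helper, working on plain int arrays, is hypothetical.  */

static void
sketch_fill_vector (int *lanes, int units, const int *elts, int n_elts)
{
  int i;
  for (i = 0; i < n_elts && i < units; i++)
    lanes[i] = elts[i];
  for (; i < units; i++)
    lanes[i] = 0;
}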
10294 /* Build a decl for a personality function given a language prefix. */
10296 tree
10297 build_personality_function (const char *lang)
10299 const char *unwind_and_version;
10300 tree decl, type;
10301 char *name;
10303 switch (targetm.except_unwind_info (&global_options))
10305 case UI_NONE:
10306 return NULL;
10307 case UI_SJLJ:
10308 unwind_and_version = "_sj0";
10309 break;
10310 case UI_DWARF2:
10311 case UI_TARGET:
10312 unwind_and_version = "_v0";
10313 break;
10314 default:
10315 gcc_unreachable ();
10318 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10320 type = build_function_type_list (integer_type_node, integer_type_node,
10321 long_long_unsigned_type_node,
10322 ptr_type_node, ptr_type_node, NULL_TREE);
10323 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10324 get_identifier (name), type);
10325 DECL_ARTIFICIAL (decl) = 1;
10326 DECL_EXTERNAL (decl) = 1;
10327 TREE_PUBLIC (decl) = 1;
10329 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10330 are the flags assigned by targetm.encode_section_info. */
10331 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10333 return decl;
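/* Illustrative examples of the names composed above from the language
prefix and the unwind suffix, using "gxx" as an example prefix; the
literals below are only a sketch of the
"__" + lang + "_personality" + suffix concatenation.  */

static const char *const sketch_personality_sjlj = "__" "gxx" "_personality" "_sj0";
static const char *const sketch_personality_dw2  = "__" "gxx" "_personality" "_v0";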
10336 /* Extracts the personality function of DECL and returns the corresponding
10337 libfunc. */
10339 rtx
10340 get_personality_function (tree decl)
10342 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10343 enum eh_personality_kind pk;
10345 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10346 if (pk == eh_personality_none)
10347 return NULL;
10349 if (!personality
10350 && pk == eh_personality_any)
10351 personality = lang_hooks.eh_personality ();
10353 if (pk == eh_personality_lang)
10354 gcc_assert (personality != NULL_TREE);
10356 return XEXP (DECL_RTL (personality), 0);
10359 #include "gt-expr.h"