1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "flags.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "except.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37 #include "expr.h"
38 #include "optabs.h"
39 #include "libfuncs.h"
40 #include "recog.h"
41 #include "reload.h"
42 #include "output.h"
43 #include "typeclass.h"
44 #include "toplev.h"
45 #include "langhooks.h"
46 #include "intl.h"
47 #include "tm_p.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
51 #include "target.h"
52 #include "timevar.h"
53 #include "df.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
56 #include "target-globals.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
64 #ifdef PUSH_ROUNDING
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
69 #endif
70 #endif
72 #endif
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
77 #else
78 #define STACK_PUSH_CODE PRE_INC
79 #endif
80 #endif
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
89 int cse_not_expected;
91 /* This structure is used by move_by_pieces to describe the move to
92 be performed. */
93 struct move_by_pieces_d
95 rtx to;
96 rtx to_addr;
97 int autinc_to;
98 int explicit_inc_to;
99 rtx from;
100 rtx from_addr;
101 int autinc_from;
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
105 int reverse;
108 /* This structure is used by store_by_pieces to describe the clear to
109 be performed. */
111 struct store_by_pieces_d
113 rtx to;
114 rtx to_addr;
115 int autinc_to;
116 int explicit_inc_to;
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
120 void *constfundata;
121 int reverse;
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 unsigned int,
126 unsigned int);
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces_d *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces_d *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (sepops, rtx, enum machine_mode);
155 #ifdef PUSH_ROUNDING
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
157 #endif
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* This macro is used to determine whether move_by_pieces should be called
163 to perform a structure copy. */
164 #ifndef MOVE_BY_PIECES_P
165 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
166 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
167 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
168 #endif
170 /* This macro is used to determine whether clear_by_pieces should be
171 called to clear storage. */
172 #ifndef CLEAR_BY_PIECES_P
173 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
174 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
175 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
176 #endif
178 /* This macro is used to determine whether store_by_pieces should be
179 called to "memset" storage with byte values other than zero. */
180 #ifndef SET_BY_PIECES_P
181 #define SET_BY_PIECES_P(SIZE, ALIGN) \
182 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
183 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
184 #endif
186 /* This macro is used to determine whether store_by_pieces should be
187 called to "memcpy" storage when the source is a constant string. */
188 #ifndef STORE_BY_PIECES_P
189 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
190 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
191 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
192 #endif
194 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
196 #ifndef SLOW_UNALIGNED_ACCESS
197 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
198 #endif
200 /* This is run to set up which modes can be used
201 directly in memory and to initialize the block move optab. It is run
202 at the beginning of compilation and when the target is reinitialized. */
204 void
205 init_expr_target (void)
207 rtx insn, pat;
208 enum machine_mode mode;
209 int num_clobbers;
210 rtx mem, mem1;
211 rtx reg;
213 /* Try indexing by frame ptr and try by stack ptr.
214 It is known that on the Convex the stack ptr isn't a valid index.
215 With luck, one or the other is valid on any machine. */
216 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
217 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
219 /* A scratch register we can modify in-place below to avoid
220 useless RTL allocations. */
221 reg = gen_rtx_REG (VOIDmode, -1);
223 insn = rtx_alloc (INSN);
224 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
225 PATTERN (insn) = pat;
227 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
228 mode = (enum machine_mode) ((int) mode + 1))
230 int regno;
232 direct_load[(int) mode] = direct_store[(int) mode] = 0;
233 PUT_MODE (mem, mode);
234 PUT_MODE (mem1, mode);
235 PUT_MODE (reg, mode);
237 /* See if there is some register that can be used in this mode and
238 directly loaded or stored from memory. */
240 if (mode != VOIDmode && mode != BLKmode)
241 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
242 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
243 regno++)
245 if (! HARD_REGNO_MODE_OK (regno, mode))
246 continue;
248 SET_REGNO (reg, regno);
250 SET_SRC (pat) = mem;
251 SET_DEST (pat) = reg;
252 if (recog (pat, insn, &num_clobbers) >= 0)
253 direct_load[(int) mode] = 1;
255 SET_SRC (pat) = mem1;
256 SET_DEST (pat) = reg;
257 if (recog (pat, insn, &num_clobbers) >= 0)
258 direct_load[(int) mode] = 1;
260 SET_SRC (pat) = reg;
261 SET_DEST (pat) = mem;
262 if (recog (pat, insn, &num_clobbers) >= 0)
263 direct_store[(int) mode] = 1;
265 SET_SRC (pat) = reg;
266 SET_DEST (pat) = mem1;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_store[(int) mode] = 1;
272 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
274 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
275 mode = GET_MODE_WIDER_MODE (mode))
277 enum machine_mode srcmode;
278 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
279 srcmode = GET_MODE_WIDER_MODE (srcmode))
281 enum insn_code ic;
283 ic = can_extend_p (mode, srcmode, 0);
284 if (ic == CODE_FOR_nothing)
285 continue;
287 PUT_MODE (mem, srcmode);
289 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
290 float_extend_from_mem[mode][srcmode] = true;
295 /* This is run at the start of compiling a function. */
297 void
298 init_expr (void)
300 memset (&crtl->expr, 0, sizeof (crtl->expr));
303 /* Copy data from FROM to TO, where the machine modes are not the same.
304 Both modes may be integer, or both may be floating, or both may be
305 fixed-point.
306 UNSIGNEDP should be nonzero if FROM is an unsigned type.
307 This causes zero-extension instead of sign-extension. */
309 void
310 convert_move (rtx to, rtx from, int unsignedp)
312 enum machine_mode to_mode = GET_MODE (to);
313 enum machine_mode from_mode = GET_MODE (from);
314 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
315 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
316 enum insn_code code;
317 rtx libcall;
319 /* rtx code for making an equivalent value. */
320 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
321 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
324 gcc_assert (to_real == from_real);
325 gcc_assert (to_mode != BLKmode);
326 gcc_assert (from_mode != BLKmode);
328 /* If the source and destination are already the same, then there's
329 nothing to do. */
330 if (to == from)
331 return;
333 /* If FROM is a SUBREG that indicates that we have already done at least
334 the required extension, strip it. We don't handle such SUBREGs as
335 TO here. */
337 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
338 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
339 >= GET_MODE_SIZE (to_mode))
340 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
341 from = gen_lowpart (to_mode, from), from_mode = to_mode;
343 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
345 if (to_mode == from_mode
346 || (from_mode == VOIDmode && CONSTANT_P (from)))
348 emit_move_insn (to, from);
349 return;
352 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
354 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
356 if (VECTOR_MODE_P (to_mode))
357 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
358 else
359 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
361 emit_move_insn (to, from);
362 return;
365 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
367 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
368 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
369 return;
372 if (to_real)
374 rtx value, insns;
375 convert_optab tab;
377 gcc_assert ((GET_MODE_PRECISION (from_mode)
378 != GET_MODE_PRECISION (to_mode))
379 || (DECIMAL_FLOAT_MODE_P (from_mode)
380 != DECIMAL_FLOAT_MODE_P (to_mode)));
382 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
383 /* Conversion between decimal float and binary float, same size. */
384 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
385 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
386 tab = sext_optab;
387 else
388 tab = trunc_optab;
390 /* Try converting directly if the insn is supported. */
392 code = convert_optab_handler (tab, to_mode, from_mode);
393 if (code != CODE_FOR_nothing)
395 emit_unop_insn (code, to, from,
396 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
397 return;
400 /* Otherwise use a libcall. */
401 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
403 /* Is this conversion implemented yet? */
404 gcc_assert (libcall);
406 start_sequence ();
407 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
408 1, from, from_mode);
409 insns = get_insns ();
410 end_sequence ();
411 emit_libcall_block (insns, to, value,
412 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
413 from)
414 : gen_rtx_FLOAT_EXTEND (to_mode, from));
415 return;
418 /* Handle pointer conversion. */ /* SPEE 900220. */
419 /* Targets are expected to provide conversion insns between PxImode and
420 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
421 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
423 enum machine_mode full_mode
424 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
426 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
427 != CODE_FOR_nothing);
429 if (full_mode != from_mode)
430 from = convert_to_mode (full_mode, from, unsignedp);
431 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
432 to, from, UNKNOWN);
433 return;
435 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
437 rtx new_from;
438 enum machine_mode full_mode
439 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
441 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
442 != CODE_FOR_nothing);
444 if (to_mode == full_mode)
446 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
447 from_mode),
448 to, from, UNKNOWN);
449 return;
452 new_from = gen_reg_rtx (full_mode);
453 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
454 new_from, from, UNKNOWN);
456 /* else proceed to integer conversions below. */
457 from_mode = full_mode;
458 from = new_from;
461 /* Make sure both are fixed-point modes or both are not. */
462 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
463 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
464 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
466 /* If we widen from_mode to to_mode and they are in the same class,
467 we won't saturate the result.
468 Otherwise, always saturate the result to play safe. */
469 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
470 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
471 expand_fixed_convert (to, from, 0, 0);
472 else
473 expand_fixed_convert (to, from, 0, 1);
474 return;
477 /* Now both modes are integers. */
479 /* Handle expanding beyond a word. */
480 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
481 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
483 rtx insns;
484 rtx lowpart;
485 rtx fill_value;
486 rtx lowfrom;
487 int i;
488 enum machine_mode lowpart_mode;
489 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
491 /* Try converting directly if the insn is supported. */
492 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
493 != CODE_FOR_nothing)
495 /* If FROM is a SUBREG, put it into a register. Do this
496 so that we always generate the same set of insns for
497 better cse'ing; if an intermediate assignment occurred,
498 we won't be doing the operation directly on the SUBREG. */
499 if (optimize > 0 && GET_CODE (from) == SUBREG)
500 from = force_reg (from_mode, from);
501 emit_unop_insn (code, to, from, equiv_code);
502 return;
504 /* Next, try converting via full word. */
505 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
506 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
507 != CODE_FOR_nothing))
509 rtx word_to = gen_reg_rtx (word_mode);
510 if (REG_P (to))
512 if (reg_overlap_mentioned_p (to, from))
513 from = force_reg (from_mode, from);
514 emit_clobber (to);
516 convert_move (word_to, from, unsignedp);
517 emit_unop_insn (code, to, word_to, equiv_code);
518 return;
521 /* No special multiword conversion insn; do it by hand. */
522 start_sequence ();
524 /* Since we will turn this into a no conflict block, we must ensure
525 that the source does not overlap the target. */
527 if (reg_overlap_mentioned_p (to, from))
528 from = force_reg (from_mode, from);
530 /* Get a copy of FROM widened to a word, if necessary. */
531 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
532 lowpart_mode = word_mode;
533 else
534 lowpart_mode = from_mode;
536 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
538 lowpart = gen_lowpart (lowpart_mode, to);
539 emit_move_insn (lowpart, lowfrom);
541 /* Compute the value to put in each remaining word. */
542 if (unsignedp)
543 fill_value = const0_rtx;
544 else
545 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
546 LT, lowfrom, const0_rtx,
547 VOIDmode, 0, -1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
561 insns = get_insns ();
562 end_sequence ();
564 emit_insn (insns);
565 return;
568 /* Truncating multi-word to a word or less. */
569 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
570 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
572 if (!((MEM_P (from)
573 && ! MEM_VOLATILE_P (from)
574 && direct_load[(int) to_mode]
575 && ! mode_dependent_address_p (XEXP (from, 0)))
576 || REG_P (from)
577 || GET_CODE (from) == SUBREG))
578 from = force_reg (from_mode, from);
579 convert_move (to, gen_lowpart (word_mode, from), 0);
580 return;
583 /* Now follow all the conversions between integer modes
584 no more than a word wide. */
586 /* For truncation, usually we can just refer to FROM in a narrower mode. */
587 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
588 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
589 GET_MODE_BITSIZE (from_mode)))
591 if (!((MEM_P (from)
592 && ! MEM_VOLATILE_P (from)
593 && direct_load[(int) to_mode]
594 && ! mode_dependent_address_p (XEXP (from, 0)))
595 || REG_P (from)
596 || GET_CODE (from) == SUBREG))
597 from = force_reg (from_mode, from);
598 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
599 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
600 from = copy_to_reg (from);
601 emit_move_insn (to, gen_lowpart (to_mode, from));
602 return;
605 /* Handle extension. */
606 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
608 /* Convert directly if that works. */
609 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
610 != CODE_FOR_nothing)
612 emit_unop_insn (code, to, from, equiv_code);
613 return;
615 else
617 enum machine_mode intermediate;
618 rtx tmp;
619 tree shift_amount;
621 /* Search for a mode to convert via. */
622 for (intermediate = from_mode; intermediate != VOIDmode;
623 intermediate = GET_MODE_WIDER_MODE (intermediate))
624 if (((can_extend_p (to_mode, intermediate, unsignedp)
625 != CODE_FOR_nothing)
626 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
627 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
628 GET_MODE_BITSIZE (intermediate))))
629 && (can_extend_p (intermediate, from_mode, unsignedp)
630 != CODE_FOR_nothing))
632 convert_move (to, convert_to_mode (intermediate, from,
633 unsignedp), unsignedp);
634 return;
637 /* No suitable intermediate mode.
638 Generate what we need with shifts. */
639 shift_amount = build_int_cst (NULL_TREE,
640 GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
644 to, unsignedp);
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
646 to, unsignedp);
647 if (tmp != to)
648 emit_move_insn (to, tmp);
649 return;
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
658 to, from, UNKNOWN);
659 return;
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
673 return;
676 /* Mode combination is not recognized. */
677 gcc_unreachable ();
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
690 return convert_modes (mode, VOIDmode, x, unsignedp);
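/* Illustrative sketch only: how a caller might widen a QImode pseudo to
   SImode.  example_widen_byte and byte_reg are invented for the example;
   convert_to_mode is the real interface documented above.  */
#if 0
static rtx
example_widen_byte (rtx byte_reg)
{
  /* UNSIGNEDP != 0 requests zero-extension; passing 0 would sign-extend.  */
  return convert_to_mode (SImode, byte_reg, 1);
}
#endif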
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
706 rtx temp;
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
719 if (mode == oldmode)
720 return x;
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong thing if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
732 double_int val = uhwi_to_double_int (INTVAL (x));
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
738 return immed_double_int_const (val, mode);
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
746 if ((CONST_INT_P (x)
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
754 || (REG_P (x)
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
760 /* ??? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
772 if (! unsignedp
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
776 return gen_int_mode (val, mode);
779 return gen_lowpart (mode, x);
782 /* Converting an integer constant into a vector mode is always equivalent to a
783 subreg operation. */
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
792 return temp;
795 /* STORE_MAX_PIECES is the number of bytes at a time that we can
796 store efficiently. Due to internal GCC limitations, this is
797 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
798 for an immediate constant. */
800 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
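/* Worked example, illustrative only: with a 64-bit HOST_WIDE_INT,
   2 * sizeof (HOST_WIDE_INT) is 16 bytes, so a target whose MOVE_MAX_PIECES
   is 8 gets STORE_MAX_PIECES == 8, while a target allowing 16-byte moves
   would still be capped at 16.  */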
802 /* Determine whether the LEN bytes can be moved by using several move
803 instructions. Return nonzero if a call to move_by_pieces should
804 succeed. */
807 can_move_by_pieces (unsigned HOST_WIDE_INT len,
808 unsigned int align ATTRIBUTE_UNUSED)
810 return MOVE_BY_PIECES_P (len, align);
813 /* Generate several move instructions to copy LEN bytes from block FROM to
814 block TO. (These are MEM rtx's with BLKmode).
816 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
817 used to push FROM to the stack.
819 ALIGN is maximum stack alignment we can assume.
821 If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
822 mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
823 stpcpy. */
826 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
827 unsigned int align, int endp)
829 struct move_by_pieces_d data;
830 enum machine_mode to_addr_mode, from_addr_mode
831 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
832 rtx to_addr, from_addr = XEXP (from, 0);
833 unsigned int max_size = MOVE_MAX_PIECES + 1;
834 enum machine_mode mode = VOIDmode, tmode;
835 enum insn_code icode;
837 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
839 data.offset = 0;
840 data.from_addr = from_addr;
841 if (to)
843 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
844 to_addr = XEXP (to, 0);
845 data.to = to;
846 data.autinc_to
847 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
848 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
849 data.reverse
850 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
852 else
854 to_addr_mode = VOIDmode;
855 to_addr = NULL_RTX;
856 data.to = NULL_RTX;
857 data.autinc_to = 1;
858 #ifdef STACK_GROWS_DOWNWARD
859 data.reverse = 1;
860 #else
861 data.reverse = 0;
862 #endif
864 data.to_addr = to_addr;
865 data.from = from;
866 data.autinc_from
867 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
868 || GET_CODE (from_addr) == POST_INC
869 || GET_CODE (from_addr) == POST_DEC);
871 data.explicit_inc_from = 0;
872 data.explicit_inc_to = 0;
873 if (data.reverse) data.offset = len;
874 data.len = len;
876 /* If copying requires more than two move insns,
877 copy addresses to registers (to make displacements shorter)
878 and use post-increment if available. */
879 if (!(data.autinc_from && data.autinc_to)
880 && move_by_pieces_ninsns (len, align, max_size) > 2)
882 /* Find the mode of the largest move... */
883 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
884 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
885 if (GET_MODE_SIZE (tmode) < max_size)
886 mode = tmode;
888 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
890 data.from_addr = copy_to_mode_reg (from_addr_mode,
891 plus_constant (from_addr, len));
892 data.autinc_from = 1;
893 data.explicit_inc_from = -1;
895 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
897 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
898 data.autinc_from = 1;
899 data.explicit_inc_from = 1;
901 if (!data.autinc_from && CONSTANT_P (from_addr))
902 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
903 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
905 data.to_addr = copy_to_mode_reg (to_addr_mode,
906 plus_constant (to_addr, len));
907 data.autinc_to = 1;
908 data.explicit_inc_to = -1;
910 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
912 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
913 data.autinc_to = 1;
914 data.explicit_inc_to = 1;
916 if (!data.autinc_to && CONSTANT_P (to_addr))
917 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
920 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
921 if (align >= GET_MODE_ALIGNMENT (tmode))
922 align = GET_MODE_ALIGNMENT (tmode);
923 else
925 enum machine_mode xmode;
927 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
928 tmode != VOIDmode;
929 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
930 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
931 || SLOW_UNALIGNED_ACCESS (tmode, align))
932 break;
934 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
937 /* First move what we can in the largest integer mode, then go to
938 successively smaller modes. */
940 while (max_size > 1)
942 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
943 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
944 if (GET_MODE_SIZE (tmode) < max_size)
945 mode = tmode;
947 if (mode == VOIDmode)
948 break;
950 icode = optab_handler (mov_optab, mode);
951 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
952 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
954 max_size = GET_MODE_SIZE (mode);
957 /* The code above should have handled everything. */
958 gcc_assert (!data.len);
960 if (endp)
962 rtx to1;
964 gcc_assert (!data.reverse);
965 if (data.autinc_to)
967 if (endp == 2)
969 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
970 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
971 else
972 data.to_addr = copy_to_mode_reg (to_addr_mode,
973 plus_constant (data.to_addr,
974 -1));
976 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
977 data.offset);
979 else
981 if (endp == 2)
982 --data.offset;
983 to1 = adjust_address (data.to, QImode, data.offset);
985 return to1;
987 else
988 return data.to;
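/* Illustrative sketch only: the ENDP argument mirrors the
   memcpy/mempcpy/stpcpy family.  to_mem, from_mem and align below are
   invented operands; move_by_pieces is the real interface above.  */
#if 0
  /* Plain copy; the return value is simply TO.  */
  move_by_pieces (to_mem, from_mem, 16, align, 0);
  /* A la mempcpy: a QImode MEM just past the last byte stored.  */
  rtx past_end = move_by_pieces (to_mem, from_mem, 16, align, 1);
  /* A la stpcpy: a QImode MEM at the last byte stored.  */
  rtx last_byte = move_by_pieces (to_mem, from_mem, 16, align, 2);
#endif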
991 /* Return number of insns required to move L bytes by pieces.
992 ALIGN (in bits) is maximum alignment we can assume. */
994 static unsigned HOST_WIDE_INT
995 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
996 unsigned int max_size)
998 unsigned HOST_WIDE_INT n_insns = 0;
999 enum machine_mode tmode;
1001 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1002 if (align >= GET_MODE_ALIGNMENT (tmode))
1003 align = GET_MODE_ALIGNMENT (tmode);
1004 else
1006 enum machine_mode tmode, xmode;
1008 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1009 tmode != VOIDmode;
1010 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1011 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1012 || SLOW_UNALIGNED_ACCESS (tmode, align))
1013 break;
1015 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1018 while (max_size > 1)
1020 enum machine_mode mode = VOIDmode;
1021 enum insn_code icode;
1023 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1024 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1025 if (GET_MODE_SIZE (tmode) < max_size)
1026 mode = tmode;
1028 if (mode == VOIDmode)
1029 break;
1031 icode = optab_handler (mov_optab, mode);
1032 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1033 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1035 max_size = GET_MODE_SIZE (mode);
1038 gcc_assert (!l);
1039 return n_insns;
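/* Worked example, illustrative only: on a hypothetical 32-bit target with
   MOVE_MAX_PIECES == 4 and 4-byte alignment, L == 13 gives 13/4 = 3 SImode
   moves with 1 byte left over, no HImode move, and 1 QImode move, so the
   result is 4.  MOVE_BY_PIECES_P then compares that count against
   MOVE_RATIO to decide whether moving by pieces is worthwhile.  */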
1042 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1043 with move instructions for mode MODE. GENFUN is the gen_... function
1044 to make a move insn for that mode. DATA has all the other info. */
1046 static void
1047 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1048 struct move_by_pieces_d *data)
1050 unsigned int size = GET_MODE_SIZE (mode);
1051 rtx to1 = NULL_RTX, from1;
1053 while (data->len >= size)
1055 if (data->reverse)
1056 data->offset -= size;
1058 if (data->to)
1060 if (data->autinc_to)
1061 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1062 data->offset);
1063 else
1064 to1 = adjust_address (data->to, mode, data->offset);
1067 if (data->autinc_from)
1068 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1069 data->offset);
1070 else
1071 from1 = adjust_address (data->from, mode, data->offset);
1073 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1074 emit_insn (gen_add2_insn (data->to_addr,
1075 GEN_INT (-(HOST_WIDE_INT)size)));
1076 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1077 emit_insn (gen_add2_insn (data->from_addr,
1078 GEN_INT (-(HOST_WIDE_INT)size)));
1080 if (data->to)
1081 emit_insn ((*genfun) (to1, from1));
1082 else
1084 #ifdef PUSH_ROUNDING
1085 emit_single_push_insn (mode, from1, NULL);
1086 #else
1087 gcc_unreachable ();
1088 #endif
1091 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1092 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1093 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1094 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1096 if (! data->reverse)
1097 data->offset += size;
1099 data->len -= size;
1103 /* Emit code to move a block Y to a block X. This may be done with
1104 string-move instructions, with multiple scalar move instructions,
1105 or with a library call.
1107 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1108 SIZE is an rtx that says how long they are.
1109 ALIGN is the maximum alignment we can assume they have.
1110 METHOD describes what kind of copy this is, and what mechanisms may be used.
1112 Return the address of the new block, if memcpy is called and returns it,
1113 0 otherwise. */
1116 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1117 unsigned int expected_align, HOST_WIDE_INT expected_size)
1119 bool may_use_call;
1120 rtx retval = 0;
1121 unsigned int align;
1123 switch (method)
1125 case BLOCK_OP_NORMAL:
1126 case BLOCK_OP_TAILCALL:
1127 may_use_call = true;
1128 break;
1130 case BLOCK_OP_CALL_PARM:
1131 may_use_call = block_move_libcall_safe_for_call_parm ();
1133 /* Make inhibit_defer_pop nonzero around the library call
1134 to force it to pop the arguments right away. */
1135 NO_DEFER_POP;
1136 break;
1138 case BLOCK_OP_NO_LIBCALL:
1139 may_use_call = false;
1140 break;
1142 default:
1143 gcc_unreachable ();
1146 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147 gcc_assert (align >= BITS_PER_UNIT);
1149 gcc_assert (MEM_P (x));
1150 gcc_assert (MEM_P (y));
1151 gcc_assert (size);
1153 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1154 block copy is more efficient for other large modes, e.g. DCmode. */
1155 x = adjust_address (x, BLKmode, 0);
1156 y = adjust_address (y, BLKmode, 0);
1158 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1159 can be incorrect is coming from __builtin_memcpy. */
1160 if (CONST_INT_P (size))
1162 if (INTVAL (size) == 0)
1163 return 0;
1165 x = shallow_copy_rtx (x);
1166 y = shallow_copy_rtx (y);
1167 set_mem_size (x, size);
1168 set_mem_size (y, size);
1171 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1172 move_by_pieces (x, y, INTVAL (size), align, 0);
1173 else if (emit_block_move_via_movmem (x, y, size, align,
1174 expected_align, expected_size))
1176 else if (may_use_call
1177 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1178 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1179 retval = emit_block_move_via_libcall (x, y, size,
1180 method == BLOCK_OP_TAILCALL);
1181 else
1182 emit_block_move_via_loop (x, y, size, align);
1184 if (method == BLOCK_OP_CALL_PARM)
1185 OK_DEFER_POP;
1187 return retval;
1191 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1193 return emit_block_move_hints (x, y, size, method, 0, -1);
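/* Illustrative sketch only: a typical caller copies NBYTES bytes between
   two BLKmode MEMs.  dest_blk, src_blk and nbytes are invented operands;
   emit_block_move and BLOCK_OP_NORMAL are the real interfaces.  */
#if 0
  emit_block_move (dest_blk, src_blk, GEN_INT (nbytes), BLOCK_OP_NORMAL);
#endif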
1196 /* A subroutine of emit_block_move. Returns true if calling the
1197 block move libcall will not clobber any parameters which may have
1198 already been placed on the stack. */
1200 static bool
1201 block_move_libcall_safe_for_call_parm (void)
1203 #if defined (REG_PARM_STACK_SPACE)
1204 tree fn;
1205 #endif
1207 /* If arguments are pushed on the stack, then they're safe. */
1208 if (PUSH_ARGS)
1209 return true;
1211 /* If registers go on the stack anyway, any argument is sure to clobber
1212 an outgoing argument. */
1213 #if defined (REG_PARM_STACK_SPACE)
1214 fn = emit_block_move_libcall_fn (false);
1215 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1216 depend on its argument. */
1217 (void) fn;
1218 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1219 && REG_PARM_STACK_SPACE (fn) != 0)
1220 return false;
1221 #endif
1223 /* If any argument goes in memory, then it might clobber an outgoing
1224 argument. */
1226 CUMULATIVE_ARGS args_so_far;
1227 tree fn, arg;
1229 fn = emit_block_move_libcall_fn (false);
1230 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1232 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1233 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1235 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1236 rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
1237 NULL_TREE, true);
1238 if (!tmp || !REG_P (tmp))
1239 return false;
1240 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1241 return false;
1242 targetm.calls.function_arg_advance (&args_so_far, mode,
1243 NULL_TREE, true);
1246 return true;
1249 /* A subroutine of emit_block_move. Expand a movmem pattern;
1250 return true if successful. */
1252 static bool
1253 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1254 unsigned int expected_align, HOST_WIDE_INT expected_size)
1256 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1257 int save_volatile_ok = volatile_ok;
1258 enum machine_mode mode;
1260 if (expected_align < align)
1261 expected_align = align;
1263 /* Since this is a move insn, we don't care about volatility. */
1264 volatile_ok = 1;
1266 /* Try the most limited insn first, because there's no point
1267 including more than one in the machine description unless
1268 the more limited one has some advantage. */
1270 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1271 mode = GET_MODE_WIDER_MODE (mode))
1273 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1274 insn_operand_predicate_fn pred;
1276 if (code != CODE_FOR_nothing
1277 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1278 here because if SIZE is less than the mode mask, as it is
1279 returned by the macro, it will definitely be less than the
1280 actual mode mask. */
1281 && ((CONST_INT_P (size)
1282 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1283 <= (GET_MODE_MASK (mode) >> 1)))
1284 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1285 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1286 || (*pred) (x, BLKmode))
1287 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1288 || (*pred) (y, BLKmode))
1289 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1290 || (*pred) (opalign, VOIDmode)))
1292 rtx op2;
1293 rtx last = get_last_insn ();
1294 rtx pat;
1296 op2 = convert_to_mode (mode, size, 1);
1297 pred = insn_data[(int) code].operand[2].predicate;
1298 if (pred != 0 && ! (*pred) (op2, mode))
1299 op2 = copy_to_mode_reg (mode, op2);
1301 /* ??? When called via emit_block_move_for_call, it'd be
1302 nice if there were some way to inform the backend, so
1303 that it doesn't fail the expansion because it thinks
1304 emitting the libcall would be more efficient. */
1306 if (insn_data[(int) code].n_operands == 4)
1307 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1308 else
1309 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1310 GEN_INT (expected_align
1311 / BITS_PER_UNIT),
1312 GEN_INT (expected_size));
1313 if (pat)
1315 emit_insn (pat);
1316 volatile_ok = save_volatile_ok;
1317 return true;
1319 else
1320 delete_insns_since (last);
1324 volatile_ok = save_volatile_ok;
1325 return false;
1328 /* A subroutine of emit_block_move. Expand a call to memcpy.
1329 Return the return value from memcpy, 0 otherwise. */
1332 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1334 rtx dst_addr, src_addr;
1335 tree call_expr, fn, src_tree, dst_tree, size_tree;
1336 enum machine_mode size_mode;
1337 rtx retval;
1339 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1340 pseudos. We can then place those new pseudos into a VAR_DECL and
1341 use them later. */
1343 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1344 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1346 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1347 src_addr = convert_memory_address (ptr_mode, src_addr);
1349 dst_tree = make_tree (ptr_type_node, dst_addr);
1350 src_tree = make_tree (ptr_type_node, src_addr);
1352 size_mode = TYPE_MODE (sizetype);
1354 size = convert_to_mode (size_mode, size, 1);
1355 size = copy_to_mode_reg (size_mode, size);
1357 /* It is incorrect to use the libcall calling conventions to call
1358 memcpy in this context. This could be a user call to memcpy and
1359 the user may wish to examine the return value from memcpy. For
1360 targets where libcalls and normal calls have different conventions
1361 for returning pointers, we could end up generating incorrect code. */
1363 size_tree = make_tree (sizetype, size);
1365 fn = emit_block_move_libcall_fn (true);
1366 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1367 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1369 retval = expand_normal (call_expr);
1371 return retval;
1374 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1375 for the function we use for block copies. The first time FOR_CALL
1376 is true, we call assemble_external. */
1378 static GTY(()) tree block_move_fn;
1380 void
1381 init_block_move_fn (const char *asmspec)
1383 if (!block_move_fn)
1385 tree args, fn;
1387 fn = get_identifier ("memcpy");
1388 args = build_function_type_list (ptr_type_node, ptr_type_node,
1389 const_ptr_type_node, sizetype,
1390 NULL_TREE);
1392 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1393 DECL_EXTERNAL (fn) = 1;
1394 TREE_PUBLIC (fn) = 1;
1395 DECL_ARTIFICIAL (fn) = 1;
1396 TREE_NOTHROW (fn) = 1;
1397 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1398 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1400 block_move_fn = fn;
1403 if (asmspec)
1404 set_user_assembler_name (block_move_fn, asmspec);
1407 static tree
1408 emit_block_move_libcall_fn (int for_call)
1410 static bool emitted_extern;
1412 if (!block_move_fn)
1413 init_block_move_fn (NULL);
1415 if (for_call && !emitted_extern)
1417 emitted_extern = true;
1418 make_decl_rtl (block_move_fn);
1419 assemble_external (block_move_fn);
1422 return block_move_fn;
1425 /* A subroutine of emit_block_move. Copy the data via an explicit
1426 loop. This is used only when libcalls are forbidden. */
1427 /* ??? It'd be nice to copy in hunks larger than QImode. */
1429 static void
1430 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1431 unsigned int align ATTRIBUTE_UNUSED)
1433 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1434 enum machine_mode x_addr_mode
1435 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1436 enum machine_mode y_addr_mode
1437 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1438 enum machine_mode iter_mode;
1440 iter_mode = GET_MODE (size);
1441 if (iter_mode == VOIDmode)
1442 iter_mode = word_mode;
1444 top_label = gen_label_rtx ();
1445 cmp_label = gen_label_rtx ();
1446 iter = gen_reg_rtx (iter_mode);
1448 emit_move_insn (iter, const0_rtx);
1450 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1451 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1452 do_pending_stack_adjust ();
1454 emit_jump (cmp_label);
1455 emit_label (top_label);
1457 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1458 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1460 if (x_addr_mode != y_addr_mode)
1461 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1462 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1464 x = change_address (x, QImode, x_addr);
1465 y = change_address (y, QImode, y_addr);
1467 emit_move_insn (x, y);
1469 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1470 true, OPTAB_LIB_WIDEN);
1471 if (tmp != iter)
1472 emit_move_insn (iter, tmp);
1474 emit_label (cmp_label);
1476 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1477 true, top_label);
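/* Illustrative sketch only: the RTL emitted by emit_block_move_via_loop
   corresponds roughly to this byte-at-a-time C loop:

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter++;
     cmp:
       if (iter < size)
         goto top;  */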
1480 /* Copy all or part of a value X into registers starting at REGNO.
1481 The number of registers to be filled is NREGS. */
1483 void
1484 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1486 int i;
1487 #ifdef HAVE_load_multiple
1488 rtx pat;
1489 rtx last;
1490 #endif
1492 if (nregs == 0)
1493 return;
1495 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1496 x = validize_mem (force_const_mem (mode, x));
1498 /* See if the machine can do this with a load multiple insn. */
1499 #ifdef HAVE_load_multiple
1500 if (HAVE_load_multiple)
1502 last = get_last_insn ();
1503 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1504 GEN_INT (nregs));
1505 if (pat)
1507 emit_insn (pat);
1508 return;
1510 else
1511 delete_insns_since (last);
1513 #endif
1515 for (i = 0; i < nregs; i++)
1516 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1517 operand_subword_force (x, i, mode));
1520 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1521 The number of registers to be filled is NREGS. */
1523 void
1524 move_block_from_reg (int regno, rtx x, int nregs)
1526 int i;
1528 if (nregs == 0)
1529 return;
1531 /* See if the machine can do this with a store multiple insn. */
1532 #ifdef HAVE_store_multiple
1533 if (HAVE_store_multiple)
1535 rtx last = get_last_insn ();
1536 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1537 GEN_INT (nregs));
1538 if (pat)
1540 emit_insn (pat);
1541 return;
1543 else
1544 delete_insns_since (last);
1546 #endif
1548 for (i = 0; i < nregs; i++)
1550 rtx tem = operand_subword (x, i, 1, BLKmode);
1552 gcc_assert (tem);
1554 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1558 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1559 ORIG, where ORIG is a non-consecutive group of registers represented by
1560 a PARALLEL. The clone is identical to the original except in that the
1561 original set of registers is replaced by a new set of pseudo registers.
1562 The new set has the same modes as the original set. */
1565 gen_group_rtx (rtx orig)
1567 int i, length;
1568 rtx *tmps;
1570 gcc_assert (GET_CODE (orig) == PARALLEL);
1572 length = XVECLEN (orig, 0);
1573 tmps = XALLOCAVEC (rtx, length);
1575 /* Skip a NULL entry in first slot. */
1576 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1578 if (i)
1579 tmps[0] = 0;
1581 for (; i < length; i++)
1583 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1584 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1586 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1589 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
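/* Illustrative sketch only: a group of this kind is a PARALLEL of
   (register, byte offset) pairs, e.g. for a 16-byte value split across two
   DImode hard registers (register numbers invented):

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   gen_group_rtx returns the same shape with fresh pseudos substituted for
   the original registers.  */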
1592 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1593 except that values are placed in TMPS[i], and must later be moved
1594 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1596 static void
1597 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1599 rtx src;
1600 int start, i;
1601 enum machine_mode m = GET_MODE (orig_src);
1603 gcc_assert (GET_CODE (dst) == PARALLEL);
1605 if (m != VOIDmode
1606 && !SCALAR_INT_MODE_P (m)
1607 && !MEM_P (orig_src)
1608 && GET_CODE (orig_src) != CONCAT)
1610 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1611 if (imode == BLKmode)
1612 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1613 else
1614 src = gen_reg_rtx (imode);
1615 if (imode != BLKmode)
1616 src = gen_lowpart (GET_MODE (orig_src), src);
1617 emit_move_insn (src, orig_src);
1618 /* ...and back again. */
1619 if (imode != BLKmode)
1620 src = gen_lowpart (imode, src);
1621 emit_group_load_1 (tmps, dst, src, type, ssize);
1622 return;
1625 /* Check for a NULL entry, used to indicate that the parameter goes
1626 both on the stack and in registers. */
1627 if (XEXP (XVECEXP (dst, 0, 0), 0))
1628 start = 0;
1629 else
1630 start = 1;
1632 /* Process the pieces. */
1633 for (i = start; i < XVECLEN (dst, 0); i++)
1635 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1636 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1637 unsigned int bytelen = GET_MODE_SIZE (mode);
1638 int shift = 0;
1640 /* Handle trailing fragments that run over the size of the struct. */
1641 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1643 /* Arrange to shift the fragment to where it belongs.
1644 extract_bit_field loads to the lsb of the reg. */
1645 if (
1646 #ifdef BLOCK_REG_PADDING
1647 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1648 == (BYTES_BIG_ENDIAN ? upward : downward)
1649 #else
1650 BYTES_BIG_ENDIAN
1651 #endif
1653 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1654 bytelen = ssize - bytepos;
1655 gcc_assert (bytelen > 0);
1658 /* If we won't be loading directly from memory, protect the real source
1659 from strange tricks we might play; but make sure that the source can
1660 be loaded directly into the destination. */
1661 src = orig_src;
1662 if (!MEM_P (orig_src)
1663 && (!CONSTANT_P (orig_src)
1664 || (GET_MODE (orig_src) != mode
1665 && GET_MODE (orig_src) != VOIDmode)))
1667 if (GET_MODE (orig_src) == VOIDmode)
1668 src = gen_reg_rtx (mode);
1669 else
1670 src = gen_reg_rtx (GET_MODE (orig_src));
1672 emit_move_insn (src, orig_src);
1675 /* Optimize the access just a bit. */
1676 if (MEM_P (src)
1677 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1678 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1679 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1680 && bytelen == GET_MODE_SIZE (mode))
1682 tmps[i] = gen_reg_rtx (mode);
1683 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1685 else if (COMPLEX_MODE_P (mode)
1686 && GET_MODE (src) == mode
1687 && bytelen == GET_MODE_SIZE (mode))
1688 /* Let emit_move_complex do the bulk of the work. */
1689 tmps[i] = src;
1690 else if (GET_CODE (src) == CONCAT)
1692 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1693 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1695 if ((bytepos == 0 && bytelen == slen0)
1696 || (bytepos != 0 && bytepos + bytelen <= slen))
1698 /* The following assumes that the concatenated objects all
1699 have the same size. In this case, a simple calculation
1700 can be used to determine the object and the bit field
1701 to be extracted. */
1702 tmps[i] = XEXP (src, bytepos / slen0);
1703 if (! CONSTANT_P (tmps[i])
1704 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1705 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1706 (bytepos % slen0) * BITS_PER_UNIT,
1707 1, NULL_RTX, mode, mode);
1709 else
1711 rtx mem;
1713 gcc_assert (!bytepos);
1714 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1715 emit_move_insn (mem, src);
1716 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1717 0, 1, NULL_RTX, mode, mode);
1720 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1721 SIMD register, which is currently broken. While we get GCC
1722 to emit proper RTL for these cases, let's dump to memory. */
1723 else if (VECTOR_MODE_P (GET_MODE (dst))
1724 && REG_P (src))
1726 int slen = GET_MODE_SIZE (GET_MODE (src));
1727 rtx mem;
1729 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1730 emit_move_insn (mem, src);
1731 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1733 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1734 && XVECLEN (dst, 0) > 1)
1735 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1736 else if (CONSTANT_P (src))
1738 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1740 if (len == ssize)
1741 tmps[i] = src;
1742 else
1744 rtx first, second;
1746 gcc_assert (2 * len == ssize);
1747 split_double (src, &first, &second);
1748 if (i)
1749 tmps[i] = second;
1750 else
1751 tmps[i] = first;
1754 else if (REG_P (src) && GET_MODE (src) == mode)
1755 tmps[i] = src;
1756 else
1757 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1758 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1759 mode, mode);
1761 if (shift)
1762 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1763 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1767 /* Emit code to move a block SRC of type TYPE to a block DST,
1768 where DST is non-consecutive registers represented by a PARALLEL.
1769 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1770 if not known. */
1772 void
1773 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1775 rtx *tmps;
1776 int i;
1778 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1779 emit_group_load_1 (tmps, dst, src, type, ssize);
1781 /* Copy the extracted pieces into the proper (probable) hard regs. */
1782 for (i = 0; i < XVECLEN (dst, 0); i++)
1784 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1785 if (d == NULL)
1786 continue;
1787 emit_move_insn (d, tmps[i]);
1791 /* Similar, but load SRC into new pseudos in a format that looks like
1792 PARALLEL. This can later be fed to emit_group_move to get things
1793 in the right place. */
1796 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1798 rtvec vec;
1799 int i;
1801 vec = rtvec_alloc (XVECLEN (parallel, 0));
1802 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1804 /* Convert the vector to look just like the original PARALLEL, except
1805 with the computed values. */
1806 for (i = 0; i < XVECLEN (parallel, 0); i++)
1808 rtx e = XVECEXP (parallel, 0, i);
1809 rtx d = XEXP (e, 0);
1811 if (d)
1813 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1814 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1816 RTVEC_ELT (vec, i) = e;
1819 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1822 /* Emit code to move a block SRC to block DST, where SRC and DST are
1823 non-consecutive groups of registers, each represented by a PARALLEL. */
1825 void
1826 emit_group_move (rtx dst, rtx src)
1828 int i;
1830 gcc_assert (GET_CODE (src) == PARALLEL
1831 && GET_CODE (dst) == PARALLEL
1832 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1834 /* Skip first entry if NULL. */
1835 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1836 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1837 XEXP (XVECEXP (src, 0, i), 0));
1840 /* Move a group of registers represented by a PARALLEL into pseudos. */
1843 emit_group_move_into_temps (rtx src)
1845 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1846 int i;
1848 for (i = 0; i < XVECLEN (src, 0); i++)
1850 rtx e = XVECEXP (src, 0, i);
1851 rtx d = XEXP (e, 0);
1853 if (d)
1854 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1855 RTVEC_ELT (vec, i) = e;
1858 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1861 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1862 where SRC is non-consecutive registers represented by a PARALLEL.
1863 SSIZE represents the total size of block ORIG_DST, or -1 if not
1864 known. */
1866 void
1867 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1869 rtx *tmps, dst;
1870 int start, finish, i;
1871 enum machine_mode m = GET_MODE (orig_dst);
1873 gcc_assert (GET_CODE (src) == PARALLEL);
1875 if (!SCALAR_INT_MODE_P (m)
1876 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1878 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1879 if (imode == BLKmode)
1880 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1881 else
1882 dst = gen_reg_rtx (imode);
1883 emit_group_store (dst, src, type, ssize);
1884 if (imode != BLKmode)
1885 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1886 emit_move_insn (orig_dst, dst);
1887 return;
1890 /* Check for a NULL entry, used to indicate that the parameter goes
1891 both on the stack and in registers. */
1892 if (XEXP (XVECEXP (src, 0, 0), 0))
1893 start = 0;
1894 else
1895 start = 1;
1896 finish = XVECLEN (src, 0);
1898 tmps = XALLOCAVEC (rtx, finish);
1900 /* Copy the (probable) hard regs into pseudos. */
1901 for (i = start; i < finish; i++)
1903 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1904 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1906 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1907 emit_move_insn (tmps[i], reg);
1909 else
1910 tmps[i] = reg;
1913 /* If we won't be storing directly into memory, protect the real destination
1914 from strange tricks we might play. */
1915 dst = orig_dst;
1916 if (GET_CODE (dst) == PARALLEL)
1918 rtx temp;
1920 /* We can get a PARALLEL dst if there is a conditional expression in
1921 a return statement. In that case, the dst and src are the same,
1922 so no action is necessary. */
1923 if (rtx_equal_p (dst, src))
1924 return;
1926 /* It is unclear if we can ever reach here, but we may as well handle
1927 it. Allocate a temporary, and split this into a store/load to/from
1928 the temporary. */
1930 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1931 emit_group_store (temp, src, type, ssize);
1932 emit_group_load (dst, temp, type, ssize);
1933 return;
1935 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1937 enum machine_mode outer = GET_MODE (dst);
1938 enum machine_mode inner;
1939 HOST_WIDE_INT bytepos;
1940 bool done = false;
1941 rtx temp;
1943 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1944 dst = gen_reg_rtx (outer);
1946 /* Make life a bit easier for combine. */
1947 /* If the first element of the vector is the low part
1948 of the destination mode, use a paradoxical subreg to
1949 initialize the destination. */
1950 if (start < finish)
1952 inner = GET_MODE (tmps[start]);
1953 bytepos = subreg_lowpart_offset (inner, outer);
1954 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1956 temp = simplify_gen_subreg (outer, tmps[start],
1957 inner, 0);
1958 if (temp)
1960 emit_move_insn (dst, temp);
1961 done = true;
1962 start++;
1967 /* If the first element wasn't the low part, try the last. */
1968 if (!done
1969 && start < finish - 1)
1971 inner = GET_MODE (tmps[finish - 1]);
1972 bytepos = subreg_lowpart_offset (inner, outer);
1973 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1975 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1976 inner, 0);
1977 if (temp)
1979 emit_move_insn (dst, temp);
1980 done = true;
1981 finish--;
1986 /* Otherwise, simply initialize the result to zero. */
1987 if (!done)
1988 emit_move_insn (dst, CONST0_RTX (outer));
1991 /* Process the pieces. */
1992 for (i = start; i < finish; i++)
1994 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1995 enum machine_mode mode = GET_MODE (tmps[i]);
1996 unsigned int bytelen = GET_MODE_SIZE (mode);
1997 unsigned int adj_bytelen = bytelen;
1998 rtx dest = dst;
2000 /* Handle trailing fragments that run over the size of the struct. */
2001 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2002 adj_bytelen = ssize - bytepos;
2004 if (GET_CODE (dst) == CONCAT)
2006 if (bytepos + adj_bytelen
2007 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2008 dest = XEXP (dst, 0);
2009 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2011 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2012 dest = XEXP (dst, 1);
2014 else
2016 enum machine_mode dest_mode = GET_MODE (dest);
2017 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2019 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2021 if (GET_MODE_ALIGNMENT (dest_mode)
2022 >= GET_MODE_ALIGNMENT (tmp_mode))
2024 dest = assign_stack_temp (dest_mode,
2025 GET_MODE_SIZE (dest_mode),
2027 emit_move_insn (adjust_address (dest,
2028 tmp_mode,
2029 bytepos),
2030 tmps[i]);
2031 dst = dest;
2033 else
2035 dest = assign_stack_temp (tmp_mode,
2036 GET_MODE_SIZE (tmp_mode),
2038 emit_move_insn (dest, tmps[i]);
2039 dst = adjust_address (dest, dest_mode, bytepos);
2041 break;
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2049 if (
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2053 #else
2054 BYTES_BIG_ENDIAN
2055 #endif
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 build_int_cst (NULL_TREE, shift),
2061 tmps[i], 0);
2063 bytelen = adj_bytelen;
2066 /* Optimize the access just a bit. */
2067 if (MEM_P (dest)
2068 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2069 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2070 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2071 && bytelen == GET_MODE_SIZE (mode))
2072 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2073 else
2074 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2075 mode, tmps[i]);
2078 /* Copy from the pseudo into the (probable) hard reg. */
2079 if (orig_dst != dst)
2080 emit_move_insn (orig_dst, dst);
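/* Usage sketch (illustrative only, not part of this file): to spill a
   value returned in a register group to a stack temporary, one might
   write

       HOST_WIDE_INT size = int_size_in_bytes (type);
       rtx slot = assign_stack_temp (BLKmode, size, 0);
       emit_group_store (slot, retval_parallel, type, size);

   RETVAL_PARALLEL and TYPE stand for the caller's values;
   assign_stack_temp is the same helper used above.  */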
2083 /* Generate code to copy a BLKmode object of TYPE out of a
2084 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2085 is null, a stack temporary is created. TGTBLK is returned.
2087 The purpose of this routine is to handle functions that return
2088 BLKmode structures in registers. Some machines (the PA for example)
2089 want to return all small structures in registers regardless of the
2090 structure's alignment. */
2093 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2095 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2096 rtx src = NULL, dst = NULL;
2097 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2098 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2099 enum machine_mode copy_mode;
2101 if (tgtblk == 0)
2103 tgtblk = assign_temp (build_qualified_type (type,
2104 (TYPE_QUALS (type)
2105 | TYPE_QUAL_CONST)),
2106 0, 1, 1);
2107 preserve_temp_slots (tgtblk);
2110 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2111 into a new pseudo which is a full word. */
2113 if (GET_MODE (srcreg) != BLKmode
2114 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2115 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2117 /* If the structure doesn't take up a whole number of words, see whether
2118 SRCREG is padded on the left or on the right. If it's on the left,
2119 set PADDING_CORRECTION to the number of bits to skip.
2121 In most ABIs, the structure will be returned at the least significant end of
2122 the register, which translates to right padding on little-endian
2123 targets and left padding on big-endian targets. The opposite
2124 holds if the structure is returned at the most significant
2125 end of the register. */
2126 if (bytes % UNITS_PER_WORD != 0
2127 && (targetm.calls.return_in_msb (type)
2128 ? !BYTES_BIG_ENDIAN
2129 : BYTES_BIG_ENDIAN))
2130 padding_correction
2131 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2133 /* Copy the structure BITSIZE bits at a time. If the target lives in
2134 memory, take care of not reading/writing past its end by selecting
2135 a copy mode suited to BITSIZE. This should always be possible given
2136 how it is computed.
2138 We could probably emit more efficient code for machines which do not use
2139 strict alignment, but it doesn't seem worth the effort at the current
2140 time. */
2142 copy_mode = word_mode;
2143 if (MEM_P (tgtblk))
2145 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2146 if (mem_mode != BLKmode)
2147 copy_mode = mem_mode;
2150 for (bitpos = 0, xbitpos = padding_correction;
2151 bitpos < bytes * BITS_PER_UNIT;
2152 bitpos += bitsize, xbitpos += bitsize)
2154 /* We need a new source operand each time xbitpos is on a
2155 word boundary and when xbitpos == padding_correction
2156 (the first time through). */
2157 if (xbitpos % BITS_PER_WORD == 0
2158 || xbitpos == padding_correction)
2159 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2160 GET_MODE (srcreg));
2162 /* We need a new destination operand each time bitpos is on
2163 a word boundary. */
2164 if (bitpos % BITS_PER_WORD == 0)
2165 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2167 /* Use xbitpos for the source extraction (right justified) and
2168 bitpos for the destination store (left justified). */
2169 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2170 extract_bit_field (src, bitsize,
2171 xbitpos % BITS_PER_WORD, 1,
2172 NULL_RTX, copy_mode, copy_mode));
2175 return tgtblk;
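/* Usage sketch (illustrative only, not part of this file): after
   expanding a call whose BLKmode result comes back in hard registers,
   the result can be spilled to memory with

       rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   Passing NULL_RTX for TGTBLK lets the routine create the stack
   temporary itself; the MEM it returns is then used as the value of the
   call.  HARD_RETURN_REG and TYPE stand for the caller's values.  */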
2178 /* Add a USE expression for REG to the (possibly empty) list pointed
2179 to by CALL_FUSAGE. REG must denote a hard register. */
2181 void
2182 use_reg (rtx *call_fusage, rtx reg)
2184 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2186 *call_fusage
2187 = gen_rtx_EXPR_LIST (VOIDmode,
2188 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2191 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2192 starting at REGNO. All of these registers must be hard registers. */
2194 void
2195 use_regs (rtx *call_fusage, int regno, int nregs)
2197 int i;
2199 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2201 for (i = 0; i < nregs; i++)
2202 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2205 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2206 PARALLEL REGS. This is for calls that pass values in multiple
2207 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209 void
2210 use_group_regs (rtx *call_fusage, rtx regs)
2212 int i;
2214 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2218 /* A NULL entry means the parameter goes both on the stack and in
2219 registers. This can also be a MEM for targets that pass values
2220 partially on the stack and partially in registers. */
2221 if (reg != 0 && REG_P (reg))
2222 use_reg (call_fusage, reg);
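/* Usage sketch (illustrative only, not part of this file): call expansion
   collects these USEs in a local list and then attaches it to the call
   insn:

       rtx call_fusage = NULL_RTX;
       use_regs (&call_fusage, first_arg_regno, nregs_used);
       ... emit the call ...
       CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

   FIRST_ARG_REGNO, NREGS_USED and CALL_INSN stand for call-specific
   values the caller already has.  */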
2226 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2227 assignment and the code of the expression on the RHS is CODE. Return
2228 NULL otherwise. */
2230 static gimple
2231 get_def_for_expr (tree name, enum tree_code code)
2233 gimple def_stmt;
2235 if (TREE_CODE (name) != SSA_NAME)
2236 return NULL;
2238 def_stmt = get_gimple_for_ssa_name (name);
2239 if (!def_stmt
2240 || gimple_assign_rhs_code (def_stmt) != code)
2241 return NULL;
2243 return def_stmt;
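/* Usage sketch (illustrative only, not part of this file): expansion code
   uses this to peek through an SSA name, e.g. to check whether an operand
   of a PLUS_EXPR was produced by a multiplication:

       gimple def = get_def_for_expr (op0, MULT_EXPR);
       if (def)
         {
           tree mul_op0 = gimple_assign_rhs1 (def);
           tree mul_op1 = gimple_assign_rhs2 (def);
           ...
         }

   OP0 stands for the operand at hand; gimple_assign_rhs1/rhs2 are the
   usual accessors for the defining statement's operands.  */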
2247 /* Determine whether the LEN bytes generated by CONSTFUN can be
2248 stored to memory using several move instructions. CONSTFUNDATA is
2249 a pointer which will be passed as argument in every CONSTFUN call.
2250 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2251 a memset operation and false if it's a copy of a constant string.
2252 Return nonzero if a call to store_by_pieces should succeed. */
2255 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2256 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2257 void *constfundata, unsigned int align, bool memsetp)
2259 unsigned HOST_WIDE_INT l;
2260 unsigned int max_size;
2261 HOST_WIDE_INT offset = 0;
2262 enum machine_mode mode, tmode;
2263 enum insn_code icode;
2264 int reverse;
2265 rtx cst;
2267 if (len == 0)
2268 return 1;
2270 if (! (memsetp
2271 ? SET_BY_PIECES_P (len, align)
2272 : STORE_BY_PIECES_P (len, align)))
2273 return 0;
2275 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2276 if (align >= GET_MODE_ALIGNMENT (tmode))
2277 align = GET_MODE_ALIGNMENT (tmode);
2278 else
2280 enum machine_mode xmode;
2282 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2283 tmode != VOIDmode;
2284 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2285 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2286 || SLOW_UNALIGNED_ACCESS (tmode, align))
2287 break;
2289 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2292 /* We would first store what we can in the largest integer mode, then go to
2293 successively smaller modes. */
2295 for (reverse = 0;
2296 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2297 reverse++)
2299 l = len;
2300 mode = VOIDmode;
2301 max_size = STORE_MAX_PIECES + 1;
2302 while (max_size > 1)
2304 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2305 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2306 if (GET_MODE_SIZE (tmode) < max_size)
2307 mode = tmode;
2309 if (mode == VOIDmode)
2310 break;
2312 icode = optab_handler (mov_optab, mode);
2313 if (icode != CODE_FOR_nothing
2314 && align >= GET_MODE_ALIGNMENT (mode))
2316 unsigned int size = GET_MODE_SIZE (mode);
2318 while (l >= size)
2320 if (reverse)
2321 offset -= size;
2323 cst = (*constfun) (constfundata, offset, mode);
2324 if (!LEGITIMATE_CONSTANT_P (cst))
2325 return 0;
2327 if (!reverse)
2328 offset += size;
2330 l -= size;
2334 max_size = GET_MODE_SIZE (mode);
2337 /* The code above should have handled everything. */
2338 gcc_assert (!l);
2341 return 1;
2344 /* Generate several move instructions to store LEN bytes generated by
2345 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2346 pointer which will be passed as argument in every CONSTFUN call.
2347 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2348 a memset operation and false if it's a copy of a constant string.
2349 If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
2350 mempcpy; and if ENDP is 2 return memory at the end minus one byte ala
2351 stpcpy. */
2354 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2355 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2356 void *constfundata, unsigned int align, bool memsetp, int endp)
2358 enum machine_mode to_addr_mode
2359 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2360 struct store_by_pieces_d data;
2362 if (len == 0)
2364 gcc_assert (endp != 2);
2365 return to;
2368 gcc_assert (memsetp
2369 ? SET_BY_PIECES_P (len, align)
2370 : STORE_BY_PIECES_P (len, align));
2371 data.constfun = constfun;
2372 data.constfundata = constfundata;
2373 data.len = len;
2374 data.to = to;
2375 store_by_pieces_1 (&data, align);
2376 if (endp)
2378 rtx to1;
2380 gcc_assert (!data.reverse);
2381 if (data.autinc_to)
2383 if (endp == 2)
2385 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2386 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2387 else
2388 data.to_addr = copy_to_mode_reg (to_addr_mode,
2389 plus_constant (data.to_addr,
2390 -1));
2392 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2393 data.offset);
2395 else
2397 if (endp == 2)
2398 --data.offset;
2399 to1 = adjust_address (data.to, QImode, data.offset);
2401 return to1;
2403 else
2404 return data.to;
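/* Usage sketch (illustrative only, not part of this file): a caller that
   can produce the bytes of a constant on demand first asks whether the
   by-pieces strategy is acceptable and then commits to it:

       if (can_store_by_pieces (len, my_const_byte_fn, my_data, align, true))
         store_by_pieces (dest_mem, len, my_const_byte_fn, my_data,
                          align, true, 0);

   MY_CONST_BYTE_FN is a hypothetical callback with the CONSTFUN signature
   documented above (for instance one that returns the same byte for every
   offset, much like clear_by_pieces_1 below); DEST_MEM, LEN, MY_DATA and
   ALIGN stand for the caller's values.  Passing 0 for ENDP asks for the
   original TO to be returned.  */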
2407 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2408 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2410 static void
2411 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2413 struct store_by_pieces_d data;
2415 if (len == 0)
2416 return;
2418 data.constfun = clear_by_pieces_1;
2419 data.constfundata = NULL;
2420 data.len = len;
2421 data.to = to;
2422 store_by_pieces_1 (&data, align);
2425 /* Callback routine for clear_by_pieces.
2426 Return const0_rtx unconditionally. */
2428 static rtx
2429 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2430 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2431 enum machine_mode mode ATTRIBUTE_UNUSED)
2433 return const0_rtx;
2436 /* Subroutine of clear_by_pieces and store_by_pieces.
2437 Generate several move instructions to store LEN bytes of block TO. (A MEM
2438 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2440 static void
2441 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2442 unsigned int align ATTRIBUTE_UNUSED)
2444 enum machine_mode to_addr_mode
2445 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2446 rtx to_addr = XEXP (data->to, 0);
2447 unsigned int max_size = STORE_MAX_PIECES + 1;
2448 enum machine_mode mode = VOIDmode, tmode;
2449 enum insn_code icode;
2451 data->offset = 0;
2452 data->to_addr = to_addr;
2453 data->autinc_to
2454 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2455 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2457 data->explicit_inc_to = 0;
2458 data->reverse
2459 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2460 if (data->reverse)
2461 data->offset = data->len;
2463 /* If storing requires more than two move insns,
2464 copy addresses to registers (to make displacements shorter)
2465 and use post-increment if available. */
2466 if (!data->autinc_to
2467 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2469 /* Determine the main mode we'll be using. */
2470 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2471 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2472 if (GET_MODE_SIZE (tmode) < max_size)
2473 mode = tmode;
2475 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2477 data->to_addr = copy_to_mode_reg (to_addr_mode,
2478 plus_constant (to_addr, data->len));
2479 data->autinc_to = 1;
2480 data->explicit_inc_to = -1;
2483 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2484 && ! data->autinc_to)
2486 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2487 data->autinc_to = 1;
2488 data->explicit_inc_to = 1;
2491 if ( !data->autinc_to && CONSTANT_P (to_addr))
2492 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2495 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2496 if (align >= GET_MODE_ALIGNMENT (tmode))
2497 align = GET_MODE_ALIGNMENT (tmode);
2498 else
2500 enum machine_mode xmode;
2502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2503 tmode != VOIDmode;
2504 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2505 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2506 || SLOW_UNALIGNED_ACCESS (tmode, align))
2507 break;
2509 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2512 /* First store what we can in the largest integer mode, then go to
2513 successively smaller modes. */
2515 while (max_size > 1)
2517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2519 if (GET_MODE_SIZE (tmode) < max_size)
2520 mode = tmode;
2522 if (mode == VOIDmode)
2523 break;
2525 icode = optab_handler (mov_optab, mode);
2526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2527 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2529 max_size = GET_MODE_SIZE (mode);
2532 /* The code above should have handled everything. */
2533 gcc_assert (!data->len);
2536 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2537 with move instructions for mode MODE. GENFUN is the gen_... function
2538 to make a move insn for that mode. DATA has all the other info. */
2540 static void
2541 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2542 struct store_by_pieces_d *data)
2544 unsigned int size = GET_MODE_SIZE (mode);
2545 rtx to1, cst;
2547 while (data->len >= size)
2549 if (data->reverse)
2550 data->offset -= size;
2552 if (data->autinc_to)
2553 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2554 data->offset);
2555 else
2556 to1 = adjust_address (data->to, mode, data->offset);
2558 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2559 emit_insn (gen_add2_insn (data->to_addr,
2560 GEN_INT (-(HOST_WIDE_INT) size)));
2562 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2563 emit_insn ((*genfun) (to1, cst));
2565 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2566 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2568 if (! data->reverse)
2569 data->offset += size;
2571 data->len -= size;
2575 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2576 its length in bytes. */
2579 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2580 unsigned int expected_align, HOST_WIDE_INT expected_size)
2582 enum machine_mode mode = GET_MODE (object);
2583 unsigned int align;
2585 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2587 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2588 just move a zero. Otherwise, do this a piece at a time. */
2589 if (mode != BLKmode
2590 && CONST_INT_P (size)
2591 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2593 rtx zero = CONST0_RTX (mode);
2594 if (zero != NULL)
2596 emit_move_insn (object, zero);
2597 return NULL;
2600 if (COMPLEX_MODE_P (mode))
2602 zero = CONST0_RTX (GET_MODE_INNER (mode));
2603 if (zero != NULL)
2605 write_complex_part (object, zero, 0);
2606 write_complex_part (object, zero, 1);
2607 return NULL;
2612 if (size == const0_rtx)
2613 return NULL;
2615 align = MEM_ALIGN (object);
2617 if (CONST_INT_P (size)
2618 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2619 clear_by_pieces (object, INTVAL (size), align);
2620 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2621 expected_align, expected_size))
2623 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2624 return set_storage_via_libcall (object, size, const0_rtx,
2625 method == BLOCK_OP_TAILCALL);
2626 else
2627 gcc_unreachable ();
2629 return NULL;
2633 clear_storage (rtx object, rtx size, enum block_op_methods method)
2635 return clear_storage_hints (object, size, method, 0, -1);
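/* Usage sketch (illustrative only, not part of this file): zeroing a
   BLKmode stack temporary of SIZE bytes is simply

       clear_storage (slot, GEN_INT (size), BLOCK_OP_NORMAL);

   where SLOT is a BLKmode MEM such as one returned by assign_stack_temp
   and SIZE is a caller-supplied byte count.  */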
2639 /* A subroutine of clear_storage. Expand a call to memset.
2640 Return the return value of memset, 0 otherwise. */
2643 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2645 tree call_expr, fn, object_tree, size_tree, val_tree;
2646 enum machine_mode size_mode;
2647 rtx retval;
2649 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2650 place those new pseudos into a VAR_DECL and use them later. */
2652 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2654 size_mode = TYPE_MODE (sizetype);
2655 size = convert_to_mode (size_mode, size, 1);
2656 size = copy_to_mode_reg (size_mode, size);
2658 /* It is incorrect to use the libcall calling conventions to call
2659 memset in this context. This could be a user call to memset and
2660 the user may wish to examine the return value from memset. For
2661 targets where libcalls and normal calls have different conventions
2662 for returning pointers, we could end up generating incorrect code. */
2664 object_tree = make_tree (ptr_type_node, object);
2665 if (!CONST_INT_P (val))
2666 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2667 size_tree = make_tree (sizetype, size);
2668 val_tree = make_tree (integer_type_node, val);
2670 fn = clear_storage_libcall_fn (true);
2671 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2672 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2674 retval = expand_normal (call_expr);
2676 return retval;
2679 /* A subroutine of set_storage_via_libcall. Create the tree node
2680 for the function we use for block clears. The first time FOR_CALL
2681 is true, we call assemble_external. */
2683 tree block_clear_fn;
2685 void
2686 init_block_clear_fn (const char *asmspec)
2688 if (!block_clear_fn)
2690 tree fn, args;
2692 fn = get_identifier ("memset");
2693 args = build_function_type_list (ptr_type_node, ptr_type_node,
2694 integer_type_node, sizetype,
2695 NULL_TREE);
2697 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2698 DECL_EXTERNAL (fn) = 1;
2699 TREE_PUBLIC (fn) = 1;
2700 DECL_ARTIFICIAL (fn) = 1;
2701 TREE_NOTHROW (fn) = 1;
2702 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2703 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2705 block_clear_fn = fn;
2708 if (asmspec)
2709 set_user_assembler_name (block_clear_fn, asmspec);
2712 static tree
2713 clear_storage_libcall_fn (int for_call)
2715 static bool emitted_extern;
2717 if (!block_clear_fn)
2718 init_block_clear_fn (NULL);
2720 if (for_call && !emitted_extern)
2722 emitted_extern = true;
2723 make_decl_rtl (block_clear_fn);
2724 assemble_external (block_clear_fn);
2727 return block_clear_fn;
2730 /* Expand a setmem pattern; return true if successful. */
2732 bool
2733 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2734 unsigned int expected_align, HOST_WIDE_INT expected_size)
2736 /* Try the most limited insn first, because there's no point
2737 including more than one in the machine description unless
2738 the more limited one has some advantage. */
2740 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2741 enum machine_mode mode;
2743 if (expected_align < align)
2744 expected_align = align;
2746 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2747 mode = GET_MODE_WIDER_MODE (mode))
2749 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2750 insn_operand_predicate_fn pred;
2752 if (code != CODE_FOR_nothing
2753 /* We don't need MODE to be narrower than
2754 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2755 the mode mask, as it is returned by the macro, it will
2756 definitely be less than the actual mode mask. */
2757 && ((CONST_INT_P (size)
2758 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2759 <= (GET_MODE_MASK (mode) >> 1)))
2760 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2761 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2762 || (*pred) (object, BLKmode))
2763 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2764 || (*pred) (opalign, VOIDmode)))
2766 rtx opsize, opchar;
2767 enum machine_mode char_mode;
2768 rtx last = get_last_insn ();
2769 rtx pat;
2771 opsize = convert_to_mode (mode, size, 1);
2772 pred = insn_data[(int) code].operand[1].predicate;
2773 if (pred != 0 && ! (*pred) (opsize, mode))
2774 opsize = copy_to_mode_reg (mode, opsize);
2776 opchar = val;
2777 char_mode = insn_data[(int) code].operand[2].mode;
2778 if (char_mode != VOIDmode)
2780 opchar = convert_to_mode (char_mode, opchar, 1);
2781 pred = insn_data[(int) code].operand[2].predicate;
2782 if (pred != 0 && ! (*pred) (opchar, char_mode))
2783 opchar = copy_to_mode_reg (char_mode, opchar);
2786 if (insn_data[(int) code].n_operands == 4)
2787 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2788 else
2789 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2790 GEN_INT (expected_align
2791 / BITS_PER_UNIT),
2792 GEN_INT (expected_size));
2793 if (pat)
2795 emit_insn (pat);
2796 return true;
2798 else
2799 delete_insns_since (last);
2803 return false;
2807 /* Write to one of the components of the complex value CPLX. Write VAL to
2808 the real part if IMAG_P is false, and the imaginary part if it's true. */
2810 static void
2811 write_complex_part (rtx cplx, rtx val, bool imag_p)
2813 enum machine_mode cmode;
2814 enum machine_mode imode;
2815 unsigned ibitsize;
2817 if (GET_CODE (cplx) == CONCAT)
2819 emit_move_insn (XEXP (cplx, imag_p), val);
2820 return;
2823 cmode = GET_MODE (cplx);
2824 imode = GET_MODE_INNER (cmode);
2825 ibitsize = GET_MODE_BITSIZE (imode);
2827 /* For MEMs simplify_gen_subreg may generate an invalid new address
2828 because, e.g., the original address is considered mode-dependent
2829 by the target, which restricts simplify_subreg from invoking
2830 adjust_address_nv. Instead of preparing fallback support for an
2831 invalid address, we call adjust_address_nv directly. */
2832 if (MEM_P (cplx))
2834 emit_move_insn (adjust_address_nv (cplx, imode,
2835 imag_p ? GET_MODE_SIZE (imode) : 0),
2836 val);
2837 return;
2840 /* If the sub-object is at least word sized, then we know that subregging
2841 will work. This special case is important, since store_bit_field
2842 wants to operate on integer modes, and there's rarely an OImode to
2843 correspond to TCmode. */
2844 if (ibitsize >= BITS_PER_WORD
2845 /* For hard regs we have exact predicates. Assume we can split
2846 the original object if it spans an even number of hard regs.
2847 This special case is important for SCmode on 64-bit platforms
2848 where the natural size of floating-point regs is 32-bit. */
2849 || (REG_P (cplx)
2850 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2851 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2853 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2854 imag_p ? GET_MODE_SIZE (imode) : 0);
2855 if (part)
2857 emit_move_insn (part, val);
2858 return;
2860 else
2861 /* simplify_gen_subreg may fail for sub-word MEMs. */
2862 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2865 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2868 /* Extract one of the components of the complex value CPLX. Extract the
2869 real part if IMAG_P is false, and the imaginary part if it's true. */
2871 static rtx
2872 read_complex_part (rtx cplx, bool imag_p)
2874 enum machine_mode cmode, imode;
2875 unsigned ibitsize;
2877 if (GET_CODE (cplx) == CONCAT)
2878 return XEXP (cplx, imag_p);
2880 cmode = GET_MODE (cplx);
2881 imode = GET_MODE_INNER (cmode);
2882 ibitsize = GET_MODE_BITSIZE (imode);
2884 /* Special case reads from complex constants that got spilled to memory. */
2885 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2887 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2888 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2890 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2891 if (CONSTANT_CLASS_P (part))
2892 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2896 /* For MEMs simplify_gen_subreg may generate an invalid new address
2897 because, e.g., the original address is considered mode-dependent
2898 by the target, which restricts simplify_subreg from invoking
2899 adjust_address_nv. Instead of preparing fallback support for an
2900 invalid address, we call adjust_address_nv directly. */
2901 if (MEM_P (cplx))
2902 return adjust_address_nv (cplx, imode,
2903 imag_p ? GET_MODE_SIZE (imode) : 0);
2905 /* If the sub-object is at least word sized, then we know that subregging
2906 will work. This special case is important, since extract_bit_field
2907 wants to operate on integer modes, and there's rarely an OImode to
2908 correspond to TCmode. */
2909 if (ibitsize >= BITS_PER_WORD
2910 /* For hard regs we have exact predicates. Assume we can split
2911 the original object if it spans an even number of hard regs.
2912 This special case is important for SCmode on 64-bit platforms
2913 where the natural size of floating-point regs is 32-bit. */
2914 || (REG_P (cplx)
2915 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2916 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2918 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2919 imag_p ? GET_MODE_SIZE (imode) : 0);
2920 if (ret)
2921 return ret;
2922 else
2923 /* simplify_gen_subreg may fail for sub-word MEMs. */
2924 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2927 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2928 true, NULL_RTX, imode, imode);
2931 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2932 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2933 represented in NEW_MODE. If FORCE is true, this will never happen, as
2934 we'll force-create a SUBREG if needed. */
2936 static rtx
2937 emit_move_change_mode (enum machine_mode new_mode,
2938 enum machine_mode old_mode, rtx x, bool force)
2940 rtx ret;
2942 if (push_operand (x, GET_MODE (x)))
2944 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2945 MEM_COPY_ATTRIBUTES (ret, x);
2947 else if (MEM_P (x))
2949 /* We don't have to worry about changing the address since the
2950 size in bytes is supposed to be the same. */
2951 if (reload_in_progress)
2953 /* Copy the MEM to change the mode and move any
2954 substitutions from the old MEM to the new one. */
2955 ret = adjust_address_nv (x, new_mode, 0);
2956 copy_replacements (x, ret);
2958 else
2959 ret = adjust_address (x, new_mode, 0);
2961 else
2963 /* Note that we do want simplify_subreg's behavior of validating
2964 that the new mode is ok for a hard register. If we were to use
2965 simplify_gen_subreg, we would create the subreg, but would
2966 probably run into the target not being able to implement it. */
2967 /* Except, of course, when FORCE is true, when this is exactly what
2968 we want. Which is needed for CCmodes on some targets. */
2969 if (force)
2970 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2971 else
2972 ret = simplify_subreg (new_mode, x, old_mode, 0);
2975 return ret;
2978 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2979 an integer mode of the same size as MODE. Returns the instruction
2980 emitted, or NULL if such a move could not be generated. */
2982 static rtx
2983 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2985 enum machine_mode imode;
2986 enum insn_code code;
2988 /* There must exist a mode of the exact size we require. */
2989 imode = int_mode_for_mode (mode);
2990 if (imode == BLKmode)
2991 return NULL_RTX;
2993 /* The target must support moves in this mode. */
2994 code = optab_handler (mov_optab, imode);
2995 if (code == CODE_FOR_nothing)
2996 return NULL_RTX;
2998 x = emit_move_change_mode (imode, mode, x, force);
2999 if (x == NULL_RTX)
3000 return NULL_RTX;
3001 y = emit_move_change_mode (imode, mode, y, force);
3002 if (y == NULL_RTX)
3003 return NULL_RTX;
3004 return emit_insn (GEN_FCN (code) (x, y));
3007 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3008 Return an equivalent MEM that does not use an auto-increment. */
3010 static rtx
3011 emit_move_resolve_push (enum machine_mode mode, rtx x)
3013 enum rtx_code code = GET_CODE (XEXP (x, 0));
3014 HOST_WIDE_INT adjust;
3015 rtx temp;
3017 adjust = GET_MODE_SIZE (mode);
3018 #ifdef PUSH_ROUNDING
3019 adjust = PUSH_ROUNDING (adjust);
3020 #endif
3021 if (code == PRE_DEC || code == POST_DEC)
3022 adjust = -adjust;
3023 else if (code == PRE_MODIFY || code == POST_MODIFY)
3025 rtx expr = XEXP (XEXP (x, 0), 1);
3026 HOST_WIDE_INT val;
3028 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3029 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3030 val = INTVAL (XEXP (expr, 1));
3031 if (GET_CODE (expr) == MINUS)
3032 val = -val;
3033 gcc_assert (adjust == val || adjust == -val);
3034 adjust = val;
3037 /* Do not use anti_adjust_stack, since we don't want to update
3038 stack_pointer_delta. */
3039 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3040 GEN_INT (adjust), stack_pointer_rtx,
3041 0, OPTAB_LIB_WIDEN);
3042 if (temp != stack_pointer_rtx)
3043 emit_move_insn (stack_pointer_rtx, temp);
3045 switch (code)
3047 case PRE_INC:
3048 case PRE_DEC:
3049 case PRE_MODIFY:
3050 temp = stack_pointer_rtx;
3051 break;
3052 case POST_INC:
3053 case POST_DEC:
3054 case POST_MODIFY:
3055 temp = plus_constant (stack_pointer_rtx, -adjust);
3056 break;
3057 default:
3058 gcc_unreachable ();
3061 return replace_equiv_address (x, temp);
3064 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3065 X is known to satisfy push_operand, and MODE is known to be complex.
3066 Returns the last instruction emitted. */
3069 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3071 enum machine_mode submode = GET_MODE_INNER (mode);
3072 bool imag_first;
3074 #ifdef PUSH_ROUNDING
3075 unsigned int submodesize = GET_MODE_SIZE (submode);
3077 /* In case we output to the stack, but the size is smaller than the
3078 machine can push exactly, we need to use move instructions. */
3079 if (PUSH_ROUNDING (submodesize) != submodesize)
3081 x = emit_move_resolve_push (mode, x);
3082 return emit_move_insn (x, y);
3084 #endif
3086 /* Note that the real part always precedes the imag part in memory
3087 regardless of machine's endianness. */
3088 switch (GET_CODE (XEXP (x, 0)))
3090 case PRE_DEC:
3091 case POST_DEC:
3092 imag_first = true;
3093 break;
3094 case PRE_INC:
3095 case POST_INC:
3096 imag_first = false;
3097 break;
3098 default:
3099 gcc_unreachable ();
3102 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3103 read_complex_part (y, imag_first));
3104 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3105 read_complex_part (y, !imag_first));
3108 /* A subroutine of emit_move_complex. Perform the move from Y to X
3109 via two moves of the parts. Returns the last instruction emitted. */
3112 emit_move_complex_parts (rtx x, rtx y)
3114 /* Show the output dies here. This is necessary for SUBREGs
3115 of pseudos since we cannot track their lifetimes correctly;
3116 hard regs shouldn't appear here except as return values. */
3117 if (!reload_completed && !reload_in_progress
3118 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3119 emit_clobber (x);
3121 write_complex_part (x, read_complex_part (y, false), false);
3122 write_complex_part (x, read_complex_part (y, true), true);
3124 return get_last_insn ();
3127 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3128 MODE is known to be complex. Returns the last instruction emitted. */
3130 static rtx
3131 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3133 bool try_int;
3135 /* Need to take special care for pushes, to maintain proper ordering
3136 of the data, and possibly extra padding. */
3137 if (push_operand (x, mode))
3138 return emit_move_complex_push (mode, x, y);
3140 /* See if we can coerce the target into moving both values at once. */
3142 /* Move floating point as parts. */
3143 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3144 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3145 try_int = false;
3146 /* Not possible if the values are inherently not adjacent. */
3147 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3148 try_int = false;
3149 /* Is possible if both are registers (or subregs of registers). */
3150 else if (register_operand (x, mode) && register_operand (y, mode))
3151 try_int = true;
3152 /* If one of the operands is a memory, and alignment constraints
3153 are friendly enough, we may be able to do combined memory operations.
3154 We do not attempt this if Y is a constant because that combination is
3155 usually better with the by-parts thing below. */
3156 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3157 && (!STRICT_ALIGNMENT
3158 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3159 try_int = true;
3160 else
3161 try_int = false;
3163 if (try_int)
3165 rtx ret;
3167 /* For memory to memory moves, optimal behavior can be had with the
3168 existing block move logic. */
3169 if (MEM_P (x) && MEM_P (y))
3171 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3172 BLOCK_OP_NO_LIBCALL);
3173 return get_last_insn ();
3176 ret = emit_move_via_integer (mode, x, y, true);
3177 if (ret)
3178 return ret;
3181 return emit_move_complex_parts (x, y);
3184 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3185 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3187 static rtx
3188 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3190 rtx ret;
3192 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3193 if (mode != CCmode)
3195 enum insn_code code = optab_handler (mov_optab, CCmode);
3196 if (code != CODE_FOR_nothing)
3198 x = emit_move_change_mode (CCmode, mode, x, true);
3199 y = emit_move_change_mode (CCmode, mode, y, true);
3200 return emit_insn (GEN_FCN (code) (x, y));
3204 /* Otherwise, find the MODE_INT mode of the same width. */
3205 ret = emit_move_via_integer (mode, x, y, false);
3206 gcc_assert (ret != NULL);
3207 return ret;
3210 /* Return true if word I of OP lies entirely in the
3211 undefined bits of a paradoxical subreg. */
3213 static bool
3214 undefined_operand_subword_p (const_rtx op, int i)
3216 enum machine_mode innermode, innermostmode;
3217 int offset;
3218 if (GET_CODE (op) != SUBREG)
3219 return false;
3220 innermode = GET_MODE (op);
3221 innermostmode = GET_MODE (SUBREG_REG (op));
3222 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3223 /* The SUBREG_BYTE represents offset, as if the value were stored in
3224 memory, except for a paradoxical subreg where we define
3225 SUBREG_BYTE to be 0; undo this exception as in
3226 simplify_subreg. */
3227 if (SUBREG_BYTE (op) == 0
3228 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3230 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3231 if (WORDS_BIG_ENDIAN)
3232 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3233 if (BYTES_BIG_ENDIAN)
3234 offset += difference % UNITS_PER_WORD;
3236 if (offset >= GET_MODE_SIZE (innermostmode)
3237 || offset <= -GET_MODE_SIZE (word_mode))
3238 return true;
3239 return false;
3242 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3243 MODE is any multi-word or full-word mode that lacks a move_insn
3244 pattern. Note that you will get better code if you define such
3245 patterns, even if they must turn into multiple assembler instructions. */
3247 static rtx
3248 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3250 rtx last_insn = 0;
3251 rtx seq, inner;
3252 bool need_clobber;
3253 int i;
3255 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3257 /* If X is a push on the stack, do the push now and replace
3258 X with a reference to the stack pointer. */
3259 if (push_operand (x, mode))
3260 x = emit_move_resolve_push (mode, x);
3262 /* If we are in reload, see if either operand is a MEM whose address
3263 is scheduled for replacement. */
3264 if (reload_in_progress && MEM_P (x)
3265 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3266 x = replace_equiv_address_nv (x, inner);
3267 if (reload_in_progress && MEM_P (y)
3268 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3269 y = replace_equiv_address_nv (y, inner);
3271 start_sequence ();
3273 need_clobber = false;
3274 for (i = 0;
3275 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3276 i++)
3278 rtx xpart = operand_subword (x, i, 1, mode);
3279 rtx ypart;
3281 /* Do not generate code for a move if it would come entirely
3282 from the undefined bits of a paradoxical subreg. */
3283 if (undefined_operand_subword_p (y, i))
3284 continue;
3286 ypart = operand_subword (y, i, 1, mode);
3288 /* If we can't get a part of Y, put Y into memory if it is a
3289 constant. Otherwise, force it into a register. Then we must
3290 be able to get a part of Y. */
3291 if (ypart == 0 && CONSTANT_P (y))
3293 y = use_anchored_address (force_const_mem (mode, y));
3294 ypart = operand_subword (y, i, 1, mode);
3296 else if (ypart == 0)
3297 ypart = operand_subword_force (y, i, mode);
3299 gcc_assert (xpart && ypart);
3301 need_clobber |= (GET_CODE (xpart) == SUBREG);
3303 last_insn = emit_move_insn (xpart, ypart);
3306 seq = get_insns ();
3307 end_sequence ();
3309 /* Show the output dies here. This is necessary for SUBREGs
3310 of pseudos since we cannot track their lifetimes correctly;
3311 hard regs shouldn't appear here except as return values.
3312 We never want to emit such a clobber after reload. */
3313 if (x != y
3314 && ! (reload_in_progress || reload_completed)
3315 && need_clobber != 0)
3316 emit_clobber (x);
3318 emit_insn (seq);
3320 return last_insn;
3323 /* Low level part of emit_move_insn.
3324 Called just like emit_move_insn, but assumes X and Y
3325 are basically valid. */
3328 emit_move_insn_1 (rtx x, rtx y)
3330 enum machine_mode mode = GET_MODE (x);
3331 enum insn_code code;
3333 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3335 code = optab_handler (mov_optab, mode);
3336 if (code != CODE_FOR_nothing)
3337 return emit_insn (GEN_FCN (code) (x, y));
3339 /* Expand complex moves by moving real part and imag part. */
3340 if (COMPLEX_MODE_P (mode))
3341 return emit_move_complex (mode, x, y);
3343 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3344 || ALL_FIXED_POINT_MODE_P (mode))
3346 rtx result = emit_move_via_integer (mode, x, y, true);
3348 /* If we can't find an integer mode, use multi words. */
3349 if (result)
3350 return result;
3351 else
3352 return emit_move_multi_word (mode, x, y);
3355 if (GET_MODE_CLASS (mode) == MODE_CC)
3356 return emit_move_ccmode (mode, x, y);
3358 /* Try using a move pattern for the corresponding integer mode. This is
3359 only safe when simplify_subreg can convert MODE constants into integer
3360 constants. At present, it can only do this reliably if the value
3361 fits within a HOST_WIDE_INT. */
3362 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3364 rtx ret = emit_move_via_integer (mode, x, y, false);
3365 if (ret)
3366 return ret;
3369 return emit_move_multi_word (mode, x, y);
3372 /* Generate code to copy Y into X.
3373 Both Y and X must have the same mode, except that
3374 Y can be a constant with VOIDmode.
3375 This mode cannot be BLKmode; use emit_block_move for that.
3377 Return the last instruction emitted. */
3380 emit_move_insn (rtx x, rtx y)
3382 enum machine_mode mode = GET_MODE (x);
3383 rtx y_cst = NULL_RTX;
3384 rtx last_insn, set;
3386 gcc_assert (mode != BLKmode
3387 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3389 if (CONSTANT_P (y))
3391 if (optimize
3392 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3393 && (last_insn = compress_float_constant (x, y)))
3394 return last_insn;
3396 y_cst = y;
3398 if (!LEGITIMATE_CONSTANT_P (y))
3400 y = force_const_mem (mode, y);
3402 /* If the target's cannot_force_const_mem prevented the spill,
3403 assume that the target's move expanders will also take care
3404 of the non-legitimate constant. */
3405 if (!y)
3406 y = y_cst;
3407 else
3408 y = use_anchored_address (y);
3412 /* If X or Y are memory references, verify that their addresses are valid
3413 for the machine. */
3414 if (MEM_P (x)
3415 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3416 MEM_ADDR_SPACE (x))
3417 && ! push_operand (x, GET_MODE (x))))
3418 x = validize_mem (x);
3420 if (MEM_P (y)
3421 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3422 MEM_ADDR_SPACE (y)))
3423 y = validize_mem (y);
3425 gcc_assert (mode != BLKmode);
3427 last_insn = emit_move_insn_1 (x, y);
3429 if (y_cst && REG_P (x)
3430 && (set = single_set (last_insn)) != NULL_RTX
3431 && SET_DEST (set) == x
3432 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3433 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3435 return last_insn;
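/* Usage sketch (illustrative only, not part of this file): the typical
   pattern throughout expansion is to materialize a value in a fresh
   pseudo:

       rtx tmp = gen_reg_rtx (mode);
       emit_move_insn (tmp, some_value);

   MODE and SOME_VALUE stand for the caller's mode and source rtx; the
   source may be a constant with VOIDmode, as documented above.  */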
3438 /* If Y is representable exactly in a narrower mode, and the target can
3439 perform the extension directly from constant or memory, then emit the
3440 move as an extension. */
3442 static rtx
3443 compress_float_constant (rtx x, rtx y)
3445 enum machine_mode dstmode = GET_MODE (x);
3446 enum machine_mode orig_srcmode = GET_MODE (y);
3447 enum machine_mode srcmode;
3448 REAL_VALUE_TYPE r;
3449 int oldcost, newcost;
3450 bool speed = optimize_insn_for_speed_p ();
3452 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3454 if (LEGITIMATE_CONSTANT_P (y))
3455 oldcost = rtx_cost (y, SET, speed);
3456 else
3457 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3459 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3460 srcmode != orig_srcmode;
3461 srcmode = GET_MODE_WIDER_MODE (srcmode))
3463 enum insn_code ic;
3464 rtx trunc_y, last_insn;
3466 /* Skip if the target can't extend this way. */
3467 ic = can_extend_p (dstmode, srcmode, 0);
3468 if (ic == CODE_FOR_nothing)
3469 continue;
3471 /* Skip if the narrowed value isn't exact. */
3472 if (! exact_real_truncate (srcmode, &r))
3473 continue;
3475 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3477 if (LEGITIMATE_CONSTANT_P (trunc_y))
3479 /* Skip if the target needs extra instructions to perform
3480 the extension. */
3481 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3482 continue;
3483 /* This is valid, but may not be cheaper than the original. */
3484 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3485 if (oldcost < newcost)
3486 continue;
3488 else if (float_extend_from_mem[dstmode][srcmode])
3490 trunc_y = force_const_mem (srcmode, trunc_y);
3491 /* This is valid, but may not be cheaper than the original. */
3492 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3493 if (oldcost < newcost)
3494 continue;
3495 trunc_y = validize_mem (trunc_y);
3497 else
3498 continue;
3500 /* For CSE's benefit, force the compressed constant pool entry
3501 into a new pseudo. This constant may be used in different modes,
3502 and if not, combine will put things back together for us. */
3503 trunc_y = force_reg (srcmode, trunc_y);
3504 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3505 last_insn = get_last_insn ();
3507 if (REG_P (x))
3508 set_unique_reg_note (last_insn, REG_EQUAL, y);
3510 return last_insn;
3513 return NULL_RTX;
3516 /* Pushing data onto the stack. */
3518 /* Push a block of length SIZE (perhaps variable)
3519 and return an rtx to address the beginning of the block.
3520 The value may be virtual_outgoing_args_rtx.
3522 EXTRA is the number of bytes of padding to push in addition to SIZE.
3523 BELOW nonzero means this padding comes at low addresses;
3524 otherwise, the padding comes at high addresses. */
3527 push_block (rtx size, int extra, int below)
3529 rtx temp;
3531 size = convert_modes (Pmode, ptr_mode, size, 1);
3532 if (CONSTANT_P (size))
3533 anti_adjust_stack (plus_constant (size, extra));
3534 else if (REG_P (size) && extra == 0)
3535 anti_adjust_stack (size);
3536 else
3538 temp = copy_to_mode_reg (Pmode, size);
3539 if (extra != 0)
3540 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3541 temp, 0, OPTAB_LIB_WIDEN);
3542 anti_adjust_stack (temp);
3545 #ifndef STACK_GROWS_DOWNWARD
3546 if (0)
3547 #else
3548 if (1)
3549 #endif
3551 temp = virtual_outgoing_args_rtx;
3552 if (extra != 0 && below)
3553 temp = plus_constant (temp, extra);
3555 else
3557 if (CONST_INT_P (size))
3558 temp = plus_constant (virtual_outgoing_args_rtx,
3559 -INTVAL (size) - (below ? 0 : extra));
3560 else if (extra != 0 && !below)
3561 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3562 negate_rtx (Pmode, plus_constant (size, extra)));
3563 else
3564 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3565 negate_rtx (Pmode, size));
3568 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
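/* Usage sketch (illustrative only, not part of this file): emit_push_insn
   below uses push_block to reserve space for a BLKmode argument and then
   copies into it, roughly:

       rtx addr = push_block (GEN_INT (arg_size), 0, 0);
       rtx slot = gen_rtx_MEM (BLKmode, addr);
       emit_block_move (slot, arg_mem, GEN_INT (arg_size),
                        BLOCK_OP_CALL_PARM);

   ARG_SIZE and ARG_MEM stand for the argument's size in bytes and its
   memory home.  */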
3571 #ifdef PUSH_ROUNDING
3573 /* Emit single push insn. */
3575 static void
3576 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3578 rtx dest_addr;
3579 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3580 rtx dest;
3581 enum insn_code icode;
3582 insn_operand_predicate_fn pred;
3584 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3585 /* If there is a push pattern, use it. Otherwise try the old way of
3586 throwing a MEM representing the push operation to the move expander. */
3587 icode = optab_handler (push_optab, mode);
3588 if (icode != CODE_FOR_nothing)
3590 if (((pred = insn_data[(int) icode].operand[0].predicate)
3591 && !((*pred) (x, mode))))
3592 x = force_reg (mode, x);
3593 emit_insn (GEN_FCN (icode) (x));
3594 return;
3596 if (GET_MODE_SIZE (mode) == rounded_size)
3597 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3598 /* If we are to pad downward, adjust the stack pointer first and
3599 then store X into the stack location using an offset. This is
3600 because emit_move_insn does not know how to pad; it does not have
3601 access to type. */
3602 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3604 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3605 HOST_WIDE_INT offset;
3607 emit_move_insn (stack_pointer_rtx,
3608 expand_binop (Pmode,
3609 #ifdef STACK_GROWS_DOWNWARD
3610 sub_optab,
3611 #else
3612 add_optab,
3613 #endif
3614 stack_pointer_rtx,
3615 GEN_INT (rounded_size),
3616 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3618 offset = (HOST_WIDE_INT) padding_size;
3619 #ifdef STACK_GROWS_DOWNWARD
3620 if (STACK_PUSH_CODE == POST_DEC)
3621 /* We have already decremented the stack pointer, so get the
3622 previous value. */
3623 offset += (HOST_WIDE_INT) rounded_size;
3624 #else
3625 if (STACK_PUSH_CODE == POST_INC)
3626 /* We have already incremented the stack pointer, so get the
3627 previous value. */
3628 offset -= (HOST_WIDE_INT) rounded_size;
3629 #endif
3630 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3632 else
3634 #ifdef STACK_GROWS_DOWNWARD
3635 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3636 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3637 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3638 #else
3639 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3640 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3641 GEN_INT (rounded_size));
3642 #endif
3643 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3646 dest = gen_rtx_MEM (mode, dest_addr);
3648 if (type != 0)
3650 set_mem_attributes (dest, type, 1);
3652 if (flag_optimize_sibling_calls)
3653 /* Function incoming arguments may overlap with sibling call
3654 outgoing arguments and we cannot allow reordering of reads
3655 from function arguments with stores to outgoing arguments
3656 of sibling calls. */
3657 set_mem_alias_set (dest, 0);
3659 emit_move_insn (dest, x);
3661 #endif
3663 /* Generate code to push X onto the stack, assuming it has mode MODE and
3664 type TYPE.
3665 MODE is redundant except when X is a CONST_INT (since they don't
3666 carry mode info).
3667 SIZE is an rtx for the size of data to be copied (in bytes),
3668 needed only if X is BLKmode.
3670 ALIGN (in bits) is maximum alignment we can assume.
3672 If PARTIAL and REG are both nonzero, then copy that many of the first
3673 bytes of X into registers starting with REG, and push the rest of X.
3674 The amount of space pushed is decreased by PARTIAL bytes.
3675 REG must be a hard register in this case.
3676 If REG is zero but PARTIAL is not, take all other actions for an
3677 argument partially in registers, but do not actually load any
3678 registers.
3680 EXTRA is the amount in bytes of extra space to leave next to this arg.
3681 This is ignored if an argument block has already been allocated.
3683 On a machine that lacks real push insns, ARGS_ADDR is the address of
3684 the bottom of the argument block for this call. We use indexing off there
3685 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3686 argument block has not been preallocated.
3688 ARGS_SO_FAR is the size of args previously pushed for this call.
3690 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3691 for arguments passed in registers. If nonzero, it will be the number
3692 of bytes required. */
3694 void
3695 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3696 unsigned int align, int partial, rtx reg, int extra,
3697 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3698 rtx alignment_pad)
3700 rtx xinner;
3701 enum direction stack_direction
3702 #ifdef STACK_GROWS_DOWNWARD
3703 = downward;
3704 #else
3705 = upward;
3706 #endif
3708 /* Decide where to pad the argument: `downward' for below,
3709 `upward' for above, or `none' for don't pad it.
3710 Default is below for small data on big-endian machines; else above. */
3711 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3713 /* Invert direction if stack is post-decrement.
3714 FIXME: why? */
3715 if (STACK_PUSH_CODE == POST_DEC)
3716 if (where_pad != none)
3717 where_pad = (where_pad == downward ? upward : downward);
3719 xinner = x;
3721 if (mode == BLKmode
3722 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3724 /* Copy a block into the stack, entirely or partially. */
3726 rtx temp;
3727 int used;
3728 int offset;
3729 int skip;
3731 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3732 used = partial - offset;
3734 if (mode != BLKmode)
3736 /* A value is to be stored in an insufficiently aligned
3737 stack slot; copy via a suitably aligned slot if
3738 necessary. */
3739 size = GEN_INT (GET_MODE_SIZE (mode));
3740 if (!MEM_P (xinner))
3742 temp = assign_temp (type, 0, 1, 1);
3743 emit_move_insn (temp, xinner);
3744 xinner = temp;
3748 gcc_assert (size);
3750 /* USED is now the # of bytes we need not copy to the stack
3751 because registers will take care of them. */
3753 if (partial != 0)
3754 xinner = adjust_address (xinner, BLKmode, used);
3756 /* If the partial register-part of the arg counts in its stack size,
3757 skip the part of stack space corresponding to the registers.
3758 Otherwise, start copying to the beginning of the stack space,
3759 by setting SKIP to 0. */
3760 skip = (reg_parm_stack_space == 0) ? 0 : used;
3762 #ifdef PUSH_ROUNDING
3763 /* Do it with several push insns if that doesn't take lots of insns
3764 and if there is no difficulty with push insns that skip bytes
3765 on the stack for alignment purposes. */
3766 if (args_addr == 0
3767 && PUSH_ARGS
3768 && CONST_INT_P (size)
3769 && skip == 0
3770 && MEM_ALIGN (xinner) >= align
3771 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3772 /* Here we avoid the case of a structure whose weak alignment
3773 forces many pushes of a small amount of data,
3774 and such small pushes do rounding that causes trouble. */
3775 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3776 || align >= BIGGEST_ALIGNMENT
3777 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3778 == (align / BITS_PER_UNIT)))
3779 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3781 /* Push padding now if padding above and stack grows down,
3782 or if padding below and stack grows up.
3783 But if space already allocated, this has already been done. */
3784 if (extra && args_addr == 0
3785 && where_pad != none && where_pad != stack_direction)
3786 anti_adjust_stack (GEN_INT (extra));
3788 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3790 else
3791 #endif /* PUSH_ROUNDING */
3793 rtx target;
3795 /* Otherwise make space on the stack and copy the data
3796 to the address of that space. */
3798 /* Deduct words put into registers from the size we must copy. */
3799 if (partial != 0)
3801 if (CONST_INT_P (size))
3802 size = GEN_INT (INTVAL (size) - used);
3803 else
3804 size = expand_binop (GET_MODE (size), sub_optab, size,
3805 GEN_INT (used), NULL_RTX, 0,
3806 OPTAB_LIB_WIDEN);
3809 /* Get the address of the stack space.
3810 In this case, we do not deal with EXTRA separately.
3811 A single stack adjust will do. */
3812 if (! args_addr)
3814 temp = push_block (size, extra, where_pad == downward);
3815 extra = 0;
3817 else if (CONST_INT_P (args_so_far))
3818 temp = memory_address (BLKmode,
3819 plus_constant (args_addr,
3820 skip + INTVAL (args_so_far)));
3821 else
3822 temp = memory_address (BLKmode,
3823 plus_constant (gen_rtx_PLUS (Pmode,
3824 args_addr,
3825 args_so_far),
3826 skip));
3828 if (!ACCUMULATE_OUTGOING_ARGS)
3830 /* If the source is referenced relative to the stack pointer,
3831 copy it to another register to stabilize it. We do not need
3832 to do this if we know that we won't be changing sp. */
3834 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3835 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3836 temp = copy_to_reg (temp);
3839 target = gen_rtx_MEM (BLKmode, temp);
3841 /* We do *not* set_mem_attributes here, because incoming arguments
3842 may overlap with sibling call outgoing arguments and we cannot
3843 allow reordering of reads from function arguments with stores
3844 to outgoing arguments of sibling calls. We do, however, want
3845 to record the alignment of the stack slot. */
3846 /* ALIGN may well be better aligned than TYPE, e.g. due to
3847 PARM_BOUNDARY. Assume the caller isn't lying. */
3848 set_mem_align (target, align);
3850 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3853 else if (partial > 0)
3855 /* Scalar partly in registers. */
3857 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3858 int i;
3859 int not_stack;
3860 /* # bytes of start of argument
3861 that we must make space for but need not store. */
3862 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3863 int args_offset = INTVAL (args_so_far);
3864 int skip;
3866 /* Push padding now if padding above and stack grows down,
3867 or if padding below and stack grows up.
3868 But if space already allocated, this has already been done. */
3869 if (extra && args_addr == 0
3870 && where_pad != none && where_pad != stack_direction)
3871 anti_adjust_stack (GEN_INT (extra));
3873 /* If we make space by pushing it, we might as well push
3874 the real data. Otherwise, we can leave OFFSET nonzero
3875 and leave the space uninitialized. */
3876 if (args_addr == 0)
3877 offset = 0;
3879 /* Now NOT_STACK gets the number of words that we don't need to
3880 allocate on the stack. Convert OFFSET to words too. */
3881 not_stack = (partial - offset) / UNITS_PER_WORD;
3882 offset /= UNITS_PER_WORD;
3884 /* If the partial register-part of the arg counts in its stack size,
3885 skip the part of stack space corresponding to the registers.
3886 Otherwise, start copying to the beginning of the stack space,
3887 by setting SKIP to 0. */
3888 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3890 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3891 x = validize_mem (force_const_mem (mode, x));
3893 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3894 SUBREGs of such registers are not allowed. */
3895 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3896 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3897 x = copy_to_reg (x);
3899 /* Loop over all the words allocated on the stack for this arg. */
3900 /* We can do it by words, because any scalar bigger than a word
3901 has a size that is a multiple of a word. */
3902 #ifndef PUSH_ARGS_REVERSED
3903 for (i = not_stack; i < size; i++)
3904 #else
3905 for (i = size - 1; i >= not_stack; i--)
3906 #endif
3907 if (i >= not_stack + offset)
3908 emit_push_insn (operand_subword_force (x, i, mode),
3909 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3910 0, args_addr,
3911 GEN_INT (args_offset + ((i - not_stack + skip)
3912 * UNITS_PER_WORD)),
3913 reg_parm_stack_space, alignment_pad);
3915 else
3917 rtx addr;
3918 rtx dest;
3920 /* Push padding now if padding above and stack grows down,
3921 or if padding below and stack grows up.
3922 But if space already allocated, this has already been done. */
3923 if (extra && args_addr == 0
3924 && where_pad != none && where_pad != stack_direction)
3925 anti_adjust_stack (GEN_INT (extra));
3927 #ifdef PUSH_ROUNDING
3928 if (args_addr == 0 && PUSH_ARGS)
3929 emit_single_push_insn (mode, x, type);
3930 else
3931 #endif
3933 if (CONST_INT_P (args_so_far))
3934 addr
3935 = memory_address (mode,
3936 plus_constant (args_addr,
3937 INTVAL (args_so_far)));
3938 else
3939 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3940 args_so_far));
3941 dest = gen_rtx_MEM (mode, addr);
3943 /* We do *not* set_mem_attributes here, because incoming arguments
3944 may overlap with sibling call outgoing arguments and we cannot
3945 allow reordering of reads from function arguments with stores
3946 to outgoing arguments of sibling calls. We do, however, want
3947 to record the alignment of the stack slot. */
3948 /* ALIGN may well be better aligned than TYPE, e.g. due to
3949 PARM_BOUNDARY. Assume the caller isn't lying. */
3950 set_mem_align (dest, align);
3952 emit_move_insn (dest, x);
3956 /* If part should go in registers, copy that part
3957 into the appropriate registers. Do this now, at the end,
3958 since mem-to-mem copies above may do function calls. */
3959 if (partial > 0 && reg != 0)
3961 /* Handle calls that pass values in multiple non-contiguous locations.
3962 The Irix 6 ABI has examples of this. */
3963 if (GET_CODE (reg) == PARALLEL)
3964 emit_group_load (reg, x, type, -1);
3965 else
3967 gcc_assert (partial % UNITS_PER_WORD == 0);
3968 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3972 if (extra && args_addr == 0 && where_pad == stack_direction)
3973 anti_adjust_stack (GEN_INT (extra));
3975 if (alignment_pad && args_addr == 0)
3976 anti_adjust_stack (alignment_pad);
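/* Illustrative sketch (editor's addition, not part of the original
   expr.c): a minimal call to emit_push_insn above, pushing one
   word-mode value with default padding, no partial-register handling,
   and no preallocated argument block.  The helper name is hypothetical;
   real callers (e.g. in calls.c) pass values derived from the target's
   argument-passing conventions.  */
#if 0
static void
example_push_word (rtx x)
{
  emit_push_insn (x, word_mode, NULL_TREE, /* size */ NULL_RTX,
                  /* align (bits) */ PARM_BOUNDARY, /* partial */ 0,
                  /* reg */ NULL_RTX, /* extra */ 0,
                  /* args_addr */ NULL_RTX, /* args_so_far */ const0_rtx,
                  /* reg_parm_stack_space */ 0,
                  /* alignment_pad */ NULL_RTX);
}
#endif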
3979 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3980 operations. */
3982 static rtx
3983 get_subtarget (rtx x)
3985 return (optimize
3986 || x == 0
3987 /* Only registers can be subtargets. */
3988 || !REG_P (x)
3989 /* Don't use hard regs to avoid extending their life. */
3990 || REGNO (x) < FIRST_PSEUDO_REGISTER
3991 ? 0 : x);
3994 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3995 FIELD is a bitfield. Returns true if the optimization was successful,
3996 and there's nothing else to do. */
3998 static bool
3999 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4000 unsigned HOST_WIDE_INT bitpos,
4001 enum machine_mode mode1, rtx str_rtx,
4002 tree to, tree src)
4004 enum machine_mode str_mode = GET_MODE (str_rtx);
4005 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4006 tree op0, op1;
4007 rtx value, result;
4008 optab binop;
4010 if (mode1 != VOIDmode
4011 || bitsize >= BITS_PER_WORD
4012 || str_bitsize > BITS_PER_WORD
4013 || TREE_SIDE_EFFECTS (to)
4014 || TREE_THIS_VOLATILE (to))
4015 return false;
4017 STRIP_NOPS (src);
4018 if (!BINARY_CLASS_P (src)
4019 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4020 return false;
4022 op0 = TREE_OPERAND (src, 0);
4023 op1 = TREE_OPERAND (src, 1);
4024 STRIP_NOPS (op0);
4026 if (!operand_equal_p (to, op0, 0))
4027 return false;
4029 if (MEM_P (str_rtx))
4031 unsigned HOST_WIDE_INT offset1;
4033 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4034 str_mode = word_mode;
4035 str_mode = get_best_mode (bitsize, bitpos,
4036 MEM_ALIGN (str_rtx), str_mode, 0);
4037 if (str_mode == VOIDmode)
4038 return false;
4039 str_bitsize = GET_MODE_BITSIZE (str_mode);
4041 offset1 = bitpos;
4042 bitpos %= str_bitsize;
4043 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4044 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4046 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4047 return false;
4049 /* If the bit field covers the whole REG/MEM, store_field
4050 will likely generate better code. */
4051 if (bitsize >= str_bitsize)
4052 return false;
4054 /* We can't handle fields split across multiple entities. */
4055 if (bitpos + bitsize > str_bitsize)
4056 return false;
4058 if (BYTES_BIG_ENDIAN)
4059 bitpos = str_bitsize - bitpos - bitsize;
4061 switch (TREE_CODE (src))
4063 case PLUS_EXPR:
4064 case MINUS_EXPR:
4065 /* For now, just optimize the case of the topmost bitfield,
4066 where we don't need to do any masking, and also
4067 1-bit bitfields, where xor can be used.
4068 We might win by one instruction for the other bitfields
4069 too if insv/extv instructions aren't used, so that
4070 can be added later. */
4071 if (bitpos + bitsize != str_bitsize
4072 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4073 break;
4075 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4076 value = convert_modes (str_mode,
4077 TYPE_MODE (TREE_TYPE (op1)), value,
4078 TYPE_UNSIGNED (TREE_TYPE (op1)));
4080 /* We may be accessing data outside the field, which means
4081 we can alias adjacent data. */
4082 if (MEM_P (str_rtx))
4084 str_rtx = shallow_copy_rtx (str_rtx);
4085 set_mem_alias_set (str_rtx, 0);
4086 set_mem_expr (str_rtx, 0);
4089 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4090 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4092 value = expand_and (str_mode, value, const1_rtx, NULL);
4093 binop = xor_optab;
4095 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4096 build_int_cst (NULL_TREE, bitpos),
4097 NULL_RTX, 1);
4098 result = expand_binop (str_mode, binop, str_rtx,
4099 value, str_rtx, 1, OPTAB_WIDEN);
4100 if (result != str_rtx)
4101 emit_move_insn (str_rtx, result);
4102 return true;
4104 case BIT_IOR_EXPR:
4105 case BIT_XOR_EXPR:
4106 if (TREE_CODE (op1) != INTEGER_CST)
4107 break;
4108 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4109 value = convert_modes (GET_MODE (str_rtx),
4110 TYPE_MODE (TREE_TYPE (op1)), value,
4111 TYPE_UNSIGNED (TREE_TYPE (op1)));
4113 /* We may be accessing data outside the field, which means
4114 we can alias adjacent data. */
4115 if (MEM_P (str_rtx))
4117 str_rtx = shallow_copy_rtx (str_rtx);
4118 set_mem_alias_set (str_rtx, 0);
4119 set_mem_expr (str_rtx, 0);
4122 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4123 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4125 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4126 - 1);
4127 value = expand_and (GET_MODE (str_rtx), value, mask,
4128 NULL_RTX);
4130 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4131 build_int_cst (NULL_TREE, bitpos),
4132 NULL_RTX, 1);
4133 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4134 value, str_rtx, 1, OPTAB_WIDEN);
4135 if (result != str_rtx)
4136 emit_move_insn (str_rtx, result);
4137 return true;
4139 default:
4140 break;
4143 return false;
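/* Illustrative example (editor's addition, not part of the original
   expr.c): the kind of source-level updates the routine above tries to
   simplify.  The struct and the 32-bit word size are hypothetical, and
   which field ends up at the most significant end of the word is
   layout- and target-dependent.  */
#if 0
struct example_flags
{
  unsigned int busy : 1;
  unsigned int count : 31;
};

static void
example_bitfield_updates (struct example_flags *p)
{
  /* A 1-bit field combined with a constant maps to a single xor on the
     word containing the field, with no extract/insert pair.  */
  p->busy ^= 1;
  /* A PLUS on a field needs no masking only when the field reaches the
     most significant bit of the word (bitpos + bitsize == str_bitsize).  */
  p->count += 1;
}
#endif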
4147 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4148 is true, try generating a nontemporal store. */
4150 void
4151 expand_assignment (tree to, tree from, bool nontemporal)
4153 rtx to_rtx = 0;
4154 rtx result;
4156 /* Don't crash if the lhs of the assignment was erroneous. */
4157 if (TREE_CODE (to) == ERROR_MARK)
4159 result = expand_normal (from);
4160 return;
4163 /* Optimize away no-op moves without side-effects. */
4164 if (operand_equal_p (to, from, 0))
4165 return;
4167 /* Assignment of a structure component needs special treatment
4168 if the structure component's rtx is not simply a MEM.
4169 Assignment of an array element at a constant index, and assignment of
4170 an array element in an unaligned packed structure field, has the same
4171 problem. */
4172 if (handled_component_p (to)
4173 /* ??? We only need to handle MEM_REF here if the access is not
4174 a full access of the base object. */
4175 || (TREE_CODE (to) == MEM_REF
4176 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4177 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4179 enum machine_mode mode1;
4180 HOST_WIDE_INT bitsize, bitpos;
4181 tree offset;
4182 int unsignedp;
4183 int volatilep = 0;
4184 tree tem;
4186 push_temp_slots ();
4187 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4188 &unsignedp, &volatilep, true);
4190 /* If we are going to use store_bit_field and extract_bit_field,
4191 make sure to_rtx will be safe for multiple use. */
4193 to_rtx = expand_normal (tem);
4195 /* If the bitfield is volatile, we want to access it in the
4196 field's mode, not the computed mode. */
4197 if (volatilep
4198 && GET_CODE (to_rtx) == MEM
4199 && flag_strict_volatile_bitfields > 0)
4200 to_rtx = adjust_address (to_rtx, mode1, 0);
4202 if (offset != 0)
4204 enum machine_mode address_mode;
4205 rtx offset_rtx;
4207 if (!MEM_P (to_rtx))
4209 /* We can get constant negative offsets into arrays with broken
4210 user code. Translate this to a trap instead of ICEing. */
4211 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4212 expand_builtin_trap ();
4213 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4216 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4217 address_mode
4218 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4219 if (GET_MODE (offset_rtx) != address_mode)
4220 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4222 /* A constant address in TO_RTX can have VOIDmode, we must not try
4223 to call force_reg for that case. Avoid that case. */
4224 if (MEM_P (to_rtx)
4225 && GET_MODE (to_rtx) == BLKmode
4226 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4227 && bitsize > 0
4228 && (bitpos % bitsize) == 0
4229 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4230 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4232 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4233 bitpos = 0;
4236 to_rtx = offset_address (to_rtx, offset_rtx,
4237 highest_pow2_factor_for_target (to,
4238 offset));
4241 /* No action is needed if the target is not a memory and the field
4242 lies completely outside that target. This can occur if the source
4243 code contains an out-of-bounds access to a small array. */
4244 if (!MEM_P (to_rtx)
4245 && GET_MODE (to_rtx) != BLKmode
4246 && (unsigned HOST_WIDE_INT) bitpos
4247 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4249 expand_normal (from);
4250 result = NULL;
4252 /* Handle expand_expr of a complex value returning a CONCAT. */
4253 else if (GET_CODE (to_rtx) == CONCAT)
4255 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4257 gcc_assert (bitpos == 0);
4258 result = store_expr (from, to_rtx, false, nontemporal);
4260 else
4262 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4263 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4264 nontemporal);
4267 else
4269 if (MEM_P (to_rtx))
4271 /* If the field is at offset zero, we could have been given the
4272 DECL_RTX of the parent struct. Don't munge it. */
4273 to_rtx = shallow_copy_rtx (to_rtx);
4275 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4277 /* Deal with volatile and readonly fields. The former is only
4278 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4279 if (volatilep)
4280 MEM_VOLATILE_P (to_rtx) = 1;
4281 if (component_uses_parent_alias_set (to))
4282 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4285 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4286 to_rtx, to, from))
4287 result = NULL;
4288 else
4289 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4290 TREE_TYPE (tem), get_alias_set (to),
4291 nontemporal);
4294 if (result)
4295 preserve_temp_slots (result);
4296 free_temp_slots ();
4297 pop_temp_slots ();
4298 return;
4301 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4303 addr_space_t as = ADDR_SPACE_GENERIC;
4304 enum machine_mode mode, op_mode1;
4305 enum insn_code icode;
4306 rtx reg, addr, mem, insn;
4308 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4309 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4311 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4312 reg = force_not_mem (reg);
4314 mode = TYPE_MODE (TREE_TYPE (to));
4315 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4316 EXPAND_SUM);
4317 addr = memory_address_addr_space (mode, addr, as);
4318 mem = gen_rtx_MEM (mode, addr);
4320 set_mem_attributes (mem, to, 0);
4321 set_mem_addr_space (mem, as);
4323 icode = optab_handler (movmisalign_optab, mode);
4324 gcc_assert (icode != CODE_FOR_nothing);
4326 op_mode1 = insn_data[icode].operand[1].mode;
4327 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4328 && op_mode1 != VOIDmode)
4329 reg = copy_to_mode_reg (op_mode1, reg);
4331 insn = GEN_FCN (icode) (mem, reg);
4332 emit_insn (insn);
4333 return;
4336 /* If the rhs is a function call and its value is not an aggregate,
4337 call the function before we start to compute the lhs.
4338 This is needed for correct code for cases such as
4339 val = setjmp (buf) on machines where reference to val
4340 requires loading up part of an address in a separate insn.
4342 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4343 since it might be a promoted variable where the zero- or sign-extension
4344 needs to be done. Handling this in the normal way is safe because no
4345 computation is done before the call. The same is true for SSA names. */
4346 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4347 && COMPLETE_TYPE_P (TREE_TYPE (from))
4348 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4349 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4350 && REG_P (DECL_RTL (to)))
4351 || TREE_CODE (to) == SSA_NAME))
4353 rtx value;
4355 push_temp_slots ();
4356 value = expand_normal (from);
4357 if (to_rtx == 0)
4358 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4360 /* Handle calls that return values in multiple non-contiguous locations.
4361 The Irix 6 ABI has examples of this. */
4362 if (GET_CODE (to_rtx) == PARALLEL)
4363 emit_group_load (to_rtx, value, TREE_TYPE (from),
4364 int_size_in_bytes (TREE_TYPE (from)));
4365 else if (GET_MODE (to_rtx) == BLKmode)
4366 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4367 else
4369 if (POINTER_TYPE_P (TREE_TYPE (to)))
4370 value = convert_memory_address_addr_space
4371 (GET_MODE (to_rtx), value,
4372 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4374 emit_move_insn (to_rtx, value);
4376 preserve_temp_slots (to_rtx);
4377 free_temp_slots ();
4378 pop_temp_slots ();
4379 return;
4382 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4383 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4385 if (to_rtx == 0)
4386 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4388 /* Don't move directly into a return register. */
4389 if (TREE_CODE (to) == RESULT_DECL
4390 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4392 rtx temp;
4394 push_temp_slots ();
4395 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4397 if (GET_CODE (to_rtx) == PARALLEL)
4398 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4399 int_size_in_bytes (TREE_TYPE (from)));
4400 else
4401 emit_move_insn (to_rtx, temp);
4403 preserve_temp_slots (to_rtx);
4404 free_temp_slots ();
4405 pop_temp_slots ();
4406 return;
4409 /* In case we are returning the contents of an object which overlaps
4410 the place the value is being stored, use a safe function when copying
4411 a value through a pointer into a structure value return block. */
4412 if (TREE_CODE (to) == RESULT_DECL
4413 && TREE_CODE (from) == INDIRECT_REF
4414 && ADDR_SPACE_GENERIC_P
4415 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4416 && refs_may_alias_p (to, from)
4417 && cfun->returns_struct
4418 && !cfun->returns_pcc_struct)
4420 rtx from_rtx, size;
4422 push_temp_slots ();
4423 size = expr_size (from);
4424 from_rtx = expand_normal (from);
4426 emit_library_call (memmove_libfunc, LCT_NORMAL,
4427 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4428 XEXP (from_rtx, 0), Pmode,
4429 convert_to_mode (TYPE_MODE (sizetype),
4430 size, TYPE_UNSIGNED (sizetype)),
4431 TYPE_MODE (sizetype));
4433 preserve_temp_slots (to_rtx);
4434 free_temp_slots ();
4435 pop_temp_slots ();
4436 return;
4439 /* Compute FROM and store the value in the rtx we got. */
4441 push_temp_slots ();
4442 result = store_expr (from, to_rtx, 0, nontemporal);
4443 preserve_temp_slots (result);
4444 free_temp_slots ();
4445 pop_temp_slots ();
4446 return;
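/* Illustrative sketch (editor's addition, not part of the original
   expr.c): how callers drive expand_assignment above.  LHS_DECL,
   FIELD_REF and RHS are placeholder trees; passing false requests an
   ordinary rather than a nontemporal store.  */
#if 0
  /* A store to a plain variable takes the "ordinary treatment" path at
     the end of the function.  */
  expand_assignment (lhs_decl, rhs, /* nontemporal */ false);
  /* A store to a struct field or array element goes through the
     handled_component_p branch, which finds the bit position and offset
     with get_inner_reference and then uses store_field.  */
  expand_assignment (field_ref, rhs, false);
#endif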
4449 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4450 succeeded, false otherwise. */
4452 bool
4453 emit_storent_insn (rtx to, rtx from)
4455 enum machine_mode mode = GET_MODE (to), imode;
4456 enum insn_code code = optab_handler (storent_optab, mode);
4457 rtx pattern;
4459 if (code == CODE_FOR_nothing)
4460 return false;
4462 imode = insn_data[code].operand[0].mode;
4463 if (!insn_data[code].operand[0].predicate (to, imode))
4464 return false;
4466 imode = insn_data[code].operand[1].mode;
4467 if (!insn_data[code].operand[1].predicate (from, imode))
4469 from = copy_to_mode_reg (imode, from);
4470 if (!insn_data[code].operand[1].predicate (from, imode))
4471 return false;
4474 pattern = GEN_FCN (code) (to, from);
4475 if (pattern == NULL_RTX)
4476 return false;
4478 emit_insn (pattern);
4479 return true;
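/* Illustrative sketch (editor's addition, not part of the original
   expr.c): the calling pattern used by store_expr below when a
   nontemporal store was requested -- try the storent pattern first and
   fall back to an ordinary move if the target lacks one or the operands
   do not satisfy its predicates.  TARGET and TEMP are placeholders.  */
#if 0
  if (!emit_storent_insn (target, temp))
    {
      temp = force_operand (temp, target);
      if (temp != target)
        emit_move_insn (target, temp);
    }
#endif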
4482 /* Generate code for computing expression EXP,
4483 and storing the value into TARGET.
4485 If the mode is BLKmode then we may return TARGET itself.
4486 It turns out that in BLKmode it doesn't cause a problem,
4487 because C has no operators that could combine two different
4488 assignments into the same BLKmode object with different values
4489 with no sequence point. Will other languages need this to
4490 be more thorough?
4492 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4493 stack, and block moves may need to be treated specially.
4495 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4497 rtx
4498 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4500 rtx temp;
4501 rtx alt_rtl = NULL_RTX;
4502 location_t loc = EXPR_LOCATION (exp);
4504 if (VOID_TYPE_P (TREE_TYPE (exp)))
4506 /* C++ can generate ?: expressions with a throw expression in one
4507 branch and an rvalue in the other. Here, we resolve attempts to
4508 store the throw expression's nonexistent result. */
4509 gcc_assert (!call_param_p);
4510 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4511 return NULL_RTX;
4513 if (TREE_CODE (exp) == COMPOUND_EXPR)
4515 /* Perform first part of compound expression, then assign from second
4516 part. */
4517 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4518 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4519 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4520 nontemporal);
4522 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4524 /* For conditional expression, get safe form of the target. Then
4525 test the condition, doing the appropriate assignment on either
4526 side. This avoids the creation of unnecessary temporaries.
4527 For non-BLKmode, it is more efficient not to do this. */
4529 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4531 do_pending_stack_adjust ();
4532 NO_DEFER_POP;
4533 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4534 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4535 nontemporal);
4536 emit_jump_insn (gen_jump (lab2));
4537 emit_barrier ();
4538 emit_label (lab1);
4539 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4540 nontemporal);
4541 emit_label (lab2);
4542 OK_DEFER_POP;
4544 return NULL_RTX;
4546 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4547 /* If this is a scalar in a register that is stored in a wider mode
4548 than the declared mode, compute the result into its declared mode
4549 and then convert to the wider mode. Our value is the computed
4550 expression. */
4552 rtx inner_target = 0;
4554 /* We can do the conversion inside EXP, which will often result
4555 in some optimizations. Do the conversion in two steps: first
4556 change the signedness, if needed, then the extend. But don't
4557 do this if the type of EXP is a subtype of something else
4558 since then the conversion might involve more than just
4559 converting modes. */
4560 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4561 && TREE_TYPE (TREE_TYPE (exp)) == 0
4562 && GET_MODE_PRECISION (GET_MODE (target))
4563 == TYPE_PRECISION (TREE_TYPE (exp)))
4565 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4566 != SUBREG_PROMOTED_UNSIGNED_P (target))
4568 /* Some types, e.g. Fortran's logical*4, won't have a signed
4569 version, so use the mode instead. */
4570 tree ntype
4571 = (signed_or_unsigned_type_for
4572 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4573 if (ntype == NULL)
4574 ntype = lang_hooks.types.type_for_mode
4575 (TYPE_MODE (TREE_TYPE (exp)),
4576 SUBREG_PROMOTED_UNSIGNED_P (target));
4578 exp = fold_convert_loc (loc, ntype, exp);
4581 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4582 (GET_MODE (SUBREG_REG (target)),
4583 SUBREG_PROMOTED_UNSIGNED_P (target)),
4584 exp);
4586 inner_target = SUBREG_REG (target);
4589 temp = expand_expr (exp, inner_target, VOIDmode,
4590 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4592 /* If TEMP is a VOIDmode constant, use convert_modes to make
4593 sure that we properly convert it. */
4594 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4596 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4597 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4598 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4599 GET_MODE (target), temp,
4600 SUBREG_PROMOTED_UNSIGNED_P (target));
4603 convert_move (SUBREG_REG (target), temp,
4604 SUBREG_PROMOTED_UNSIGNED_P (target));
4606 return NULL_RTX;
4608 else if (TREE_CODE (exp) == STRING_CST
4609 && !nontemporal && !call_param_p
4610 && TREE_STRING_LENGTH (exp) > 0
4611 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4613 /* Optimize initialization of an array with a STRING_CST. */
4614 HOST_WIDE_INT exp_len, str_copy_len;
4615 rtx dest_mem;
4617 exp_len = int_expr_size (exp);
4618 if (exp_len <= 0)
4619 goto normal_expr;
4621 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4622 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4623 goto normal_expr;
4625 str_copy_len = TREE_STRING_LENGTH (exp);
4626 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4628 str_copy_len += STORE_MAX_PIECES - 1;
4629 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4631 str_copy_len = MIN (str_copy_len, exp_len);
4632 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4633 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4634 MEM_ALIGN (target), false))
4635 goto normal_expr;
4637 dest_mem = target;
4639 dest_mem = store_by_pieces (dest_mem,
4640 str_copy_len, builtin_strncpy_read_str,
4641 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4642 MEM_ALIGN (target), false,
4643 exp_len > str_copy_len ? 1 : 0);
4644 if (exp_len > str_copy_len)
4645 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4646 GEN_INT (exp_len - str_copy_len),
4647 BLOCK_OP_NORMAL);
4648 return NULL_RTX;
4650 else if (TREE_CODE (exp) == MEM_REF
4651 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4652 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST
4653 && integer_zerop (TREE_OPERAND (exp, 1))
4654 && !nontemporal && !call_param_p
4655 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4657 /* Optimize initialization of an array with a STRING_CST. */
4658 HOST_WIDE_INT exp_len, str_copy_len;
4659 rtx dest_mem;
4660 tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4662 exp_len = int_expr_size (exp);
4663 if (exp_len <= 0)
4664 goto normal_expr;
4666 str_copy_len = strlen (TREE_STRING_POINTER (str));
4667 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4668 goto normal_expr;
4670 str_copy_len = TREE_STRING_LENGTH (str);
4671 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4673 str_copy_len += STORE_MAX_PIECES - 1;
4674 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4676 str_copy_len = MIN (str_copy_len, exp_len);
4677 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4678 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4679 MEM_ALIGN (target), false))
4680 goto normal_expr;
4682 dest_mem = target;
4684 dest_mem = store_by_pieces (dest_mem,
4685 str_copy_len, builtin_strncpy_read_str,
4686 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4687 MEM_ALIGN (target), false,
4688 exp_len > str_copy_len ? 1 : 0);
4689 if (exp_len > str_copy_len)
4690 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4691 GEN_INT (exp_len - str_copy_len),
4692 BLOCK_OP_NORMAL);
4693 return NULL_RTX;
4695 else
4697 rtx tmp_target;
4699 normal_expr:
4700 /* If we want to use a nontemporal store, force the value to
4701 register first. */
4702 tmp_target = nontemporal ? NULL_RTX : target;
4703 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4704 (call_param_p
4705 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4706 &alt_rtl);
4709 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4710 the same as that of TARGET, adjust the constant. This is needed, for
4711 example, in case it is a CONST_DOUBLE and we want only a word-sized
4712 value. */
4713 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4714 && TREE_CODE (exp) != ERROR_MARK
4715 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4716 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4717 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4719 /* If value was not generated in the target, store it there.
4720 Convert the value to TARGET's type first if necessary and emit the
4721 pending incrementations that have been queued when expanding EXP.
4722 Note that we cannot emit the whole queue blindly because this will
4723 effectively disable the POST_INC optimization later.
4725 If TEMP and TARGET compare equal according to rtx_equal_p, but
4726 one or both of them are volatile memory refs, we have to distinguish
4727 two cases:
4728 - expand_expr has used TARGET. In this case, we must not generate
4729 another copy. This can be detected by TARGET being equal according
4730 to == .
4731 - expand_expr has not used TARGET - that means that the source just
4732 happens to have the same RTX form. Since temp will have been created
4733 by expand_expr, it will compare unequal according to == .
4734 We must generate a copy in this case, to reach the correct number
4735 of volatile memory references. */
4737 if ((! rtx_equal_p (temp, target)
4738 || (temp != target && (side_effects_p (temp)
4739 || side_effects_p (target))))
4740 && TREE_CODE (exp) != ERROR_MARK
4741 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4742 but TARGET is not a valid memory reference, TEMP will differ
4743 from TARGET although it is really the same location. */
4744 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4745 /* If there's nothing to copy, don't bother. Don't call
4746 expr_size unless necessary, because the expr_size hook of some
4747 front ends (C++) must not be given objects that are not
4748 supposed to be bit-copied or bit-initialized. */
4749 && expr_size (exp) != const0_rtx)
4751 if (GET_MODE (temp) != GET_MODE (target)
4752 && GET_MODE (temp) != VOIDmode)
4754 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4755 if (GET_MODE (target) == BLKmode
4756 || GET_MODE (temp) == BLKmode)
4757 emit_block_move (target, temp, expr_size (exp),
4758 (call_param_p
4759 ? BLOCK_OP_CALL_PARM
4760 : BLOCK_OP_NORMAL));
4761 else
4762 convert_move (target, temp, unsignedp);
4765 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4767 /* Handle copying a string constant into an array. The string
4768 constant may be shorter than the array. So copy just the string's
4769 actual length, and clear the rest. First get the size of the data
4770 type of the string, which is actually the size of the target. */
4771 rtx size = expr_size (exp);
4773 if (CONST_INT_P (size)
4774 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4775 emit_block_move (target, temp, size,
4776 (call_param_p
4777 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4778 else
4780 enum machine_mode pointer_mode
4781 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4782 enum machine_mode address_mode
4783 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4785 /* Compute the size of the data to copy from the string. */
4786 tree copy_size
4787 = size_binop_loc (loc, MIN_EXPR,
4788 make_tree (sizetype, size),
4789 size_int (TREE_STRING_LENGTH (exp)));
4790 rtx copy_size_rtx
4791 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4792 (call_param_p
4793 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4794 rtx label = 0;
4796 /* Copy that much. */
4797 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4798 TYPE_UNSIGNED (sizetype));
4799 emit_block_move (target, temp, copy_size_rtx,
4800 (call_param_p
4801 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4803 /* Figure out how much is left in TARGET that we have to clear.
4804 Do all calculations in pointer_mode. */
4805 if (CONST_INT_P (copy_size_rtx))
4807 size = plus_constant (size, -INTVAL (copy_size_rtx));
4808 target = adjust_address (target, BLKmode,
4809 INTVAL (copy_size_rtx));
4811 else
4813 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4814 copy_size_rtx, NULL_RTX, 0,
4815 OPTAB_LIB_WIDEN);
4817 if (GET_MODE (copy_size_rtx) != address_mode)
4818 copy_size_rtx = convert_to_mode (address_mode,
4819 copy_size_rtx,
4820 TYPE_UNSIGNED (sizetype));
4822 target = offset_address (target, copy_size_rtx,
4823 highest_pow2_factor (copy_size));
4824 label = gen_label_rtx ();
4825 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4826 GET_MODE (size), 0, label);
4829 if (size != const0_rtx)
4830 clear_storage (target, size, BLOCK_OP_NORMAL);
4832 if (label)
4833 emit_label (label);
4836 /* Handle calls that return values in multiple non-contiguous locations.
4837 The Irix 6 ABI has examples of this. */
4838 else if (GET_CODE (target) == PARALLEL)
4839 emit_group_load (target, temp, TREE_TYPE (exp),
4840 int_size_in_bytes (TREE_TYPE (exp)));
4841 else if (GET_MODE (temp) == BLKmode)
4842 emit_block_move (target, temp, expr_size (exp),
4843 (call_param_p
4844 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4845 else if (nontemporal
4846 && emit_storent_insn (target, temp))
4847 /* If we managed to emit a nontemporal store, there is nothing else to
4848 do. */
4850 else
4852 temp = force_operand (temp, target);
4853 if (temp != target)
4854 emit_move_insn (target, temp);
4858 return NULL_RTX;
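/* Illustrative sketch (editor's addition, not part of the original
   expr.c): typical calls to store_expr above, matching the calls made
   elsewhere in this file.  EXP, TARGET, ARG_EXP and ARG_SLOT are
   placeholders.  */
#if 0
  /* Ordinary store of the value of EXP into TARGET.  */
  store_expr (exp, target, /* call_param_p */ 0, /* nontemporal */ false);
  /* Store of a call argument into its stack slot; CALL_PARAM_P tells
     the expander that outgoing-argument space is being written, so block
     moves may need special treatment.  */
  store_expr (arg_exp, arg_slot, /* call_param_p */ 1, false);
#endif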
4861 /* Helper for categorize_ctor_elements. Identical interface. */
4863 static bool
4864 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4865 HOST_WIDE_INT *p_elt_count,
4866 bool *p_must_clear)
4868 unsigned HOST_WIDE_INT idx;
4869 HOST_WIDE_INT nz_elts, elt_count;
4870 tree value, purpose;
4872 /* Whether CTOR is a valid constant initializer, in accordance with what
4873 initializer_constant_valid_p does. If inferred from the constructor
4874 elements, true until proven otherwise. */
4875 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4876 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4878 nz_elts = 0;
4879 elt_count = 0;
4881 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4883 HOST_WIDE_INT mult = 1;
4885 if (TREE_CODE (purpose) == RANGE_EXPR)
4887 tree lo_index = TREE_OPERAND (purpose, 0);
4888 tree hi_index = TREE_OPERAND (purpose, 1);
4890 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4891 mult = (tree_low_cst (hi_index, 1)
4892 - tree_low_cst (lo_index, 1) + 1);
4895 switch (TREE_CODE (value))
4897 case CONSTRUCTOR:
4899 HOST_WIDE_INT nz = 0, ic = 0;
4901 bool const_elt_p
4902 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4904 nz_elts += mult * nz;
4905 elt_count += mult * ic;
4907 if (const_from_elts_p && const_p)
4908 const_p = const_elt_p;
4910 break;
4912 case INTEGER_CST:
4913 case REAL_CST:
4914 case FIXED_CST:
4915 if (!initializer_zerop (value))
4916 nz_elts += mult;
4917 elt_count += mult;
4918 break;
4920 case STRING_CST:
4921 nz_elts += mult * TREE_STRING_LENGTH (value);
4922 elt_count += mult * TREE_STRING_LENGTH (value);
4923 break;
4925 case COMPLEX_CST:
4926 if (!initializer_zerop (TREE_REALPART (value)))
4927 nz_elts += mult;
4928 if (!initializer_zerop (TREE_IMAGPART (value)))
4929 nz_elts += mult;
4930 elt_count += mult;
4931 break;
4933 case VECTOR_CST:
4935 tree v;
4936 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4938 if (!initializer_zerop (TREE_VALUE (v)))
4939 nz_elts += mult;
4940 elt_count += mult;
4943 break;
4945 default:
4947 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4948 if (tc < 1)
4949 tc = 1;
4950 nz_elts += mult * tc;
4951 elt_count += mult * tc;
4953 if (const_from_elts_p && const_p)
4954 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4955 != NULL_TREE;
4957 break;
4961 if (!*p_must_clear
4962 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4963 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4965 tree init_sub_type;
4966 bool clear_this = true;
4968 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4970 /* We don't expect more than one element of the union to be
4971 initialized. Not sure what we should do otherwise... */
4972 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4973 == 1);
4975 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4976 CONSTRUCTOR_ELTS (ctor),
4977 0)->value);
4979 /* ??? We could look at each element of the union, and find the
4980 largest element, which would avoid comparing the size of the
4981 initialized element against any tail padding in the union.
4982 Doesn't seem worth the effort... */
4983 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4984 TYPE_SIZE (init_sub_type)) == 1)
4986 /* And now we have to find out if the element itself is fully
4987 constructed. E.g. for union { struct { int a, b; } s; } u
4988 = { .s = { .a = 1 } }. */
4989 if (elt_count == count_type_elements (init_sub_type, false))
4990 clear_this = false;
4994 *p_must_clear = clear_this;
4997 *p_nz_elts += nz_elts;
4998 *p_elt_count += elt_count;
5000 return const_p;
5003 /* Examine CTOR to discover:
5004 * how many scalar fields are set to nonzero values,
5005 and place it in *P_NZ_ELTS;
5006 * how many scalar fields in total are in CTOR,
5007 and place it in *P_ELT_COUNT.
5008 * if a type is a union, and the initializer from the constructor
5009 is not the largest element in the union, then set *P_MUST_CLEAR.
5011 Return whether or not CTOR is a valid static constant initializer, the same
5012 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5014 bool
5015 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5016 HOST_WIDE_INT *p_elt_count,
5017 bool *p_must_clear)
5019 *p_nz_elts = 0;
5020 *p_elt_count = 0;
5021 *p_must_clear = false;
5023 return
5024 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
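/* Illustrative sketch (editor's addition, not part of the original
   expr.c): how mostly_zeros_p and all_zeros_p below consume
   categorize_ctor_elements.  CTOR is a placeholder CONSTRUCTOR tree.  */
#if 0
  HOST_WIDE_INT nz_elts, elt_count;
  bool must_clear, const_p;

  const_p = categorize_ctor_elements (ctor, &nz_elts, &elt_count,
                                      &must_clear);
  /* const_p    - CTOR is a valid static constant initializer,
     nz_elts    - scalar fields set to nonzero values,
     elt_count  - scalar fields present in CTOR,
     must_clear - a union whose initializer is smaller than the union.  */
#endif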
5027 /* Count the number of scalars in TYPE. Return -1 on overflow or
5028 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
5029 flexible array member at the end of the structure. */
5031 HOST_WIDE_INT
5032 count_type_elements (const_tree type, bool allow_flexarr)
5034 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5035 switch (TREE_CODE (type))
5037 case ARRAY_TYPE:
5039 tree telts = array_type_nelts (type);
5040 if (telts && host_integerp (telts, 1))
5042 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5043 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5044 if (n == 0)
5045 return 0;
5046 else if (max / n > m)
5047 return n * m;
5049 return -1;
5052 case RECORD_TYPE:
5054 HOST_WIDE_INT n = 0, t;
5055 tree f;
5057 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5058 if (TREE_CODE (f) == FIELD_DECL)
5060 t = count_type_elements (TREE_TYPE (f), false);
5061 if (t < 0)
5063 /* Check for structures with flexible array member. */
5064 tree tf = TREE_TYPE (f);
5065 if (allow_flexarr
5066 && TREE_CHAIN (f) == NULL
5067 && TREE_CODE (tf) == ARRAY_TYPE
5068 && TYPE_DOMAIN (tf)
5069 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5070 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5071 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5072 && int_size_in_bytes (type) >= 0)
5073 break;
5075 return -1;
5077 n += t;
5080 return n;
5083 case UNION_TYPE:
5084 case QUAL_UNION_TYPE:
5085 return -1;
5087 case COMPLEX_TYPE:
5088 return 2;
5090 case VECTOR_TYPE:
5091 return TYPE_VECTOR_SUBPARTS (type);
5093 case INTEGER_TYPE:
5094 case REAL_TYPE:
5095 case FIXED_POINT_TYPE:
5096 case ENUMERAL_TYPE:
5097 case BOOLEAN_TYPE:
5098 case POINTER_TYPE:
5099 case OFFSET_TYPE:
5100 case REFERENCE_TYPE:
5101 return 1;
5103 case ERROR_MARK:
5104 return 0;
5106 case VOID_TYPE:
5107 case METHOD_TYPE:
5108 case FUNCTION_TYPE:
5109 case LANG_TYPE:
5110 default:
5111 gcc_unreachable ();
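/* Worked example (editor's addition, hypothetical type): for the record
   below, count_type_elements returns 1 + 4 = 5, treating the array as
   four scalars.  Any type containing a union, or a variable-sized
   array, yields -1, since the scalar count cannot be derived from the
   type alone.  */
#if 0
struct example_record
{
  int a;        /* counts as 1 scalar */
  double d[4];  /* counts as 4 scalars */
};
#endif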
5115 /* Return 1 if EXP contains mostly (3/4) zeros. */
5117 static int
5118 mostly_zeros_p (const_tree exp)
5120 if (TREE_CODE (exp) == CONSTRUCTOR)
5123 HOST_WIDE_INT nz_elts, count, elts;
5124 bool must_clear;
5126 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5127 if (must_clear)
5128 return 1;
5130 elts = count_type_elements (TREE_TYPE (exp), false);
5132 return nz_elts < elts / 4;
5135 return initializer_zerop (exp);
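/* Worked example (editor's addition, hypothetical initializers): for
   "int a[8] = { 1 };" the constructor has nz_elts == 1 against
   count_type_elements == 8, and 1 < 8/4 holds, so mostly_zeros_p
   returns 1.  For "int a[8] = { 1, 2 };" the test 2 < 2 fails and it
   returns 0.  */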
5138 /* Return 1 if EXP contains all zeros. */
5140 static int
5141 all_zeros_p (const_tree exp)
5143 if (TREE_CODE (exp) == CONSTRUCTOR)
5146 HOST_WIDE_INT nz_elts, count;
5147 bool must_clear;
5149 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5150 return nz_elts == 0;
5153 return initializer_zerop (exp);
5156 /* Helper function for store_constructor.
5157 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5158 TYPE is the type of the CONSTRUCTOR, not the element type.
5159 CLEARED is as for store_constructor.
5160 ALIAS_SET is the alias set to use for any stores.
5162 This provides a recursive shortcut back to store_constructor when it isn't
5163 necessary to go through store_field. This is so that we can pass through
5164 the cleared field to let store_constructor know that we may not have to
5165 clear a substructure if the outer structure has already been cleared. */
5167 static void
5168 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5169 HOST_WIDE_INT bitpos, enum machine_mode mode,
5170 tree exp, tree type, int cleared,
5171 alias_set_type alias_set)
5173 if (TREE_CODE (exp) == CONSTRUCTOR
5174 /* We can only call store_constructor recursively if the size and
5175 bit position are on a byte boundary. */
5176 && bitpos % BITS_PER_UNIT == 0
5177 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5178 /* If we have a nonzero bitpos for a register target, then we just
5179 let store_field do the bitfield handling. This is unlikely to
5180 generate unnecessary clear instructions anyway. */
5181 && (bitpos == 0 || MEM_P (target)))
5183 if (MEM_P (target))
5184 target
5185 = adjust_address (target,
5186 GET_MODE (target) == BLKmode
5187 || 0 != (bitpos
5188 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5189 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5192 /* Update the alias set, if required. */
5193 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5194 && MEM_ALIAS_SET (target) != 0)
5196 target = copy_rtx (target);
5197 set_mem_alias_set (target, alias_set);
5200 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5202 else
5203 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5206 /* Store the value of constructor EXP into the rtx TARGET.
5207 TARGET is either a REG or a MEM; we know it cannot conflict, since
5208 safe_from_p has been called.
5209 CLEARED is true if TARGET is known to have been zero'd.
5210 SIZE is the number of bytes of TARGET we are allowed to modify: this
5211 may not be the same as the size of EXP if we are assigning to a field
5212 which has been packed to exclude padding bits. */
5214 static void
5215 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5217 tree type = TREE_TYPE (exp);
5218 #ifdef WORD_REGISTER_OPERATIONS
5219 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5220 #endif
5222 switch (TREE_CODE (type))
5224 case RECORD_TYPE:
5225 case UNION_TYPE:
5226 case QUAL_UNION_TYPE:
5228 unsigned HOST_WIDE_INT idx;
5229 tree field, value;
5231 /* If size is zero or the target is already cleared, do nothing. */
5232 if (size == 0 || cleared)
5233 cleared = 1;
5234 /* We either clear the aggregate or indicate the value is dead. */
5235 else if ((TREE_CODE (type) == UNION_TYPE
5236 || TREE_CODE (type) == QUAL_UNION_TYPE)
5237 && ! CONSTRUCTOR_ELTS (exp))
5238 /* If the constructor is empty, clear the union. */
5240 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5241 cleared = 1;
5244 /* If we are building a static constructor into a register,
5245 set the initial value as zero so we can fold the value into
5246 a constant. But if more than one register is involved,
5247 this probably loses. */
5248 else if (REG_P (target) && TREE_STATIC (exp)
5249 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5251 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5252 cleared = 1;
5255 /* If the constructor has fewer fields than the structure or
5256 if we are initializing the structure to mostly zeros, clear
5257 the whole structure first. Don't do this if TARGET is a
5258 register whose mode size isn't equal to SIZE since
5259 clear_storage can't handle this case. */
5260 else if (size > 0
5261 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5262 != fields_length (type))
5263 || mostly_zeros_p (exp))
5264 && (!REG_P (target)
5265 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5266 == size)))
5268 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5269 cleared = 1;
5272 if (REG_P (target) && !cleared)
5273 emit_clobber (target);
5275 /* Store each element of the constructor into the
5276 corresponding field of TARGET. */
5277 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5279 enum machine_mode mode;
5280 HOST_WIDE_INT bitsize;
5281 HOST_WIDE_INT bitpos = 0;
5282 tree offset;
5283 rtx to_rtx = target;
5285 /* Just ignore missing fields. We cleared the whole
5286 structure, above, if any fields are missing. */
5287 if (field == 0)
5288 continue;
5290 if (cleared && initializer_zerop (value))
5291 continue;
5293 if (host_integerp (DECL_SIZE (field), 1))
5294 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5295 else
5296 bitsize = -1;
5298 mode = DECL_MODE (field);
5299 if (DECL_BIT_FIELD (field))
5300 mode = VOIDmode;
5302 offset = DECL_FIELD_OFFSET (field);
5303 if (host_integerp (offset, 0)
5304 && host_integerp (bit_position (field), 0))
5306 bitpos = int_bit_position (field);
5307 offset = 0;
5309 else
5310 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5312 if (offset)
5314 enum machine_mode address_mode;
5315 rtx offset_rtx;
5317 offset
5318 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5319 make_tree (TREE_TYPE (exp),
5320 target));
5322 offset_rtx = expand_normal (offset);
5323 gcc_assert (MEM_P (to_rtx));
5325 address_mode
5326 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5327 if (GET_MODE (offset_rtx) != address_mode)
5328 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5330 to_rtx = offset_address (to_rtx, offset_rtx,
5331 highest_pow2_factor (offset));
5334 #ifdef WORD_REGISTER_OPERATIONS
5335 /* If this initializes a field that is smaller than a
5336 word, at the start of a word, try to widen it to a full
5337 word. This special case allows us to output C++ member
5338 function initializations in a form that the optimizers
5339 can understand. */
5340 if (REG_P (target)
5341 && bitsize < BITS_PER_WORD
5342 && bitpos % BITS_PER_WORD == 0
5343 && GET_MODE_CLASS (mode) == MODE_INT
5344 && TREE_CODE (value) == INTEGER_CST
5345 && exp_size >= 0
5346 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5348 tree type = TREE_TYPE (value);
5350 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5352 type = lang_hooks.types.type_for_size
5353 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5354 value = fold_convert (type, value);
5357 if (BYTES_BIG_ENDIAN)
5358 value
5359 = fold_build2 (LSHIFT_EXPR, type, value,
5360 build_int_cst (type,
5361 BITS_PER_WORD - bitsize));
5362 bitsize = BITS_PER_WORD;
5363 mode = word_mode;
5365 #endif
5367 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5368 && DECL_NONADDRESSABLE_P (field))
5370 to_rtx = copy_rtx (to_rtx);
5371 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5374 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5375 value, type, cleared,
5376 get_alias_set (TREE_TYPE (field)));
5378 break;
5380 case ARRAY_TYPE:
5382 tree value, index;
5383 unsigned HOST_WIDE_INT i;
5384 int need_to_clear;
5385 tree domain;
5386 tree elttype = TREE_TYPE (type);
5387 int const_bounds_p;
5388 HOST_WIDE_INT minelt = 0;
5389 HOST_WIDE_INT maxelt = 0;
5391 domain = TYPE_DOMAIN (type);
5392 const_bounds_p = (TYPE_MIN_VALUE (domain)
5393 && TYPE_MAX_VALUE (domain)
5394 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5395 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5397 /* If we have constant bounds for the range of the type, get them. */
5398 if (const_bounds_p)
5400 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5401 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5404 /* If the constructor has fewer elements than the array, clear
5405 the whole array first. Similarly if this is a static
5406 constructor of a non-BLKmode object. */
5407 if (cleared)
5408 need_to_clear = 0;
5409 else if (REG_P (target) && TREE_STATIC (exp))
5410 need_to_clear = 1;
5411 else
5413 unsigned HOST_WIDE_INT idx;
5414 tree index, value;
5415 HOST_WIDE_INT count = 0, zero_count = 0;
5416 need_to_clear = ! const_bounds_p;
5418 /* This loop is a more accurate version of the loop in
5419 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5420 is also needed to check for missing elements. */
5421 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5423 HOST_WIDE_INT this_node_count;
5425 if (need_to_clear)
5426 break;
5428 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5430 tree lo_index = TREE_OPERAND (index, 0);
5431 tree hi_index = TREE_OPERAND (index, 1);
5433 if (! host_integerp (lo_index, 1)
5434 || ! host_integerp (hi_index, 1))
5436 need_to_clear = 1;
5437 break;
5440 this_node_count = (tree_low_cst (hi_index, 1)
5441 - tree_low_cst (lo_index, 1) + 1);
5443 else
5444 this_node_count = 1;
5446 count += this_node_count;
5447 if (mostly_zeros_p (value))
5448 zero_count += this_node_count;
5451 /* Clear the entire array first if there are any missing
5452 elements, or if the incidence of zero elements is >=
5453 75%. */
5454 if (! need_to_clear
5455 && (count < maxelt - minelt + 1
5456 || 4 * zero_count >= 3 * count))
5457 need_to_clear = 1;
5460 if (need_to_clear && size > 0)
5462 if (REG_P (target))
5463 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5464 else
5465 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5466 cleared = 1;
5469 if (!cleared && REG_P (target))
5470 /* Inform later passes that the old value is dead. */
5471 emit_clobber (target);
5473 /* Store each element of the constructor into the
5474 corresponding element of TARGET, determined by counting the
5475 elements. */
5476 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5478 enum machine_mode mode;
5479 HOST_WIDE_INT bitsize;
5480 HOST_WIDE_INT bitpos;
5481 rtx xtarget = target;
5483 if (cleared && initializer_zerop (value))
5484 continue;
5486 mode = TYPE_MODE (elttype);
5487 if (mode == BLKmode)
5488 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5489 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5490 : -1);
5491 else
5492 bitsize = GET_MODE_BITSIZE (mode);
5494 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5496 tree lo_index = TREE_OPERAND (index, 0);
5497 tree hi_index = TREE_OPERAND (index, 1);
5498 rtx index_r, pos_rtx;
5499 HOST_WIDE_INT lo, hi, count;
5500 tree position;
5502 /* If the range is constant and "small", unroll the loop. */
5503 if (const_bounds_p
5504 && host_integerp (lo_index, 0)
5505 && host_integerp (hi_index, 0)
5506 && (lo = tree_low_cst (lo_index, 0),
5507 hi = tree_low_cst (hi_index, 0),
5508 count = hi - lo + 1,
5509 (!MEM_P (target)
5510 || count <= 2
5511 || (host_integerp (TYPE_SIZE (elttype), 1)
5512 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5513 <= 40 * 8)))))
5515 lo -= minelt; hi -= minelt;
5516 for (; lo <= hi; lo++)
5518 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5520 if (MEM_P (target)
5521 && !MEM_KEEP_ALIAS_SET_P (target)
5522 && TREE_CODE (type) == ARRAY_TYPE
5523 && TYPE_NONALIASED_COMPONENT (type))
5525 target = copy_rtx (target);
5526 MEM_KEEP_ALIAS_SET_P (target) = 1;
5529 store_constructor_field
5530 (target, bitsize, bitpos, mode, value, type, cleared,
5531 get_alias_set (elttype));
5534 else
5536 rtx loop_start = gen_label_rtx ();
5537 rtx loop_end = gen_label_rtx ();
5538 tree exit_cond;
5540 expand_normal (hi_index);
5542 index = build_decl (EXPR_LOCATION (exp),
5543 VAR_DECL, NULL_TREE, domain);
5544 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5545 SET_DECL_RTL (index, index_r);
5546 store_expr (lo_index, index_r, 0, false);
5548 /* Build the head of the loop. */
5549 do_pending_stack_adjust ();
5550 emit_label (loop_start);
5552 /* Assign value to element index. */
5553 position =
5554 fold_convert (ssizetype,
5555 fold_build2 (MINUS_EXPR,
5556 TREE_TYPE (index),
5557 index,
5558 TYPE_MIN_VALUE (domain)));
5560 position =
5561 size_binop (MULT_EXPR, position,
5562 fold_convert (ssizetype,
5563 TYPE_SIZE_UNIT (elttype)));
5565 pos_rtx = expand_normal (position);
5566 xtarget = offset_address (target, pos_rtx,
5567 highest_pow2_factor (position));
5568 xtarget = adjust_address (xtarget, mode, 0);
5569 if (TREE_CODE (value) == CONSTRUCTOR)
5570 store_constructor (value, xtarget, cleared,
5571 bitsize / BITS_PER_UNIT);
5572 else
5573 store_expr (value, xtarget, 0, false);
5575 /* Generate a conditional jump to exit the loop. */
5576 exit_cond = build2 (LT_EXPR, integer_type_node,
5577 index, hi_index);
5578 jumpif (exit_cond, loop_end, -1);
5580 /* Update the loop counter, and jump to the head of
5581 the loop. */
5582 expand_assignment (index,
5583 build2 (PLUS_EXPR, TREE_TYPE (index),
5584 index, integer_one_node),
5585 false);
5587 emit_jump (loop_start);
5589 /* Build the end of the loop. */
5590 emit_label (loop_end);
5593 else if ((index != 0 && ! host_integerp (index, 0))
5594 || ! host_integerp (TYPE_SIZE (elttype), 1))
5596 tree position;
5598 if (index == 0)
5599 index = ssize_int (1);
5601 if (minelt)
5602 index = fold_convert (ssizetype,
5603 fold_build2 (MINUS_EXPR,
5604 TREE_TYPE (index),
5605 index,
5606 TYPE_MIN_VALUE (domain)));
5608 position =
5609 size_binop (MULT_EXPR, index,
5610 fold_convert (ssizetype,
5611 TYPE_SIZE_UNIT (elttype)));
5612 xtarget = offset_address (target,
5613 expand_normal (position),
5614 highest_pow2_factor (position));
5615 xtarget = adjust_address (xtarget, mode, 0);
5616 store_expr (value, xtarget, 0, false);
5618 else
5620 if (index != 0)
5621 bitpos = ((tree_low_cst (index, 0) - minelt)
5622 * tree_low_cst (TYPE_SIZE (elttype), 1));
5623 else
5624 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5626 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5627 && TREE_CODE (type) == ARRAY_TYPE
5628 && TYPE_NONALIASED_COMPONENT (type))
5630 target = copy_rtx (target);
5631 MEM_KEEP_ALIAS_SET_P (target) = 1;
5633 store_constructor_field (target, bitsize, bitpos, mode, value,
5634 type, cleared, get_alias_set (elttype));
5637 break;
5640 case VECTOR_TYPE:
5642 unsigned HOST_WIDE_INT idx;
5643 constructor_elt *ce;
5644 int i;
5645 int need_to_clear;
5646 int icode = 0;
5647 tree elttype = TREE_TYPE (type);
5648 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5649 enum machine_mode eltmode = TYPE_MODE (elttype);
5650 HOST_WIDE_INT bitsize;
5651 HOST_WIDE_INT bitpos;
5652 rtvec vector = NULL;
5653 unsigned n_elts;
5654 alias_set_type alias;
5656 gcc_assert (eltmode != BLKmode);
5658 n_elts = TYPE_VECTOR_SUBPARTS (type);
5659 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5661 enum machine_mode mode = GET_MODE (target);
5663 icode = (int) optab_handler (vec_init_optab, mode);
5664 if (icode != CODE_FOR_nothing)
5666 unsigned int i;
5668 vector = rtvec_alloc (n_elts);
5669 for (i = 0; i < n_elts; i++)
5670 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5674 /* If the constructor has fewer elements than the vector,
5675 clear the whole array first. Similarly if this is a static
5676 constructor of a non-BLKmode object. */
5677 if (cleared)
5678 need_to_clear = 0;
5679 else if (REG_P (target) && TREE_STATIC (exp))
5680 need_to_clear = 1;
5681 else
5683 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5684 tree value;
5686 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5688 int n_elts_here = tree_low_cst
5689 (int_const_binop (TRUNC_DIV_EXPR,
5690 TYPE_SIZE (TREE_TYPE (value)),
5691 TYPE_SIZE (elttype), 0), 1);
5693 count += n_elts_here;
5694 if (mostly_zeros_p (value))
5695 zero_count += n_elts_here;
5698 /* Clear the entire vector first if there are any missing elements,
5699 or if the incidence of zero elements is >= 75%. */
5700 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5703 if (need_to_clear && size > 0 && !vector)
5705 if (REG_P (target))
5706 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5707 else
5708 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5709 cleared = 1;
5712 /* Inform later passes that the old value is dead. */
5713 if (!cleared && !vector && REG_P (target))
5714 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5716 if (MEM_P (target))
5717 alias = MEM_ALIAS_SET (target);
5718 else
5719 alias = get_alias_set (elttype);
5721 /* Store each element of the constructor into the corresponding
5722 element of TARGET, determined by counting the elements. */
5723 for (idx = 0, i = 0;
5724 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5725 idx++, i += bitsize / elt_size)
5727 HOST_WIDE_INT eltpos;
5728 tree value = ce->value;
5730 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5731 if (cleared && initializer_zerop (value))
5732 continue;
5734 if (ce->index)
5735 eltpos = tree_low_cst (ce->index, 1);
5736 else
5737 eltpos = i;
5739 if (vector)
5741 /* Vector CONSTRUCTORs should only be built from smaller
5742 vectors in the case of BLKmode vectors. */
5743 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5744 RTVEC_ELT (vector, eltpos)
5745 = expand_normal (value);
5747 else
5749 enum machine_mode value_mode =
5750 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5751 ? TYPE_MODE (TREE_TYPE (value))
5752 : eltmode;
5753 bitpos = eltpos * elt_size;
5754 store_constructor_field (target, bitsize, bitpos,
5755 value_mode, value, type,
5756 cleared, alias);
5760 if (vector)
5761 emit_insn (GEN_FCN (icode)
5762 (target,
5763 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5764 break;
5767 default:
5768 gcc_unreachable ();
5772 /* Store the value of EXP (an expression tree)
5773 into a subfield of TARGET which has mode MODE and occupies
5774 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5775 If MODE is VOIDmode, it means that we are storing into a bit-field.
5777 Always return const0_rtx unless we have something particular to
5778 return.
5780 TYPE is the type of the underlying object,
5782 ALIAS_SET is the alias set for the destination. This value will
5783 (in general) be different from that for TARGET, since TARGET is a
5784 reference to the containing structure.
5786 If NONTEMPORAL is true, try generating a nontemporal store. */
5788 static rtx
5789 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5790 enum machine_mode mode, tree exp, tree type,
5791 alias_set_type alias_set, bool nontemporal)
5793 if (TREE_CODE (exp) == ERROR_MARK)
5794 return const0_rtx;
5796 /* If we have nothing to store, do nothing unless the expression has
5797 side-effects. */
5798 if (bitsize == 0)
5799 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5801 /* If we are storing into an unaligned field of an aligned union that is
5802 in a register, we may have the mode of TARGET being an integer mode but
5803 MODE == BLKmode. In that case, get an aligned object whose size and
5804 alignment are the same as TARGET and store TARGET into it (we can avoid
5805 the store if the field being stored is the entire width of TARGET). Then
5806 call ourselves recursively to store the field into a BLKmode version of
5807 that object. Finally, load from the object into TARGET. This is not
5808 very efficient in general, but should only be slightly more expensive
5809 than the otherwise-required unaligned accesses. Perhaps this can be
5810 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5811 twice, once with emit_move_insn and once via store_field. */
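     /* For illustration (a hypothetical case, assuming the union is small
        enough to be kept in a register):

          union u { int i; struct { char c[3]; } s; } x;
          x.s = y;

        Here TARGET can be an SImode pseudo holding X while the field S is
        BLKmode, so the assignment goes through the stack temporary described
        above.  */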
5813 if (mode == BLKmode
5814 && (REG_P (target) || GET_CODE (target) == SUBREG))
5816 rtx object = assign_temp (type, 0, 1, 1);
5817 rtx blk_object = adjust_address (object, BLKmode, 0);
5819 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5820 emit_move_insn (object, target);
5822 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5823 nontemporal);
5825 emit_move_insn (target, object);
5827 /* We want to return the BLKmode version of the data. */
5828 return blk_object;
5831 if (GET_CODE (target) == CONCAT)
5833 /* We're storing into a struct containing a single __complex. */
5835 gcc_assert (!bitpos);
5836 return store_expr (exp, target, 0, nontemporal);
5839 /* If the structure is in a register or if the component
5840 is a bit field, we cannot use addressing to access it.
5841 Use bit-field techniques or SUBREG to store in it. */
5843 if (mode == VOIDmode
5844 || (mode != BLKmode && ! direct_store[(int) mode]
5845 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5846 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5847 || REG_P (target)
5848 || GET_CODE (target) == SUBREG
5849 /* If the field isn't aligned enough to store as an ordinary memref,
5850 store it as a bit field. */
5851 || (mode != BLKmode
5852 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5853 || bitpos % GET_MODE_ALIGNMENT (mode))
5854 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5855 || (bitpos % BITS_PER_UNIT != 0)))
5856 /* If the RHS and field are a constant size and the size of the
5857 RHS isn't the same size as the bitfield, we must use bitfield
5858 operations. */
5859 || (bitsize >= 0
5860 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5861 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5862 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5863 decl we must use bitfield operations. */
5864 || (bitsize >= 0
5865 && TREE_CODE (exp) == MEM_REF
5866 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5867 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5868 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5869 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5871 rtx temp;
5872 gimple nop_def;
5874 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5875 implies a mask operation. If the precision is the same size as
5876 the field we're storing into, that mask is redundant. This is
5877 particularly common with bit field assignments generated by the
5878 C front end. */
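      /* For illustration (example source, assuming a 32-bit int): for

           struct { unsigned f : 8; } s;
           s.f = n;                     (N has type int)

         the front end typically narrows N with a NOP_EXPR to an 8-bit type;
         because the field is also 8 bits wide that mask would be redundant,
         so the narrowing is stripped and N is stored directly.  */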
5879 nop_def = get_def_for_expr (exp, NOP_EXPR);
5880 if (nop_def)
5882 tree type = TREE_TYPE (exp);
5883 if (INTEGRAL_TYPE_P (type)
5884 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5885 && bitsize == TYPE_PRECISION (type))
5887 tree op = gimple_assign_rhs1 (nop_def);
5888 type = TREE_TYPE (op);
5889 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5890 exp = op;
5894 temp = expand_normal (exp);
5896 /* If BITSIZE is narrower than the size of the type of EXP
5897 we will be narrowing TEMP. Normally, what's wanted are the
5898 low-order bits. However, if EXP's type is a record and this is a
5899 big-endian machine, we want the upper BITSIZE bits.
5900 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5901 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5902 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5903 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5904 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5905 - bitsize),
5906 NULL_RTX, 1);
5908 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5909 MODE. */
5910 if (mode != VOIDmode && mode != BLKmode
5911 && mode != TYPE_MODE (TREE_TYPE (exp)))
5912 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5914 /* If the modes of TEMP and TARGET are both BLKmode, both
5915 must be in memory and BITPOS must be aligned on a byte
5916 boundary. If so, we simply do a block copy. Likewise
5917 for a BLKmode-like TARGET. */
5918 if (GET_MODE (temp) == BLKmode
5919 && (GET_MODE (target) == BLKmode
5920 || (MEM_P (target)
5921 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5922 && (bitpos % BITS_PER_UNIT) == 0
5923 && (bitsize % BITS_PER_UNIT) == 0)))
5925 gcc_assert (MEM_P (target) && MEM_P (temp)
5926 && (bitpos % BITS_PER_UNIT) == 0);
5928 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5929 emit_block_move (target, temp,
5930 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5931 / BITS_PER_UNIT),
5932 BLOCK_OP_NORMAL);
5934 return const0_rtx;
5937 /* Store the value in the bitfield. */
5938 store_bit_field (target, bitsize, bitpos, mode, temp);
5940 return const0_rtx;
5942 else
5944 /* Now build a reference to just the desired component. */
5945 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5947 if (to_rtx == target)
5948 to_rtx = copy_rtx (to_rtx);
5950 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5951 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5952 set_mem_alias_set (to_rtx, alias_set);
5954 return store_expr (exp, to_rtx, 0, nontemporal);
5958 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5959 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5960 codes and find the ultimate containing object, which we return.
5962 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5963 bit position, and *PUNSIGNEDP to the signedness of the field.
5964 If the position of the field is variable, we store a tree
5965 giving the variable offset (in units) in *POFFSET.
5966 This offset is in addition to the bit position.
5967 If the position is not variable, we store 0 in *POFFSET.
5969 If any of the extraction expressions is volatile,
5970 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5972 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5973 Otherwise, it is a mode that can be used to access the field.
5975 If the field describes a variable-sized object, *PMODE is set to
5976 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5977 this case, but the address of the object can be found.
5979 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5980 look through nodes that serve as markers of a greater alignment than
5981 the one that can be deduced from the expression. These nodes make it
5982 possible for front-ends to prevent temporaries from being created by
5983 the middle-end on alignment considerations. For that purpose, the
5984 normal operating mode at high-level is to always pass FALSE so that
5985 the ultimate containing object is really returned; moreover, the
5986 associated predicate handled_component_p will always return TRUE
5987 on these nodes, thus indicating that they are essentially handled
5988 by get_inner_reference. TRUE should only be passed when the caller
5989 is scanning the expression in order to build another representation
5990 and specifically knows how to handle these nodes; as such, this is
5991 the normal operating mode in the RTL expanders. */
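/* For illustration (a made-up layout): for the reference s.a[i].b, where B
   is a 16-bit field at byte offset 4 of its struct and the elements of A
   are 8 bytes wide, this function would return the decl S with
   *PBITSIZE == 16, *PBITPOS == 32 and *POFFSET == (sizetype) i * 8;
   when I is a constant the whole displacement is folded into *PBITPOS
   and *POFFSET is 0.  */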
5993 tree
5994 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5995 HOST_WIDE_INT *pbitpos, tree *poffset,
5996 enum machine_mode *pmode, int *punsignedp,
5997 int *pvolatilep, bool keep_aligning)
5999 tree size_tree = 0;
6000 enum machine_mode mode = VOIDmode;
6001 bool blkmode_bitfield = false;
6002 tree offset = size_zero_node;
6003 double_int bit_offset = double_int_zero;
6005 /* First get the mode, signedness, and size. We do this from just the
6006 outermost expression. */
6007 *pbitsize = -1;
6008 if (TREE_CODE (exp) == COMPONENT_REF)
6010 tree field = TREE_OPERAND (exp, 1);
6011 size_tree = DECL_SIZE (field);
6012 if (!DECL_BIT_FIELD (field))
6013 mode = DECL_MODE (field);
6014 else if (DECL_MODE (field) == BLKmode)
6015 blkmode_bitfield = true;
6016 else if (TREE_THIS_VOLATILE (exp)
6017 && flag_strict_volatile_bitfields > 0)
6018 /* Volatile bitfields should be accessed in the mode of the
6019 field's type, not the mode computed based on the bit
6020 size. */
6021 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6023 *punsignedp = DECL_UNSIGNED (field);
6025 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6027 size_tree = TREE_OPERAND (exp, 1);
6028 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6029 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6031 /* For vector types, with the correct size of access, use the mode of
6032 inner type. */
6033 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6034 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6035 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6036 mode = TYPE_MODE (TREE_TYPE (exp));
6038 else
6040 mode = TYPE_MODE (TREE_TYPE (exp));
6041 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6043 if (mode == BLKmode)
6044 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6045 else
6046 *pbitsize = GET_MODE_BITSIZE (mode);
6049 if (size_tree != 0)
6051 if (! host_integerp (size_tree, 1))
6052 mode = BLKmode, *pbitsize = -1;
6053 else
6054 *pbitsize = tree_low_cst (size_tree, 1);
6057 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6058 and find the ultimate containing object. */
6059 while (1)
6061 switch (TREE_CODE (exp))
6063 case BIT_FIELD_REF:
6064 bit_offset
6065 = double_int_add (bit_offset,
6066 tree_to_double_int (TREE_OPERAND (exp, 2)));
6067 break;
6069 case COMPONENT_REF:
6071 tree field = TREE_OPERAND (exp, 1);
6072 tree this_offset = component_ref_field_offset (exp);
6074 /* If this field hasn't been filled in yet, don't go past it.
6075 This should only happen when folding expressions made during
6076 type construction. */
6077 if (this_offset == 0)
6078 break;
6080 offset = size_binop (PLUS_EXPR, offset, this_offset);
6081 bit_offset = double_int_add (bit_offset,
6082 tree_to_double_int
6083 (DECL_FIELD_BIT_OFFSET (field)));
6085 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6087 break;
6089 case ARRAY_REF:
6090 case ARRAY_RANGE_REF:
6092 tree index = TREE_OPERAND (exp, 1);
6093 tree low_bound = array_ref_low_bound (exp);
6094 tree unit_size = array_ref_element_size (exp);
6096 /* We assume all arrays have sizes that are a multiple of a byte.
6097 First subtract the lower bound, if any, in the type of the
6098 index, then convert to sizetype and multiply by the size of
6099 the array element. */
6100 if (! integer_zerop (low_bound))
6101 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6102 index, low_bound);
6104 offset = size_binop (PLUS_EXPR, offset,
6105 size_binop (MULT_EXPR,
6106 fold_convert (sizetype, index),
6107 unit_size));
6109 break;
6111 case REALPART_EXPR:
6112 break;
6114 case IMAGPART_EXPR:
6115 bit_offset = double_int_add (bit_offset,
6116 uhwi_to_double_int (*pbitsize));
6117 break;
6119 case VIEW_CONVERT_EXPR:
6120 if (keep_aligning && STRICT_ALIGNMENT
6121 && (TYPE_ALIGN (TREE_TYPE (exp))
6122 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6123 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6124 < BIGGEST_ALIGNMENT)
6125 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6126 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6127 goto done;
6128 break;
6130 case MEM_REF:
6131 /* Hand back the decl for MEM[&decl, off]. */
6132 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6134 tree off = TREE_OPERAND (exp, 1);
6135 if (!integer_zerop (off))
6137 double_int boff, coff = mem_ref_offset (exp);
6138 boff = double_int_lshift (coff,
6139 BITS_PER_UNIT == 8
6140 ? 3 : exact_log2 (BITS_PER_UNIT),
6141 HOST_BITS_PER_DOUBLE_INT, true);
6142 bit_offset = double_int_add (bit_offset, boff);
6144 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6146 goto done;
6148 default:
6149 goto done;
6152 /* If any reference in the chain is volatile, the effect is volatile. */
6153 if (TREE_THIS_VOLATILE (exp))
6154 *pvolatilep = 1;
6156 exp = TREE_OPERAND (exp, 0);
6158 done:
6160 /* If OFFSET is constant, see if we can return the whole thing as a
6161 constant bit position. Make sure to handle overflow during
6162 this conversion. */
6163 if (host_integerp (offset, 0))
6165 double_int tem = double_int_lshift (tree_to_double_int (offset),
6166 BITS_PER_UNIT == 8
6167 ? 3 : exact_log2 (BITS_PER_UNIT),
6168 HOST_BITS_PER_DOUBLE_INT, true);
6169 tem = double_int_add (tem, bit_offset);
6170 if (double_int_fits_in_shwi_p (tem))
6172 *pbitpos = double_int_to_shwi (tem);
6173 *poffset = offset = NULL_TREE;
6177 /* Otherwise, split it up. */
6178 if (offset)
6180 *pbitpos = double_int_to_shwi (bit_offset);
6181 *poffset = offset;
6184 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6185 if (mode == VOIDmode
6186 && blkmode_bitfield
6187 && (*pbitpos % BITS_PER_UNIT) == 0
6188 && (*pbitsize % BITS_PER_UNIT) == 0)
6189 *pmode = BLKmode;
6190 else
6191 *pmode = mode;
6193 return exp;
6196 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6197 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6198 EXP is marked as PACKED. */
6200 bool
6201 contains_packed_reference (const_tree exp)
6203 bool packed_p = false;
6205 while (1)
6207 switch (TREE_CODE (exp))
6209 case COMPONENT_REF:
6211 tree field = TREE_OPERAND (exp, 1);
6212 packed_p = DECL_PACKED (field)
6213 || TYPE_PACKED (TREE_TYPE (field))
6214 || TYPE_PACKED (TREE_TYPE (exp));
6215 if (packed_p)
6216 goto done;
6218 break;
6220 case BIT_FIELD_REF:
6221 case ARRAY_REF:
6222 case ARRAY_RANGE_REF:
6223 case REALPART_EXPR:
6224 case IMAGPART_EXPR:
6225 case VIEW_CONVERT_EXPR:
6226 break;
6228 default:
6229 goto done;
6231 exp = TREE_OPERAND (exp, 0);
6233 done:
6234 return packed_p;
6237 /* Return a tree of sizetype representing the size, in bytes, of the element
6238 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6240 tree
6241 array_ref_element_size (tree exp)
6243 tree aligned_size = TREE_OPERAND (exp, 3);
6244 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6245 location_t loc = EXPR_LOCATION (exp);
6247 /* If a size was specified in the ARRAY_REF, it's the size measured
6248 in alignment units of the element type. So multiply by that value. */
6249 if (aligned_size)
6251 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6252 sizetype from another type of the same width and signedness. */
6253 if (TREE_TYPE (aligned_size) != sizetype)
6254 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6255 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6256 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6259 /* Otherwise, take the size from that of the element type. Substitute
6260 any PLACEHOLDER_EXPR that we have. */
6261 else
6262 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6265 /* Return a tree representing the lower bound of the array mentioned in
6266 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6268 tree
6269 array_ref_low_bound (tree exp)
6271 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6273 /* If a lower bound is specified in EXP, use it. */
6274 if (TREE_OPERAND (exp, 2))
6275 return TREE_OPERAND (exp, 2);
6277 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6278 substituting for a PLACEHOLDER_EXPR as needed. */
6279 if (domain_type && TYPE_MIN_VALUE (domain_type))
6280 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6282 /* Otherwise, return a zero of the appropriate type. */
6283 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6286 /* Return a tree representing the upper bound of the array mentioned in
6287 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6289 tree
6290 array_ref_up_bound (tree exp)
6292 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6294 /* If there is a domain type and it has an upper bound, use it, substituting
6295 for a PLACEHOLDER_EXPR as needed. */
6296 if (domain_type && TYPE_MAX_VALUE (domain_type))
6297 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6299 /* Otherwise fail. */
6300 return NULL_TREE;
6303 /* Return a tree representing the offset, in bytes, of the field referenced
6304 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6306 tree
6307 component_ref_field_offset (tree exp)
6309 tree aligned_offset = TREE_OPERAND (exp, 2);
6310 tree field = TREE_OPERAND (exp, 1);
6311 location_t loc = EXPR_LOCATION (exp);
6313 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6314 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6315 value. */
6316 if (aligned_offset)
6318 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6319 sizetype from another type of the same width and signedness. */
6320 if (TREE_TYPE (aligned_offset) != sizetype)
6321 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6322 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6323 size_int (DECL_OFFSET_ALIGN (field)
6324 / BITS_PER_UNIT));
6327 /* Otherwise, take the offset from that of the field. Substitute
6328 any PLACEHOLDER_EXPR that we have. */
6329 else
6330 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6333 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6335 static unsigned HOST_WIDE_INT
6336 target_align (const_tree target)
6338 /* We might have a chain of nested references with intermediate misaligning
6339 bitfield components, so we need to recurse to find out. */
6341 unsigned HOST_WIDE_INT this_align, outer_align;
6343 switch (TREE_CODE (target))
6345 case BIT_FIELD_REF:
6346 return 1;
6348 case COMPONENT_REF:
6349 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6350 outer_align = target_align (TREE_OPERAND (target, 0));
6351 return MIN (this_align, outer_align);
6353 case ARRAY_REF:
6354 case ARRAY_RANGE_REF:
6355 this_align = TYPE_ALIGN (TREE_TYPE (target));
6356 outer_align = target_align (TREE_OPERAND (target, 0));
6357 return MIN (this_align, outer_align);
6359 CASE_CONVERT:
6360 case NON_LVALUE_EXPR:
6361 case VIEW_CONVERT_EXPR:
6362 this_align = TYPE_ALIGN (TREE_TYPE (target));
6363 outer_align = target_align (TREE_OPERAND (target, 0));
6364 return MAX (this_align, outer_align);
6366 default:
6367 return TYPE_ALIGN (TREE_TYPE (target));
6372 /* Given an rtx VALUE that may contain additions and multiplications, return
6373 an equivalent value that just refers to a register, memory, or constant.
6374 This is done by generating instructions to perform the arithmetic and
6375 returning a pseudo-register containing the value.
6377 The returned value may be a REG, SUBREG, MEM or constant. */
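/* For illustration: given VALUE == (plus (reg 100) (const_int 4)), this
   emits an add (into TARGET when that is suitable) and returns a register
   holding the sum, whereas a VALUE that is already a REG, MEM or constant
   is typically returned unchanged.  */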
6380 force_operand (rtx value, rtx target)
6382 rtx op1, op2;
6383 /* Use subtarget as the target for operand 0 of a binary operation. */
6384 rtx subtarget = get_subtarget (target);
6385 enum rtx_code code = GET_CODE (value);
6387 /* Check for subreg applied to an expression produced by loop optimizer. */
6388 if (code == SUBREG
6389 && !REG_P (SUBREG_REG (value))
6390 && !MEM_P (SUBREG_REG (value)))
6392 value
6393 = simplify_gen_subreg (GET_MODE (value),
6394 force_reg (GET_MODE (SUBREG_REG (value)),
6395 force_operand (SUBREG_REG (value),
6396 NULL_RTX)),
6397 GET_MODE (SUBREG_REG (value)),
6398 SUBREG_BYTE (value));
6399 code = GET_CODE (value);
6402 /* Check for a PIC address load. */
6403 if ((code == PLUS || code == MINUS)
6404 && XEXP (value, 0) == pic_offset_table_rtx
6405 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6406 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6407 || GET_CODE (XEXP (value, 1)) == CONST))
6409 if (!subtarget)
6410 subtarget = gen_reg_rtx (GET_MODE (value));
6411 emit_move_insn (subtarget, value);
6412 return subtarget;
6415 if (ARITHMETIC_P (value))
6417 op2 = XEXP (value, 1);
6418 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6419 subtarget = 0;
6420 if (code == MINUS && CONST_INT_P (op2))
6422 code = PLUS;
6423 op2 = negate_rtx (GET_MODE (value), op2);
6426 /* Check for an addition with OP2 a constant integer and our first
6427 operand a PLUS of a virtual register and something else. In that
6428 case, we want to emit the sum of the virtual register and the
6429 constant first and then add the other value. This allows virtual
6430 register instantiation to simply modify the constant rather than
6431 creating another one around this addition. */
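      /* For illustration:
           (plus (plus (reg virtual-stack-vars) (reg 100)) (const_int 8))
         is handled by first computing virtual-stack-vars + 8, which
         instantiation can later rewrite as a plain frame-pointer offset,
         and only then adding (reg 100).  */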
6432 if (code == PLUS && CONST_INT_P (op2)
6433 && GET_CODE (XEXP (value, 0)) == PLUS
6434 && REG_P (XEXP (XEXP (value, 0), 0))
6435 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6436 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6438 rtx temp = expand_simple_binop (GET_MODE (value), code,
6439 XEXP (XEXP (value, 0), 0), op2,
6440 subtarget, 0, OPTAB_LIB_WIDEN);
6441 return expand_simple_binop (GET_MODE (value), code, temp,
6442 force_operand (XEXP (XEXP (value,
6443 0), 1), 0),
6444 target, 0, OPTAB_LIB_WIDEN);
6447 op1 = force_operand (XEXP (value, 0), subtarget);
6448 op2 = force_operand (op2, NULL_RTX);
6449 switch (code)
6451 case MULT:
6452 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6453 case DIV:
6454 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6455 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6456 target, 1, OPTAB_LIB_WIDEN);
6457 else
6458 return expand_divmod (0,
6459 FLOAT_MODE_P (GET_MODE (value))
6460 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6461 GET_MODE (value), op1, op2, target, 0);
6462 case MOD:
6463 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6464 target, 0);
6465 case UDIV:
6466 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6467 target, 1);
6468 case UMOD:
6469 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6470 target, 1);
6471 case ASHIFTRT:
6472 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6473 target, 0, OPTAB_LIB_WIDEN);
6474 default:
6475 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6476 target, 1, OPTAB_LIB_WIDEN);
6479 if (UNARY_P (value))
6481 if (!target)
6482 target = gen_reg_rtx (GET_MODE (value));
6483 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6484 switch (code)
6486 case ZERO_EXTEND:
6487 case SIGN_EXTEND:
6488 case TRUNCATE:
6489 case FLOAT_EXTEND:
6490 case FLOAT_TRUNCATE:
6491 convert_move (target, op1, code == ZERO_EXTEND);
6492 return target;
6494 case FIX:
6495 case UNSIGNED_FIX:
6496 expand_fix (target, op1, code == UNSIGNED_FIX);
6497 return target;
6499 case FLOAT:
6500 case UNSIGNED_FLOAT:
6501 expand_float (target, op1, code == UNSIGNED_FLOAT);
6502 return target;
6504 default:
6505 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6509 #ifdef INSN_SCHEDULING
6510 /* On machines that have insn scheduling, we want all memory references to be
6511 explicit, so we need to deal with such paradoxical SUBREGs. */
6512 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6513 && (GET_MODE_SIZE (GET_MODE (value))
6514 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6515 value
6516 = simplify_gen_subreg (GET_MODE (value),
6517 force_reg (GET_MODE (SUBREG_REG (value)),
6518 force_operand (SUBREG_REG (value),
6519 NULL_RTX)),
6520 GET_MODE (SUBREG_REG (value)),
6521 SUBREG_BYTE (value));
6522 #endif
6524 return value;
6527 /* Subroutine of expand_expr: return nonzero iff there is no way that
6528 EXP can reference X, which is being modified. TOP_P is nonzero if this
6529 call is going to be used to determine whether we need a temporary
6530 for EXP, as opposed to a recursive call to this function.
6532 It is always safe for this routine to return zero since it merely
6533 searches for optimization opportunities. */
6536 safe_from_p (const_rtx x, tree exp, int top_p)
6538 rtx exp_rtl = 0;
6539 int i, nops;
6541 if (x == 0
6542 /* If EXP has varying size, we MUST use a target since we currently
6543 have no way of allocating temporaries of variable size
6544 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6545 So we assume here that something at a higher level has prevented a
6546 clash. This is somewhat bogus, but the best we can do. Only
6547 do this when X is BLKmode and when we are at the top level. */
6548 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6549 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6550 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6551 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6552 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6553 != INTEGER_CST)
6554 && GET_MODE (x) == BLKmode)
6555 /* If X is in the outgoing argument area, it is always safe. */
6556 || (MEM_P (x)
6557 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6558 || (GET_CODE (XEXP (x, 0)) == PLUS
6559 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6560 return 1;
6562 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6563 find the underlying pseudo. */
6564 if (GET_CODE (x) == SUBREG)
6566 x = SUBREG_REG (x);
6567 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6568 return 0;
6571 /* Now look at our tree code and possibly recurse. */
6572 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6574 case tcc_declaration:
6575 exp_rtl = DECL_RTL_IF_SET (exp);
6576 break;
6578 case tcc_constant:
6579 return 1;
6581 case tcc_exceptional:
6582 if (TREE_CODE (exp) == TREE_LIST)
6584 while (1)
6586 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6587 return 0;
6588 exp = TREE_CHAIN (exp);
6589 if (!exp)
6590 return 1;
6591 if (TREE_CODE (exp) != TREE_LIST)
6592 return safe_from_p (x, exp, 0);
6595 else if (TREE_CODE (exp) == CONSTRUCTOR)
6597 constructor_elt *ce;
6598 unsigned HOST_WIDE_INT idx;
6600 for (idx = 0;
6601 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6602 idx++)
6603 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6604 || !safe_from_p (x, ce->value, 0))
6605 return 0;
6606 return 1;
6608 else if (TREE_CODE (exp) == ERROR_MARK)
6609 return 1; /* An already-visited SAVE_EXPR? */
6610 else
6611 return 0;
6613 case tcc_statement:
6614 /* The only case we look at here is the DECL_INITIAL inside a
6615 DECL_EXPR. */
6616 return (TREE_CODE (exp) != DECL_EXPR
6617 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6618 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6619 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6621 case tcc_binary:
6622 case tcc_comparison:
6623 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6624 return 0;
6625 /* Fall through. */
6627 case tcc_unary:
6628 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6630 case tcc_expression:
6631 case tcc_reference:
6632 case tcc_vl_exp:
6633 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6634 the expression. If it is set, we conflict iff we are that rtx or
6635 both are in memory. Otherwise, we check all operands of the
6636 expression recursively. */
6638 switch (TREE_CODE (exp))
6640 case ADDR_EXPR:
6641 /* If the operand is static or we are static, we can't conflict.
6642 Likewise if we don't conflict with the operand at all. */
6643 if (staticp (TREE_OPERAND (exp, 0))
6644 || TREE_STATIC (exp)
6645 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6646 return 1;
6648 /* Otherwise, the only way this can conflict is if we are taking
6649 the address of a DECL whose address is part of X, which is
6650 very rare. */
6651 exp = TREE_OPERAND (exp, 0);
6652 if (DECL_P (exp))
6654 if (!DECL_RTL_SET_P (exp)
6655 || !MEM_P (DECL_RTL (exp)))
6656 return 0;
6657 else
6658 exp_rtl = XEXP (DECL_RTL (exp), 0);
6660 break;
6662 case MISALIGNED_INDIRECT_REF:
6663 case INDIRECT_REF:
6664 if (MEM_P (x)
6665 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6666 get_alias_set (exp)))
6667 return 0;
6668 break;
6670 case CALL_EXPR:
6671 /* Assume that the call will clobber all hard registers and
6672 all of memory. */
6673 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6674 || MEM_P (x))
6675 return 0;
6676 break;
6678 case WITH_CLEANUP_EXPR:
6679 case CLEANUP_POINT_EXPR:
6680 /* Lowered by gimplify.c. */
6681 gcc_unreachable ();
6683 case SAVE_EXPR:
6684 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6686 default:
6687 break;
6690 /* If we have an rtx, we do not need to scan our operands. */
6691 if (exp_rtl)
6692 break;
6694 nops = TREE_OPERAND_LENGTH (exp);
6695 for (i = 0; i < nops; i++)
6696 if (TREE_OPERAND (exp, i) != 0
6697 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6698 return 0;
6700 break;
6702 case tcc_type:
6703 /* Should never get a type here. */
6704 gcc_unreachable ();
6707 /* If we have an rtl, find any enclosed object. Then see if we conflict
6708 with it. */
6709 if (exp_rtl)
6711 if (GET_CODE (exp_rtl) == SUBREG)
6713 exp_rtl = SUBREG_REG (exp_rtl);
6714 if (REG_P (exp_rtl)
6715 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6716 return 0;
6719 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6720 are memory and they conflict. */
6721 return ! (rtx_equal_p (x, exp_rtl)
6722 || (MEM_P (x) && MEM_P (exp_rtl)
6723 && true_dependence (exp_rtl, VOIDmode, x,
6724 rtx_addr_varies_p)));
6727 /* If we reach here, it is safe. */
6728 return 1;
6732 /* Return the highest power of two that EXP is known to be a multiple of.
6733 This is used in updating alignment of MEMs in array references. */
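/* For illustration: for the tree (i * 12) the result is 4, since the
   unknown factor I contributes 1 and the constant 12 contributes 4; an
   offset known to be a multiple of 4 allows the MEM it addresses to be
   marked as at least 4-byte aligned.  */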
6735 unsigned HOST_WIDE_INT
6736 highest_pow2_factor (const_tree exp)
6738 unsigned HOST_WIDE_INT c0, c1;
6740 switch (TREE_CODE (exp))
6742 case INTEGER_CST:
6743 /* We can find the lowest bit that's a one. If the low
6744 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6745 We need to handle this case since we can find it in a COND_EXPR,
6746 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6747 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6748 later ICE. */
6749 if (TREE_OVERFLOW (exp))
6750 return BIGGEST_ALIGNMENT;
6751 else
6753 /* Note: tree_low_cst is intentionally not used here,
6754 we don't care about the upper bits. */
6755 c0 = TREE_INT_CST_LOW (exp);
6756 c0 &= -c0;
6757 return c0 ? c0 : BIGGEST_ALIGNMENT;
6759 break;
6761 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6762 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6763 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6764 return MIN (c0, c1);
6766 case MULT_EXPR:
6767 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6768 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6769 return c0 * c1;
6771 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6772 case CEIL_DIV_EXPR:
6773 if (integer_pow2p (TREE_OPERAND (exp, 1))
6774 && host_integerp (TREE_OPERAND (exp, 1), 1))
6776 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6777 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6778 return MAX (1, c0 / c1);
6780 break;
6782 case BIT_AND_EXPR:
6783 /* The highest power of two of a bit-and expression is the maximum of
6784 that of its operands. We typically get here for a complex LHS and
6785 a constant negative power of two on the RHS to force an explicit
6786 alignment, so don't bother looking at the LHS. */
6787 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6789 CASE_CONVERT:
6790 case SAVE_EXPR:
6791 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6793 case COMPOUND_EXPR:
6794 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6796 case COND_EXPR:
6797 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6798 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6799 return MIN (c0, c1);
6801 default:
6802 break;
6805 return 1;
6808 /* Similar, except that the alignment requirements of TARGET are
6809 taken into account. Assume it is at least as aligned as its
6810 type, unless it is a COMPONENT_REF in which case the layout of
6811 the structure gives the alignment. */
6813 static unsigned HOST_WIDE_INT
6814 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6816 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6817 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6819 return MAX (factor, talign);
6822 /* Return &VAR expression for emulated thread local VAR. */
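/* For illustration (the names follow the usual emutls conventions and may
   vary): for "__thread int t;", taking &t is rewritten into roughly

     (int *) __emutls_get_address (&__emutls_v.t)

   where __emutls_v.t is the control variable created by emutls_decl.  */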
6824 static tree
6825 emutls_var_address (tree var)
6827 tree emuvar = emutls_decl (var);
6828 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6829 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6830 tree call = build_call_expr (fn, 1, arg);
6831 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6835 /* Subroutine of expand_expr. Expand the two operands of a binary
6836 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6837 The value may be stored in TARGET if TARGET is nonzero. The
6838 MODIFIER argument is as documented by expand_expr. */
6840 static void
6841 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6842 enum expand_modifier modifier)
6844 if (! safe_from_p (target, exp1, 1))
6845 target = 0;
6846 if (operand_equal_p (exp0, exp1, 0))
6848 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6849 *op1 = copy_rtx (*op0);
6851 else
6853 /* If we need to preserve evaluation order, copy exp0 into its own
6854 temporary variable so that it can't be clobbered by exp1. */
6855 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6856 exp0 = save_expr (exp0);
6857 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6858 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6863 /* Return a MEM that contains constant EXP. DEFER is as for
6864 output_constant_def and MODIFIER is as for expand_expr. */
6866 static rtx
6867 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6869 rtx mem;
6871 mem = output_constant_def (exp, defer);
6872 if (modifier != EXPAND_INITIALIZER)
6873 mem = use_anchored_address (mem);
6874 return mem;
6877 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6878 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6880 static rtx
6881 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6882 enum expand_modifier modifier, addr_space_t as)
6884 rtx result, subtarget;
6885 tree inner, offset;
6886 HOST_WIDE_INT bitsize, bitpos;
6887 int volatilep, unsignedp;
6888 enum machine_mode mode1;
6890 /* If we are taking the address of a constant and are at the top level,
6891 we have to use output_constant_def since we can't call force_const_mem
6892 at top level. */
6893 /* ??? This should be considered a front-end bug. We should not be
6894 generating ADDR_EXPR of something that isn't an LVALUE. The only
6895 exception here is STRING_CST. */
6896 if (CONSTANT_CLASS_P (exp))
6897 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6899 /* Everything must be something allowed by is_gimple_addressable. */
6900 switch (TREE_CODE (exp))
6902 case INDIRECT_REF:
6903 /* This case will happen via recursion for &a->b. */
6904 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6906 case MEM_REF:
6908 tree tem = TREE_OPERAND (exp, 0);
6909 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6910 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6911 tem,
6912 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6913 return expand_expr (tem, target, tmode, modifier);
6916 case CONST_DECL:
6917 /* Expand the initializer like constants above. */
6918 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6920 case REALPART_EXPR:
6921 /* The real part of the complex number is always first, therefore
6922 the address is the same as the address of the parent object. */
6923 offset = 0;
6924 bitpos = 0;
6925 inner = TREE_OPERAND (exp, 0);
6926 break;
6928 case IMAGPART_EXPR:
6929 /* The imaginary part of the complex number is always second.
6930 The expression is therefore always offset by the size of the
6931 scalar type. */
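      /* For illustration: for a _Complex double object C, the address of
         __imag C is the address of C plus sizeof (double), i.e. BITPOS below
         becomes GET_MODE_BITSIZE (DFmode) (64 on a target with a 64-bit
         double).  */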
6932 offset = 0;
6933 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6934 inner = TREE_OPERAND (exp, 0);
6935 break;
6937 case VAR_DECL:
6938 /* TLS emulation hook - replace __thread VAR's &VAR with
6939 __emutls_get_address (&_emutls.VAR). */
6940 if (! targetm.have_tls
6941 && TREE_CODE (exp) == VAR_DECL
6942 && DECL_THREAD_LOCAL_P (exp))
6944 exp = emutls_var_address (exp);
6945 return expand_expr (exp, target, tmode, modifier);
6947 /* Fall through. */
6949 default:
6950 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6951 expand_expr, as that can have various side effects; LABEL_DECLs for
6952 example, may not have their DECL_RTL set yet. Expand the rtl of
6953 CONSTRUCTORs too, which should yield a memory reference for the
6954 constructor's contents. Assume language specific tree nodes can
6955 be expanded in some interesting way. */
6956 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6957 if (DECL_P (exp)
6958 || TREE_CODE (exp) == CONSTRUCTOR
6959 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6961 result = expand_expr (exp, target, tmode,
6962 modifier == EXPAND_INITIALIZER
6963 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6965 /* If the DECL isn't in memory, then the DECL wasn't properly
6966 marked TREE_ADDRESSABLE, which will be either a front-end
6967 or a tree optimizer bug. */
6968 gcc_assert (MEM_P (result));
6969 result = XEXP (result, 0);
6971 /* ??? Is this needed anymore? */
6972 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6974 assemble_external (exp);
6975 TREE_USED (exp) = 1;
6978 if (modifier != EXPAND_INITIALIZER
6979 && modifier != EXPAND_CONST_ADDRESS)
6980 result = force_operand (result, target);
6981 return result;
6984 /* Pass FALSE as the last argument to get_inner_reference although
6985 we are expanding to RTL. The rationale is that we know how to
6986 handle "aligning nodes" here: we can just bypass them because
6987 they won't change the final object whose address will be returned
6988 (they actually exist only for that purpose). */
6989 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6990 &mode1, &unsignedp, &volatilep, false);
6991 break;
6994 /* We must have made progress. */
6995 gcc_assert (inner != exp);
6997 subtarget = offset || bitpos ? NULL_RTX : target;
6998 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6999 inner alignment, force the inner to be sufficiently aligned. */
7000 if (CONSTANT_CLASS_P (inner)
7001 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7003 inner = copy_node (inner);
7004 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7005 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7006 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7008 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7010 if (offset)
7012 rtx tmp;
7014 if (modifier != EXPAND_NORMAL)
7015 result = force_operand (result, NULL);
7016 tmp = expand_expr (offset, NULL_RTX, tmode,
7017 modifier == EXPAND_INITIALIZER
7018 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7020 result = convert_memory_address_addr_space (tmode, result, as);
7021 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7023 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7024 result = gen_rtx_PLUS (tmode, result, tmp);
7025 else
7027 subtarget = bitpos ? NULL_RTX : target;
7028 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7029 1, OPTAB_LIB_WIDEN);
7033 if (bitpos)
7035 /* Someone beforehand should have rejected taking the address
7036 of such an object. */
7037 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7039 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7040 if (modifier < EXPAND_SUM)
7041 result = force_operand (result, target);
7044 return result;
7047 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7048 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7050 static rtx
7051 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7052 enum expand_modifier modifier)
7054 addr_space_t as = ADDR_SPACE_GENERIC;
7055 enum machine_mode address_mode = Pmode;
7056 enum machine_mode pointer_mode = ptr_mode;
7057 enum machine_mode rmode;
7058 rtx result;
7060 /* Target mode of VOIDmode says "whatever's natural". */
7061 if (tmode == VOIDmode)
7062 tmode = TYPE_MODE (TREE_TYPE (exp));
7064 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7066 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7067 address_mode = targetm.addr_space.address_mode (as);
7068 pointer_mode = targetm.addr_space.pointer_mode (as);
7071 /* We can get called with some Weird Things if the user does silliness
7072 like "(short) &a". In that case, convert_memory_address won't do
7073 the right thing, so ignore the given target mode. */
7074 if (tmode != address_mode && tmode != pointer_mode)
7075 tmode = address_mode;
7077 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7078 tmode, modifier, as);
7080 /* Despite expand_expr's claims concerning ignoring TMODE when not
7081 strictly convenient, stuff breaks if we don't honor it. Note
7082 that combined with the above, we only do this for pointer modes. */
7083 rmode = GET_MODE (result);
7084 if (rmode == VOIDmode)
7085 rmode = tmode;
7086 if (rmode != tmode)
7087 result = convert_memory_address_addr_space (tmode, result, as);
7089 return result;
7092 /* Generate code for computing CONSTRUCTOR EXP.
7093 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7094 is TRUE, instead of creating a temporary variable in memory,
7095 NULL is returned and the caller needs to handle it differently. */
7097 static rtx
7098 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7099 bool avoid_temp_mem)
7101 tree type = TREE_TYPE (exp);
7102 enum machine_mode mode = TYPE_MODE (type);
7104 /* Try to avoid creating a temporary at all. This is possible
7105 if all of the initializer is zero.
7106 FIXME: try to handle all [0..255] initializers we can handle
7107 with memset. */
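  /* For illustration: a BLKmode initialization such as

       struct S s = { 0 };

     with a usable memory TARGET can be handled entirely by the
     clear_storage call below, with no temporary ever materialized.  */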
7108 if (TREE_STATIC (exp)
7109 && !TREE_ADDRESSABLE (exp)
7110 && target != 0 && mode == BLKmode
7111 && all_zeros_p (exp))
7113 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7114 return target;
7117 /* All elts simple constants => refer to a constant in memory. But
7118 if this is a non-BLKmode mode, let it store a field at a time
7119 since that should make a CONST_INT or CONST_DOUBLE when we
7120 fold. Likewise, if we have a target we can use, it is best to
7121 store directly into the target unless the type is large enough
7122 that memcpy will be used. If we are making an initializer and
7123 all operands are constant, put it in memory as well.
7125 FIXME: Avoid trying to fill vector constructors piece-meal.
7126 Output them with output_constant_def below unless we're sure
7127 they're zeros. This should go away when vector initializers
7128 are treated like VECTOR_CST instead of arrays. */
7129 if ((TREE_STATIC (exp)
7130 && ((mode == BLKmode
7131 && ! (target != 0 && safe_from_p (target, exp, 1)))
7132 || TREE_ADDRESSABLE (exp)
7133 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7134 && (! MOVE_BY_PIECES_P
7135 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7136 TYPE_ALIGN (type)))
7137 && ! mostly_zeros_p (exp))))
7138 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7139 && TREE_CONSTANT (exp)))
7141 rtx constructor;
7143 if (avoid_temp_mem)
7144 return NULL_RTX;
7146 constructor = expand_expr_constant (exp, 1, modifier);
7148 if (modifier != EXPAND_CONST_ADDRESS
7149 && modifier != EXPAND_INITIALIZER
7150 && modifier != EXPAND_SUM)
7151 constructor = validize_mem (constructor);
7153 return constructor;
7156 /* Handle calls that pass values in multiple non-contiguous
7157 locations. The Irix 6 ABI has examples of this. */
7158 if (target == 0 || ! safe_from_p (target, exp, 1)
7159 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7161 if (avoid_temp_mem)
7162 return NULL_RTX;
7164 target
7165 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7166 | (TREE_READONLY (exp)
7167 * TYPE_QUAL_CONST))),
7168 0, TREE_ADDRESSABLE (exp), 1);
7171 store_constructor (exp, target, 0, int_expr_size (exp));
7172 return target;
7176 /* expand_expr: generate code for computing expression EXP.
7177 An rtx for the computed value is returned. The value is never null.
7178 In the case of a void EXP, const0_rtx is returned.
7180 The value may be stored in TARGET if TARGET is nonzero.
7181 TARGET is just a suggestion; callers must assume that
7182 the rtx returned may not be the same as TARGET.
7184 If TARGET is CONST0_RTX, it means that the value will be ignored.
7186 If TMODE is not VOIDmode, it suggests generating the
7187 result in mode TMODE. But this is done only when convenient.
7188 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7189 TMODE is just a suggestion; callers must assume that
7190 the rtx returned may not have mode TMODE.
7192 Note that TARGET may have neither TMODE nor MODE. In that case, it
7193 probably will not be used.
7195 If MODIFIER is EXPAND_SUM then when EXP is an addition
7196 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7197 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7198 products as above, or REG or MEM, or constant.
7199 Ordinarily in such cases we would output mul or add instructions
7200 and then return a pseudo reg containing the sum.
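   For illustration (the exact RTL is target-dependent), expanding a + 4*i
   with EXPAND_SUM may come back as
     (plus (reg A) (mult (reg I) (const_int 4)))
   so that the caller can fold the whole sum into an address.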
7202 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7203 it also marks a label as absolutely required (it can't be dead).
7204 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7205 This is used for outputting expressions used in initializers.
7207 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7208 with a constant address even if that address is not normally legitimate.
7209 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7211 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7212 a call parameter. Such targets require special care as we haven't yet
7213 marked TARGET so that it's safe from being trashed by libcalls. We
7214 don't want to use TARGET for anything but the final result;
7215 intermediate values must go elsewhere. Additionally, calls to
7216 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7218 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7219 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7220 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7221 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7222 recursively. */
7225 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7226 enum expand_modifier modifier, rtx *alt_rtl)
7228 rtx ret;
7230 /* Handle ERROR_MARK before anybody tries to access its type. */
7231 if (TREE_CODE (exp) == ERROR_MARK
7232 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7234 ret = CONST0_RTX (tmode);
7235 return ret ? ret : const0_rtx;
7238 /* If this is an expression of some kind and it has an associated line
7239 number, then emit the line number before expanding the expression.
7241 We need to save and restore the file and line information so that
7242 errors discovered during expansion are emitted with the right
7243 information. It would be better if the diagnostic routines
7244 used the file/line information embedded in the tree nodes rather
7245 than globals. */
7246 if (cfun && EXPR_HAS_LOCATION (exp))
7248 location_t saved_location = input_location;
7249 location_t saved_curr_loc = get_curr_insn_source_location ();
7250 tree saved_block = get_curr_insn_block ();
7251 input_location = EXPR_LOCATION (exp);
7252 set_curr_insn_source_location (input_location);
7254 /* Record where the insns produced belong. */
7255 set_curr_insn_block (TREE_BLOCK (exp));
7257 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7259 input_location = saved_location;
7260 set_curr_insn_block (saved_block);
7261 set_curr_insn_source_location (saved_curr_loc);
7263 else
7265 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7268 return ret;
7272 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7273 enum expand_modifier modifier)
7275 rtx op0, op1, op2, temp;
7276 tree type;
7277 int unsignedp;
7278 enum machine_mode mode;
7279 enum tree_code code = ops->code;
7280 optab this_optab;
7281 rtx subtarget, original_target;
7282 int ignore;
7283 bool reduce_bit_field;
7284 location_t loc = ops->location;
7285 tree treeop0, treeop1;
7286 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7287 ? reduce_to_bit_field_precision ((expr), \
7288 target, \
7289 type) \
7290 : (expr))
7292 type = ops->type;
7293 mode = TYPE_MODE (type);
7294 unsignedp = TYPE_UNSIGNED (type);
7296 treeop0 = ops->op0;
7297 treeop1 = ops->op1;
7299 /* We should be called only on simple (binary or unary) expressions,
7300 exactly those that are valid in gimple expressions that aren't
7301 GIMPLE_SINGLE_RHS (or invalid). */
7302 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7303 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7304 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7306 ignore = (target == const0_rtx
7307 || ((CONVERT_EXPR_CODE_P (code)
7308 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7309 && TREE_CODE (type) == VOID_TYPE));
7311 /* We should be called only if we need the result. */
7312 gcc_assert (!ignore);
7314 /* An operation in what may be a bit-field type needs the
7315 result to be reduced to the precision of the bit-field type,
7316 which is narrower than that of the type's mode. */
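  /* For illustration: arithmetic on a C bit-field such as

       struct { int f : 24; } s;  ... s.f + 1 ...

     is carried out in SImode (assuming a 32-bit int), so the result has to
     be truncated and re-extended to 24 bits before it is used.  */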
7317 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7318 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7320 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7321 target = 0;
7323 /* Use subtarget as the target for operand 0 of a binary operation. */
7324 subtarget = get_subtarget (target);
7325 original_target = target;
7327 switch (code)
7329 case NON_LVALUE_EXPR:
7330 case PAREN_EXPR:
7331 CASE_CONVERT:
7332 if (treeop0 == error_mark_node)
7333 return const0_rtx;
7335 if (TREE_CODE (type) == UNION_TYPE)
7337 tree valtype = TREE_TYPE (treeop0);
7339 /* If both input and output are BLKmode, this conversion isn't doing
7340 anything except possibly changing memory attribute. */
7341 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7343 rtx result = expand_expr (treeop0, target, tmode,
7344 modifier);
7346 result = copy_rtx (result);
7347 set_mem_attributes (result, type, 0);
7348 return result;
7351 if (target == 0)
7353 if (TYPE_MODE (type) != BLKmode)
7354 target = gen_reg_rtx (TYPE_MODE (type));
7355 else
7356 target = assign_temp (type, 0, 1, 1);
7359 if (MEM_P (target))
7360 /* Store data into beginning of memory target. */
7361 store_expr (treeop0,
7362 adjust_address (target, TYPE_MODE (valtype), 0),
7363 modifier == EXPAND_STACK_PARM,
7364 false);
7366 else
7368 gcc_assert (REG_P (target));
7370 /* Store this field into a union of the proper type. */
7371 store_field (target,
7372 MIN ((int_size_in_bytes (TREE_TYPE
7373 (treeop0))
7374 * BITS_PER_UNIT),
7375 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7376 0, TYPE_MODE (valtype), treeop0,
7377 type, 0, false);
7380 /* Return the entire union. */
7381 return target;
7384 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7386 op0 = expand_expr (treeop0, target, VOIDmode,
7387 modifier);
7389 /* If the signedness of the conversion differs and OP0 is
7390 a promoted SUBREG, clear that indication since we now
7391 have to do the proper extension. */
7392 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7393 && GET_CODE (op0) == SUBREG)
7394 SUBREG_PROMOTED_VAR_P (op0) = 0;
7396 return REDUCE_BIT_FIELD (op0);
7399 op0 = expand_expr (treeop0, NULL_RTX, mode,
7400 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7401 if (GET_MODE (op0) == mode)
7404 /* If OP0 is a constant, just convert it into the proper mode. */
7405 else if (CONSTANT_P (op0))
7407 tree inner_type = TREE_TYPE (treeop0);
7408 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7410 if (modifier == EXPAND_INITIALIZER)
7411 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7412 subreg_lowpart_offset (mode,
7413 inner_mode));
7414 else
7415 op0 = convert_modes (mode, inner_mode, op0,
7416 TYPE_UNSIGNED (inner_type));
7419 else if (modifier == EXPAND_INITIALIZER)
7420 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7422 else if (target == 0)
7423 op0 = convert_to_mode (mode, op0,
7424 TYPE_UNSIGNED (TREE_TYPE
7425 (treeop0)));
7426 else
7428 convert_move (target, op0,
7429 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7430 op0 = target;
7433 return REDUCE_BIT_FIELD (op0);
7435 case ADDR_SPACE_CONVERT_EXPR:
7437 tree treeop0_type = TREE_TYPE (treeop0);
7438 addr_space_t as_to;
7439 addr_space_t as_from;
7441 gcc_assert (POINTER_TYPE_P (type));
7442 gcc_assert (POINTER_TYPE_P (treeop0_type));
7444 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7445 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7447 /* Conversions between pointers to the same address space should
7448 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7449 gcc_assert (as_to != as_from);
7451 /* Ask target code to handle conversion between pointers
7452 to overlapping address spaces. */
7453 if (targetm.addr_space.subset_p (as_to, as_from)
7454 || targetm.addr_space.subset_p (as_from, as_to))
7456 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7457 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7458 gcc_assert (op0);
7459 return op0;
7462 /* For disjoint address spaces, converting anything but
7463 a null pointer invokes undefined behaviour. We simply
7464 always return a null pointer here. */
7465 return CONST0_RTX (mode);
7468 case POINTER_PLUS_EXPR:
7469 /* Even though the sizetype mode and the pointer's mode can be different,
7470 expand is able to handle this correctly and get the correct result out
7471 of the PLUS_EXPR code. */
7472 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7473 if sizetype precision is smaller than pointer precision. */
7474 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7475 treeop1 = fold_convert_loc (loc, type,
7476 fold_convert_loc (loc, ssizetype,
7477 treeop1));
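/* The offset has now been rewritten in the pointer's type; from here on
   the POINTER_PLUS_EXPR is expanded exactly like an ordinary addition,
   so fall through.  */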
7478 case PLUS_EXPR:
7479 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7480 something else, make sure we add the register to the constant and
7481 then to the other thing. This case can occur during strength
7482 reduction and doing it this way will produce better code if the
7483 frame pointer or argument pointer is eliminated.
7485 fold-const.c will ensure that the constant is always in the inner
7486 PLUS_EXPR, so the only case we need to do anything about is if
7487 sp, ap, or fp is our second argument, in which case we must swap
7488 the innermost first argument and our second argument. */
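/* For example, (i + 4) + fp is rearranged here into (fp + 4) + i, so that
   when the frame pointer is eliminated the register-plus-constant part
   can be simplified into a single displacement.  */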
7490 if (TREE_CODE (treeop0) == PLUS_EXPR
7491 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7492 && TREE_CODE (treeop1) == VAR_DECL
7493 && (DECL_RTL (treeop1) == frame_pointer_rtx
7494 || DECL_RTL (treeop1) == stack_pointer_rtx
7495 || DECL_RTL (treeop1) == arg_pointer_rtx))
7497 tree t = treeop1;
7499 treeop1 = TREE_OPERAND (treeop0, 0);
7500 TREE_OPERAND (treeop0, 0) = t;
7503 /* If the result is to be ptr_mode and we are adding an integer to
7504 something, we might be forming a constant. So try to use
7505 plus_constant. If it produces a sum and we can't accept it,
7506 use force_operand. This allows P = &ARR[const] to generate
7507 efficient code on machines where a SYMBOL_REF is not a valid
7508 address.
7510 If this is an EXPAND_SUM call, always return the sum. */
7511 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7512 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7514 if (modifier == EXPAND_STACK_PARM)
7515 target = 0;
7516 if (TREE_CODE (treeop0) == INTEGER_CST
7517 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7518 && TREE_CONSTANT (treeop1))
7520 rtx constant_part;
7522 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7523 EXPAND_SUM);
7524 /* Use immed_double_const to ensure that the constant is
7525 truncated according to the mode of OP1, then sign extended
7526 to a HOST_WIDE_INT. Using the constant directly can result
7527 in non-canonical RTL in a 64x32 cross compile. */
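/* For example, on a 64-bit host targeting a 32-bit machine, the low word
   of the tree constant might be 0xfffffffc; the canonical SImode CONST_INT
   for that value is -4, which is what immed_double_const produces.  */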
7528 constant_part
7529 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7530 (HOST_WIDE_INT) 0,
7531 TYPE_MODE (TREE_TYPE (treeop1)));
7532 op1 = plus_constant (op1, INTVAL (constant_part));
7533 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7534 op1 = force_operand (op1, target);
7535 return REDUCE_BIT_FIELD (op1);
7538 else if (TREE_CODE (treeop1) == INTEGER_CST
7539 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7540 && TREE_CONSTANT (treeop0))
7542 rtx constant_part;
7544 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7545 (modifier == EXPAND_INITIALIZER
7546 ? EXPAND_INITIALIZER : EXPAND_SUM));
7547 if (! CONSTANT_P (op0))
7549 op1 = expand_expr (treeop1, NULL_RTX,
7550 VOIDmode, modifier);
7551 /* Return a PLUS if modifier says it's OK. */
7552 if (modifier == EXPAND_SUM
7553 || modifier == EXPAND_INITIALIZER)
7554 return simplify_gen_binary (PLUS, mode, op0, op1);
7555 goto binop2;
7557 /* Use immed_double_const to ensure that the constant is
7558 truncated according to the mode of OP0, then sign extended
7559 to a HOST_WIDE_INT. Using the constant directly can result
7560 in non-canonical RTL in a 64x32 cross compile. */
7561 constant_part
7562 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7563 (HOST_WIDE_INT) 0,
7564 TYPE_MODE (TREE_TYPE (treeop0)));
7565 op0 = plus_constant (op0, INTVAL (constant_part));
7566 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7567 op0 = force_operand (op0, target);
7568 return REDUCE_BIT_FIELD (op0);
7572 /* No sense saving up arithmetic to be done
7573 if it's all in the wrong mode to form part of an address.
7574 And force_operand won't know whether to sign-extend or
7575 zero-extend. */
7576 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7577 || mode != ptr_mode)
7579 expand_operands (treeop0, treeop1,
7580 subtarget, &op0, &op1, EXPAND_NORMAL);
7581 if (op0 == const0_rtx)
7582 return op1;
7583 if (op1 == const0_rtx)
7584 return op0;
7585 goto binop2;
7588 expand_operands (treeop0, treeop1,
7589 subtarget, &op0, &op1, modifier);
7590 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7592 case MINUS_EXPR:
7593 /* For initializers, we are allowed to return a MINUS of two
7594 symbolic constants. Here we handle all cases when both operands
7595 are constant. */
7596 /* Handle difference of two symbolic constants,
7597 for the sake of an initializer. */
7598 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7599 && really_constant_p (treeop0)
7600 && really_constant_p (treeop1))
7602 expand_operands (treeop0, treeop1,
7603 NULL_RTX, &op0, &op1, modifier);
7605 /* If the last operand is a CONST_INT, use plus_constant of
7606 the negated constant. Else make the MINUS. */
7607 if (CONST_INT_P (op1))
7608 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7609 else
7610 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7613 /* No sense saving up arithmetic to be done
7614 if it's all in the wrong mode to form part of an address.
7615 And force_operand won't know whether to sign-extend or
7616 zero-extend. */
7617 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7618 || mode != ptr_mode)
7619 goto binop;
7621 expand_operands (treeop0, treeop1,
7622 subtarget, &op0, &op1, modifier);
7624 /* Convert A - const to A + (-const). */
7625 if (CONST_INT_P (op1))
7627 op1 = negate_rtx (mode, op1);
7628 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7631 goto binop2;
7633 case WIDEN_MULT_PLUS_EXPR:
7634 case WIDEN_MULT_MINUS_EXPR:
7635 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7636 op2 = expand_normal (ops->op2);
7637 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7638 target, unsignedp);
7639 return target;
7641 case WIDEN_MULT_EXPR:
7642 /* If first operand is constant, swap them.
7643 Thus the following special case checks need only
7644 check the second operand. */
7645 if (TREE_CODE (treeop0) == INTEGER_CST)
7647 tree t1 = treeop0;
7648 treeop0 = treeop1;
7649 treeop1 = t1;
7652 /* First, check if we have a multiplication of one signed and one
7653 unsigned operand. */
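/* E.g. a signed short multiplied by an unsigned short, widened to int,
   can use the target's signed-by-unsigned widening multiply pattern when
   one exists; the operands are ordered below to match what that optab
   expects.  */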
7654 if (TREE_CODE (treeop1) != INTEGER_CST
7655 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7656 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7658 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7659 this_optab = usmul_widen_optab;
7660 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7662 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7664 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7665 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7666 EXPAND_NORMAL);
7667 else
7668 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7669 EXPAND_NORMAL);
7670 goto binop3;
7674 /* Check for a multiplication with matching signedness. */
7675 else if ((TREE_CODE (treeop1) == INTEGER_CST
7676 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7677 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7678 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7680 tree op0type = TREE_TYPE (treeop0);
7681 enum machine_mode innermode = TYPE_MODE (op0type);
7682 bool zextend_p = TYPE_UNSIGNED (op0type);
7683 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7684 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7686 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7688 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7690 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7691 EXPAND_NORMAL);
7692 temp = expand_widening_mult (mode, op0, op1, target,
7693 unsignedp, this_optab);
7694 return REDUCE_BIT_FIELD (temp);
7696 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7697 && innermode == word_mode)
7699 rtx htem, hipart;
7700 op0 = expand_normal (treeop0);
7701 if (TREE_CODE (treeop1) == INTEGER_CST)
7702 op1 = convert_modes (innermode, mode,
7703 expand_normal (treeop1), unsignedp);
7704 else
7705 op1 = expand_normal (treeop1);
7706 temp = expand_binop (mode, other_optab, op0, op1, target,
7707 unsignedp, OPTAB_LIB_WIDEN);
7708 hipart = gen_highpart (innermode, temp);
7709 htem = expand_mult_highpart_adjust (innermode, hipart,
7710 op0, op1, hipart,
7711 zextend_p);
7712 if (htem != hipart)
7713 emit_move_insn (hipart, htem);
7714 return REDUCE_BIT_FIELD (temp);
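/* No widening pattern applied; widen both operands to the result type
   and fall back to an ordinary multiplication.  */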
7718 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7719 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7720 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7721 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7723 case MULT_EXPR:
7724 /* If this is a fixed-point operation, then we cannot use the code
7725 below because "expand_mult" doesn't support sat/no-sat fixed-point
7726 multiplications. */
7727 if (ALL_FIXED_POINT_MODE_P (mode))
7728 goto binop;
7730 /* If first operand is constant, swap them.
7731 Thus the following special case checks need only
7732 check the second operand. */
7733 if (TREE_CODE (treeop0) == INTEGER_CST)
7735 tree t1 = treeop0;
7736 treeop0 = treeop1;
7737 treeop1 = t1;
7740 /* Attempt to return something suitable for generating an
7741 indexed address, for machines that support that. */
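/* E.g. under EXPAND_SUM an index expression like i * 4 is left as
   (mult (reg) (const_int 4)), so that an enclosing addition can fold it
   into a base-plus-scaled-index address on targets (such as x86) that
   support that addressing form.  */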
7743 if (modifier == EXPAND_SUM && mode == ptr_mode
7744 && host_integerp (treeop1, 0))
7746 tree exp1 = treeop1;
7748 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7749 EXPAND_SUM);
7751 if (!REG_P (op0))
7752 op0 = force_operand (op0, NULL_RTX);
7753 if (!REG_P (op0))
7754 op0 = copy_to_mode_reg (mode, op0);
7756 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7757 gen_int_mode (tree_low_cst (exp1, 0),
7758 TYPE_MODE (TREE_TYPE (exp1)))));
7761 if (modifier == EXPAND_STACK_PARM)
7762 target = 0;
7764 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7765 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7767 case TRUNC_DIV_EXPR:
7768 case FLOOR_DIV_EXPR:
7769 case CEIL_DIV_EXPR:
7770 case ROUND_DIV_EXPR:
7771 case EXACT_DIV_EXPR:
7772 /* If this is a fixed-point operation, then we cannot use the code
7773 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7774 divisions. */
7775 if (ALL_FIXED_POINT_MODE_P (mode))
7776 goto binop;
7778 if (modifier == EXPAND_STACK_PARM)
7779 target = 0;
7780 /* Possible optimization: compute the dividend with EXPAND_SUM;
7781 then, if the divisor is constant, optimize the case where some
7782 terms of the dividend have coefficients divisible by it. */
7783 expand_operands (treeop0, treeop1,
7784 subtarget, &op0, &op1, EXPAND_NORMAL);
7785 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7787 case RDIV_EXPR:
7788 goto binop;
7790 case TRUNC_MOD_EXPR:
7791 case FLOOR_MOD_EXPR:
7792 case CEIL_MOD_EXPR:
7793 case ROUND_MOD_EXPR:
7794 if (modifier == EXPAND_STACK_PARM)
7795 target = 0;
7796 expand_operands (treeop0, treeop1,
7797 subtarget, &op0, &op1, EXPAND_NORMAL);
7798 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7800 case FIXED_CONVERT_EXPR:
7801 op0 = expand_normal (treeop0);
7802 if (target == 0 || modifier == EXPAND_STACK_PARM)
7803 target = gen_reg_rtx (mode);
7805 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7806 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7807 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7808 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7809 else
7810 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7811 return target;
7813 case FIX_TRUNC_EXPR:
7814 op0 = expand_normal (treeop0);
7815 if (target == 0 || modifier == EXPAND_STACK_PARM)
7816 target = gen_reg_rtx (mode);
7817 expand_fix (target, op0, unsignedp);
7818 return target;
7820 case FLOAT_EXPR:
7821 op0 = expand_normal (treeop0);
7822 if (target == 0 || modifier == EXPAND_STACK_PARM)
7823 target = gen_reg_rtx (mode);
7824 /* expand_float can't figure out what to do if FROM has VOIDmode.
7825 So give it the correct mode. With -O, cse will optimize this. */
7826 if (GET_MODE (op0) == VOIDmode)
7827 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7828 op0);
7829 expand_float (target, op0,
7830 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7831 return target;
7833 case NEGATE_EXPR:
7834 op0 = expand_expr (treeop0, subtarget,
7835 VOIDmode, EXPAND_NORMAL);
7836 if (modifier == EXPAND_STACK_PARM)
7837 target = 0;
7838 temp = expand_unop (mode,
7839 optab_for_tree_code (NEGATE_EXPR, type,
7840 optab_default),
7841 op0, target, 0);
7842 gcc_assert (temp);
7843 return REDUCE_BIT_FIELD (temp);
7845 case ABS_EXPR:
7846 op0 = expand_expr (treeop0, subtarget,
7847 VOIDmode, EXPAND_NORMAL);
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7851 /* ABS_EXPR is not valid for complex arguments. */
7852 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7853 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7855 /* Unsigned abs is simply the operand. Testing here means we don't
7856 risk generating incorrect code below. */
7857 if (TYPE_UNSIGNED (type))
7858 return op0;
7860 return expand_abs (mode, op0, target, unsignedp,
7861 safe_from_p (target, treeop0, 1));
7863 case MAX_EXPR:
7864 case MIN_EXPR:
7865 target = original_target;
7866 if (target == 0
7867 || modifier == EXPAND_STACK_PARM
7868 || (MEM_P (target) && MEM_VOLATILE_P (target))
7869 || GET_MODE (target) != mode
7870 || (REG_P (target)
7871 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7872 target = gen_reg_rtx (mode);
7873 expand_operands (treeop0, treeop1,
7874 target, &op0, &op1, EXPAND_NORMAL);
7876 /* First try to do it with a special MIN or MAX instruction.
7877 If that does not win, use a conditional jump to select the proper
7878 value. */
7879 this_optab = optab_for_tree_code (code, type, optab_default);
7880 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7881 OPTAB_WIDEN);
7882 if (temp != 0)
7883 return temp;
7885 /* At this point, a MEM target is no longer useful; we will get better
7886 code without it. */
7888 if (! REG_P (target))
7889 target = gen_reg_rtx (mode);
7891 /* If op1 was placed in target, swap op0 and op1. */
7892 if (target != op0 && target == op1)
7894 temp = op0;
7895 op0 = op1;
7896 op1 = temp;
7899 /* We generate better code and avoid problems with op1 mentioning
7900 target by forcing op1 into a pseudo if it isn't a constant. */
7901 if (! CONSTANT_P (op1))
7902 op1 = force_reg (mode, op1);
7905 enum rtx_code comparison_code;
7906 rtx cmpop1 = op1;
7908 if (code == MAX_EXPR)
7909 comparison_code = unsignedp ? GEU : GE;
7910 else
7911 comparison_code = unsignedp ? LEU : LE;
7913 /* Canonicalize to comparisons against 0. */
7914 if (op1 == const1_rtx)
7916 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7917 or (a != 0 ? a : 1) for unsigned.
7918 For MIN we are safe converting (a <= 1 ? a : 1)
7919 into (a <= 0 ? a : 1) */
7920 cmpop1 = const0_rtx;
7921 if (code == MAX_EXPR)
7922 comparison_code = unsignedp ? NE : GT;
7924 if (op1 == constm1_rtx && !unsignedp)
7926 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7927 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7928 cmpop1 = const0_rtx;
7929 if (code == MIN_EXPR)
7930 comparison_code = LT;
7932 #ifdef HAVE_conditional_move
7933 /* Use a conditional move if possible. */
7934 if (can_conditionally_move_p (mode))
7936 rtx insn;
7938 /* ??? Same problem as in expmed.c: emit_conditional_move
7939 forces a stack adjustment via compare_from_rtx, and we
7940 lose the stack adjustment if the sequence we are about
7941 to create is discarded. */
7942 do_pending_stack_adjust ();
7944 start_sequence ();
7946 /* Try to emit the conditional move. */
7947 insn = emit_conditional_move (target, comparison_code,
7948 op0, cmpop1, mode,
7949 op0, op1, mode,
7950 unsignedp);
7952 /* If we could do the conditional move, emit the sequence,
7953 and return. */
7954 if (insn)
7956 rtx seq = get_insns ();
7957 end_sequence ();
7958 emit_insn (seq);
7959 return target;
7962 /* Otherwise discard the sequence and fall back to code with
7963 branches. */
7964 end_sequence ();
7966 #endif
7967 if (target != op0)
7968 emit_move_insn (target, op0);
7970 temp = gen_label_rtx ();
7971 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7972 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
7973 -1);
7975 emit_move_insn (target, op1);
7976 emit_label (temp);
7977 return target;
7979 case BIT_NOT_EXPR:
7980 op0 = expand_expr (treeop0, subtarget,
7981 VOIDmode, EXPAND_NORMAL);
7982 if (modifier == EXPAND_STACK_PARM)
7983 target = 0;
7984 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7985 gcc_assert (temp);
7986 return temp;
7988 /* ??? Can optimize bitwise operations with one arg constant.
7989 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7990 and (a bitwise1 b) bitwise2 b (etc)
7991 but that is probably not worth while. */
7993 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7994 boolean values when we want in all cases to compute both of them. In
7995 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7996 as actual zero-or-1 values and then bitwise anding. In cases where
7997 there cannot be any side effects, better code would be made by
7998 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7999 how to recognize those cases. */
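/* Each TRUTH_* case below simply rewrites CODE to the corresponding
   bitwise code and falls through, so the expansion is identical to that
   of its bitwise counterpart.  */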
8001 case TRUTH_AND_EXPR:
8002 code = BIT_AND_EXPR;
8003 case BIT_AND_EXPR:
8004 goto binop;
8006 case TRUTH_OR_EXPR:
8007 code = BIT_IOR_EXPR;
8008 case BIT_IOR_EXPR:
8009 goto binop;
8011 case TRUTH_XOR_EXPR:
8012 code = BIT_XOR_EXPR;
8013 case BIT_XOR_EXPR:
8014 goto binop;
8016 case LROTATE_EXPR:
8017 case RROTATE_EXPR:
8018 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8019 || (GET_MODE_PRECISION (TYPE_MODE (type))
8020 == TYPE_PRECISION (type)));
8021 /* fall through */
8023 case LSHIFT_EXPR:
8024 case RSHIFT_EXPR:
8025 /* If this is a fixed-point operation, then we cannot use the code
8026 below because "expand_shift" doesn't support sat/no-sat fixed-point
8027 shifts. */
8028 if (ALL_FIXED_POINT_MODE_P (mode))
8029 goto binop;
8031 if (! safe_from_p (subtarget, treeop1, 1))
8032 subtarget = 0;
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
8035 op0 = expand_expr (treeop0, subtarget,
8036 VOIDmode, EXPAND_NORMAL);
8037 temp = expand_shift (code, mode, op0, treeop1, target,
8038 unsignedp);
8039 if (code == LSHIFT_EXPR)
8040 temp = REDUCE_BIT_FIELD (temp);
8041 return temp;
8043 /* Could determine the answer when only additive constants differ. Also,
8044 the addition of one can be handled by changing the condition. */
8045 case LT_EXPR:
8046 case LE_EXPR:
8047 case GT_EXPR:
8048 case GE_EXPR:
8049 case EQ_EXPR:
8050 case NE_EXPR:
8051 case UNORDERED_EXPR:
8052 case ORDERED_EXPR:
8053 case UNLT_EXPR:
8054 case UNLE_EXPR:
8055 case UNGT_EXPR:
8056 case UNGE_EXPR:
8057 case UNEQ_EXPR:
8058 case LTGT_EXPR:
8059 temp = do_store_flag (ops,
8060 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8061 tmode != VOIDmode ? tmode : mode);
8062 if (temp)
8063 return temp;
8065 /* Use a compare and a jump for BLKmode comparisons, or for function
8066 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
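/* Otherwise materialize the flag with a conditional jump: load 0 into
   TARGET, and jump around the store of 1 when the comparison is false.  */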
8068 if ((target == 0
8069 || modifier == EXPAND_STACK_PARM
8070 || ! safe_from_p (target, treeop0, 1)
8071 || ! safe_from_p (target, treeop1, 1)
8072 /* Make sure we don't have a hard reg (such as function's return
8073 value) live across basic blocks, if not optimizing. */
8074 || (!optimize && REG_P (target)
8075 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8076 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8078 emit_move_insn (target, const0_rtx);
8080 op1 = gen_label_rtx ();
8081 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8083 emit_move_insn (target, const1_rtx);
8085 emit_label (op1);
8086 return target;
8088 case TRUTH_NOT_EXPR:
8089 if (modifier == EXPAND_STACK_PARM)
8090 target = 0;
8091 op0 = expand_expr (treeop0, target,
8092 VOIDmode, EXPAND_NORMAL);
8093 /* The parser is careful to generate TRUTH_NOT_EXPR
8094 only with operands that are always zero or one. */
8095 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8096 target, 1, OPTAB_LIB_WIDEN);
8097 gcc_assert (temp);
8098 return temp;
8100 case COMPLEX_EXPR:
8101 /* Get the rtx code of the operands. */
8102 op0 = expand_normal (treeop0);
8103 op1 = expand_normal (treeop1);
8105 if (!target)
8106 target = gen_reg_rtx (TYPE_MODE (type));
8108 /* Move the real (op0) and imaginary (op1) parts to their location. */
8109 write_complex_part (target, op0, false);
8110 write_complex_part (target, op1, true);
8112 return target;
8114 case WIDEN_SUM_EXPR:
8116 tree oprnd0 = treeop0;
8117 tree oprnd1 = treeop1;
8119 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8120 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8121 target, unsignedp);
8122 return target;
8125 case REDUC_MAX_EXPR:
8126 case REDUC_MIN_EXPR:
8127 case REDUC_PLUS_EXPR:
8129 op0 = expand_normal (treeop0);
8130 this_optab = optab_for_tree_code (code, type, optab_default);
8131 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8132 gcc_assert (temp);
8133 return temp;
8136 case VEC_EXTRACT_EVEN_EXPR:
8137 case VEC_EXTRACT_ODD_EXPR:
8139 expand_operands (treeop0, treeop1,
8140 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8141 this_optab = optab_for_tree_code (code, type, optab_default);
8142 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8143 OPTAB_WIDEN);
8144 gcc_assert (temp);
8145 return temp;
8148 case VEC_INTERLEAVE_HIGH_EXPR:
8149 case VEC_INTERLEAVE_LOW_EXPR:
8151 expand_operands (treeop0, treeop1,
8152 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8153 this_optab = optab_for_tree_code (code, type, optab_default);
8154 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8155 OPTAB_WIDEN);
8156 gcc_assert (temp);
8157 return temp;
8160 case VEC_LSHIFT_EXPR:
8161 case VEC_RSHIFT_EXPR:
8163 target = expand_vec_shift_expr (ops, target);
8164 return target;
8167 case VEC_UNPACK_HI_EXPR:
8168 case VEC_UNPACK_LO_EXPR:
8170 op0 = expand_normal (treeop0);
8171 this_optab = optab_for_tree_code (code, type, optab_default);
8172 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8173 target, unsignedp);
8174 gcc_assert (temp);
8175 return temp;
8178 case VEC_UNPACK_FLOAT_HI_EXPR:
8179 case VEC_UNPACK_FLOAT_LO_EXPR:
8181 op0 = expand_normal (treeop0);
8182 /* The signedness is determined from the input operand. */
8183 this_optab = optab_for_tree_code (code,
8184 TREE_TYPE (treeop0),
8185 optab_default);
8186 temp = expand_widen_pattern_expr
8187 (ops, op0, NULL_RTX, NULL_RTX,
8188 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8190 gcc_assert (temp);
8191 return temp;
8194 case VEC_WIDEN_MULT_HI_EXPR:
8195 case VEC_WIDEN_MULT_LO_EXPR:
8197 tree oprnd0 = treeop0;
8198 tree oprnd1 = treeop1;
8200 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8201 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8202 target, unsignedp);
8203 gcc_assert (target);
8204 return target;
8207 case VEC_PACK_TRUNC_EXPR:
8208 case VEC_PACK_SAT_EXPR:
8209 case VEC_PACK_FIX_TRUNC_EXPR:
8210 mode = TYPE_MODE (TREE_TYPE (treeop0));
8211 goto binop;
8213 default:
8214 gcc_unreachable ();
8217 /* Here to do an ordinary binary operator. */
8218 binop:
8219 expand_operands (treeop0, treeop1,
8220 subtarget, &op0, &op1, EXPAND_NORMAL);
8221 binop2:
8222 this_optab = optab_for_tree_code (code, type, optab_default);
8223 binop3:
8224 if (modifier == EXPAND_STACK_PARM)
8225 target = 0;
8226 temp = expand_binop (mode, this_optab, op0, op1, target,
8227 unsignedp, OPTAB_LIB_WIDEN);
8228 gcc_assert (temp);
8229 return REDUCE_BIT_FIELD (temp);
8231 #undef REDUCE_BIT_FIELD
8234 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8235 enum expand_modifier modifier, rtx *alt_rtl)
8237 rtx op0, op1, temp, decl_rtl;
8238 tree type;
8239 int unsignedp;
8240 enum machine_mode mode;
8241 enum tree_code code = TREE_CODE (exp);
8242 optab this_optab;
8243 rtx subtarget, original_target;
8244 int ignore;
8245 tree context;
8246 bool reduce_bit_field;
8247 location_t loc = EXPR_LOCATION (exp);
8248 struct separate_ops ops;
8249 tree treeop0, treeop1, treeop2;
8250 tree ssa_name = NULL_TREE;
8251 gimple g;
8253 type = TREE_TYPE (exp);
8254 mode = TYPE_MODE (type);
8255 unsignedp = TYPE_UNSIGNED (type);
8257 treeop0 = treeop1 = treeop2 = NULL_TREE;
8258 if (!VL_EXP_CLASS_P (exp))
8259 switch (TREE_CODE_LENGTH (code))
8261 default:
8262 case 3: treeop2 = TREE_OPERAND (exp, 2);
8263 case 2: treeop1 = TREE_OPERAND (exp, 1);
8264 case 1: treeop0 = TREE_OPERAND (exp, 0);
8265 case 0: break;
8267 ops.code = code;
8268 ops.type = type;
8269 ops.op0 = treeop0;
8270 ops.op1 = treeop1;
8271 ops.op2 = treeop2;
8272 ops.location = loc;
8274 ignore = (target == const0_rtx
8275 || ((CONVERT_EXPR_CODE_P (code)
8276 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8277 && TREE_CODE (type) == VOID_TYPE));
8279 /* An operation in what may be a bit-field type needs the
8280 result to be reduced to the precision of the bit-field type,
8281 which is narrower than that of the type's mode. */
8282 reduce_bit_field = (!ignore
8283 && TREE_CODE (type) == INTEGER_TYPE
8284 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8286 /* If we are going to ignore this result, we need only do something
8287 if there is a side-effect somewhere in the expression. If there
8288 is, short-circuit the most common cases here. Note that we must
8289 not call expand_expr with anything but const0_rtx in case this
8290 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8292 if (ignore)
8294 if (! TREE_SIDE_EFFECTS (exp))
8295 return const0_rtx;
8297 /* Ensure we reference a volatile object even if value is ignored, but
8298 don't do this if all we are doing is taking its address. */
8299 if (TREE_THIS_VOLATILE (exp)
8300 && TREE_CODE (exp) != FUNCTION_DECL
8301 && mode != VOIDmode && mode != BLKmode
8302 && modifier != EXPAND_CONST_ADDRESS)
8304 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8305 if (MEM_P (temp))
8306 temp = copy_to_reg (temp);
8307 return const0_rtx;
8310 if (TREE_CODE_CLASS (code) == tcc_unary
8311 || code == COMPONENT_REF || code == INDIRECT_REF)
8312 return expand_expr (treeop0, const0_rtx, VOIDmode,
8313 modifier);
8315 else if (TREE_CODE_CLASS (code) == tcc_binary
8316 || TREE_CODE_CLASS (code) == tcc_comparison
8317 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8319 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8320 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8321 return const0_rtx;
8323 else if (code == BIT_FIELD_REF)
8325 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8326 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8327 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8328 return const0_rtx;
8331 target = 0;
8334 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8335 target = 0;
8337 /* Use subtarget as the target for operand 0 of a binary operation. */
8338 subtarget = get_subtarget (target);
8339 original_target = target;
8341 switch (code)
8343 case LABEL_DECL:
8345 tree function = decl_function_context (exp);
8347 temp = label_rtx (exp);
8348 temp = gen_rtx_LABEL_REF (Pmode, temp);
8350 if (function != current_function_decl
8351 && function != 0)
8352 LABEL_REF_NONLOCAL_P (temp) = 1;
8354 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8355 return temp;
8358 case SSA_NAME:
8359 /* ??? ivopts calls expander, without any preparation from
8360 out-of-ssa. So fake instructions as if this were an access to the
8361 base variable. This unnecessarily allocates a pseudo; see how we can
8362 reuse it if partition base vars have it set already. */
8363 if (!currently_expanding_to_rtl)
8364 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8365 NULL);
8367 g = get_gimple_for_ssa_name (exp);
8368 if (g)
8369 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8370 modifier, NULL);
8372 ssa_name = exp;
8373 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8374 exp = SSA_NAME_VAR (ssa_name);
8375 goto expand_decl_rtl;
8377 case PARM_DECL:
8378 case VAR_DECL:
8379 /* If a static var's type was incomplete when the decl was written,
8380 but the type is complete now, lay out the decl now. */
8381 if (DECL_SIZE (exp) == 0
8382 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8383 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8384 layout_decl (exp, 0);
8386 /* TLS emulation hook - replace __thread vars with
8387 *__emutls_get_address (&_emutls.var). */
8388 if (! targetm.have_tls
8389 && TREE_CODE (exp) == VAR_DECL
8390 && DECL_THREAD_LOCAL_P (exp))
8392 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8393 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8396 /* ... fall through ... */
8398 case FUNCTION_DECL:
8399 case RESULT_DECL:
8400 decl_rtl = DECL_RTL (exp);
8401 expand_decl_rtl:
8402 gcc_assert (decl_rtl);
8403 decl_rtl = copy_rtx (decl_rtl);
8404 /* Record writes to register variables. */
8405 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8406 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8408 int i = REGNO (decl_rtl);
8409 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8410 while (nregs)
8412 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8413 i++;
8414 nregs--;
8418 /* Ensure the variable is marked as used even if it doesn't go through
8419 a parser. If it hasn't been used yet, write out an external
8420 definition. */
8421 if (! TREE_USED (exp))
8423 assemble_external (exp);
8424 TREE_USED (exp) = 1;
8427 /* Show we haven't gotten RTL for this yet. */
8428 temp = 0;
8430 /* Variables inherited from containing functions should have
8431 been lowered by this point. */
8432 context = decl_function_context (exp);
8433 gcc_assert (!context
8434 || context == current_function_decl
8435 || TREE_STATIC (exp)
8436 /* ??? C++ creates functions that are not TREE_STATIC. */
8437 || TREE_CODE (exp) == FUNCTION_DECL);
8439 /* This is the case of an array whose size is to be determined
8440 from its initializer, while the initializer is still being parsed.
8441 See expand_decl. */
8443 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8444 temp = validize_mem (decl_rtl);
8446 /* If DECL_RTL is memory, we are in the normal case; if the
8447 address is not valid, get the address into a register. */
8449 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8451 if (alt_rtl)
8452 *alt_rtl = decl_rtl;
8453 decl_rtl = use_anchored_address (decl_rtl);
8454 if (modifier != EXPAND_CONST_ADDRESS
8455 && modifier != EXPAND_SUM
8456 && !memory_address_addr_space_p (DECL_MODE (exp),
8457 XEXP (decl_rtl, 0),
8458 MEM_ADDR_SPACE (decl_rtl)))
8459 temp = replace_equiv_address (decl_rtl,
8460 copy_rtx (XEXP (decl_rtl, 0)));
8463 /* If we got something, return it. But first, set the alignment
8464 if the address is a register. */
8465 if (temp != 0)
8467 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8468 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8470 return temp;
8473 /* If the mode of DECL_RTL does not match that of the decl, it
8474 must be a promoted value. We return a SUBREG of the wanted mode,
8475 but mark it so that we know that it was already extended. */
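/* For example, a QImode variable that the target promotes to SImode is
   returned as (subreg:QI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P set,
   letting later code skip extensions that have already been done.  */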
8476 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8478 enum machine_mode pmode;
8480 /* Get the signedness to be used for this variable. Ensure we get
8481 the same mode we got when the variable was declared. */
8482 if (code == SSA_NAME
8483 && (g = SSA_NAME_DEF_STMT (ssa_name))
8484 && gimple_code (g) == GIMPLE_CALL)
8485 pmode = promote_function_mode (type, mode, &unsignedp,
8486 TREE_TYPE
8487 (TREE_TYPE (gimple_call_fn (g))),
8489 else
8490 pmode = promote_decl_mode (exp, &unsignedp);
8491 gcc_assert (GET_MODE (decl_rtl) == pmode);
8493 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8494 SUBREG_PROMOTED_VAR_P (temp) = 1;
8495 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8496 return temp;
8499 return decl_rtl;
8501 case INTEGER_CST:
8502 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8503 TREE_INT_CST_HIGH (exp), mode);
8505 return temp;
8507 case VECTOR_CST:
8509 tree tmp = NULL_TREE;
8510 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8511 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8512 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8513 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8514 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8515 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8516 return const_vector_from_tree (exp);
8517 if (GET_MODE_CLASS (mode) == MODE_INT)
8519 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8520 if (type_for_mode)
8521 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8523 if (!tmp)
8524 tmp = build_constructor_from_list (type,
8525 TREE_VECTOR_CST_ELTS (exp));
8526 return expand_expr (tmp, ignore ? const0_rtx : target,
8527 tmode, modifier);
8530 case CONST_DECL:
8531 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8533 case REAL_CST:
8534 /* If optimized, generate immediate CONST_DOUBLE
8535 which will be turned into memory by reload if necessary.
8537 We used to force a register so that loop.c could see it. But
8538 this does not allow gen_* patterns to perform optimizations with
8539 the constants. It also produces two insns in cases like "x = 1.0;".
8540 On most machines, floating-point constants are not permitted in
8541 many insns, so we'd end up copying it to a register in any case.
8543 Now, we do the copying in expand_binop, if appropriate. */
8544 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8545 TYPE_MODE (TREE_TYPE (exp)));
8547 case FIXED_CST:
8548 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8549 TYPE_MODE (TREE_TYPE (exp)));
8551 case COMPLEX_CST:
8552 /* Handle evaluating a complex constant in a CONCAT target. */
8553 if (original_target && GET_CODE (original_target) == CONCAT)
8555 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8556 rtx rtarg, itarg;
8558 rtarg = XEXP (original_target, 0);
8559 itarg = XEXP (original_target, 1);
8561 /* Move the real and imaginary parts separately. */
8562 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8563 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8565 if (op0 != rtarg)
8566 emit_move_insn (rtarg, op0);
8567 if (op1 != itarg)
8568 emit_move_insn (itarg, op1);
8570 return original_target;
8573 /* ... fall through ... */
8575 case STRING_CST:
8576 temp = expand_expr_constant (exp, 1, modifier);
8578 /* temp contains a constant address.
8579 On RISC machines where a constant address isn't valid,
8580 make some insns to get that address into a register. */
8581 if (modifier != EXPAND_CONST_ADDRESS
8582 && modifier != EXPAND_INITIALIZER
8583 && modifier != EXPAND_SUM
8584 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8585 MEM_ADDR_SPACE (temp)))
8586 return replace_equiv_address (temp,
8587 copy_rtx (XEXP (temp, 0)));
8588 return temp;
8590 case SAVE_EXPR:
8592 tree val = treeop0;
8593 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8595 if (!SAVE_EXPR_RESOLVED_P (exp))
8597 /* We can indeed still hit this case, typically via builtin
8598 expanders calling save_expr immediately before expanding
8599 something. Assume this means that we only have to deal
8600 with non-BLKmode values. */
8601 gcc_assert (GET_MODE (ret) != BLKmode);
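/* Wrap the computed value in an artificial VAR_DECL and record it as the
   resolved operand of the SAVE_EXPR, so that subsequent expansions reuse
   RET instead of recomputing the expression.  */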
8603 val = build_decl (EXPR_LOCATION (exp),
8604 VAR_DECL, NULL, TREE_TYPE (exp));
8605 DECL_ARTIFICIAL (val) = 1;
8606 DECL_IGNORED_P (val) = 1;
8607 treeop0 = val;
8608 TREE_OPERAND (exp, 0) = treeop0;
8609 SAVE_EXPR_RESOLVED_P (exp) = 1;
8611 if (!CONSTANT_P (ret))
8612 ret = copy_to_reg (ret);
8613 SET_DECL_RTL (val, ret);
8616 return ret;
8620 case CONSTRUCTOR:
8621 /* If we don't need the result, just ensure we evaluate any
8622 subexpressions. */
8623 if (ignore)
8625 unsigned HOST_WIDE_INT idx;
8626 tree value;
8628 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8629 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8631 return const0_rtx;
8634 return expand_constructor (exp, target, modifier, false);
8636 case MISALIGNED_INDIRECT_REF:
8637 case INDIRECT_REF:
8639 tree exp1 = treeop0;
8640 addr_space_t as = ADDR_SPACE_GENERIC;
8642 if (modifier != EXPAND_WRITE)
8644 tree t;
8646 t = fold_read_from_constant_string (exp);
8647 if (t)
8648 return expand_expr (t, target, tmode, modifier);
8651 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8652 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8654 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8655 op0 = memory_address_addr_space (mode, op0, as);
8657 temp = gen_rtx_MEM (mode, op0);
8659 set_mem_attributes (temp, exp, 0);
8660 set_mem_addr_space (temp, as);
8662 /* Resolve the misalignment now, so that we don't have to remember
8663 to resolve it later. Of course, this only works for reads. */
8664 if (code == MISALIGNED_INDIRECT_REF)
8666 int icode;
8667 rtx reg, insn;
8669 gcc_assert (modifier == EXPAND_NORMAL
8670 || modifier == EXPAND_STACK_PARM);
8672 /* The vectorizer should have already checked the mode. */
8673 icode = optab_handler (movmisalign_optab, mode);
8674 gcc_assert (icode != CODE_FOR_nothing);
8676 /* We've already validated the memory, and we're creating a
8677 new pseudo destination. The predicates really can't fail. */
8678 reg = gen_reg_rtx (mode);
8680 /* Nor can the insn generator. */
8681 insn = GEN_FCN (icode) (reg, temp);
8682 emit_insn (insn);
8684 return reg;
8687 return temp;
8690 case TARGET_MEM_REF:
8692 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8693 struct mem_address addr;
8694 tree base;
8696 get_address_description (exp, &addr);
8697 op0 = addr_for_mem_ref (&addr, as, true);
8698 op0 = memory_address_addr_space (mode, op0, as);
8699 temp = gen_rtx_MEM (mode, op0);
8700 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8701 set_mem_addr_space (temp, as);
8702 base = get_base_address (TMR_ORIGINAL (exp));
8703 if (INDIRECT_REF_P (base)
8704 && TMR_BASE (exp)
8705 && TREE_CODE (TMR_BASE (exp)) == SSA_NAME
8706 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
8708 set_mem_expr (temp, build1 (INDIRECT_REF,
8709 TREE_TYPE (exp), TMR_BASE (exp)));
8710 set_mem_offset (temp, NULL_RTX);
8713 return temp;
8715 case MEM_REF:
8717 addr_space_t as
8718 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8719 enum machine_mode address_mode;
8720 tree base = TREE_OPERAND (exp, 0);
8721 gimple def_stmt;
8722 /* Handle expansion of non-aliased memory with non-BLKmode. That
8723 might end up in a register. */
8724 if (TREE_CODE (base) == ADDR_EXPR)
8726 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8727 tree bit_offset;
8728 base = TREE_OPERAND (base, 0);
8729 if (!DECL_P (base))
8731 HOST_WIDE_INT off;
8732 base = get_addr_base_and_unit_offset (base, &off);
8733 gcc_assert (base);
8734 offset += off;
8736 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8737 decl we must use bitfield operations. */
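/* When the decl lives in a register, a MEM_REF covering the whole decl
   at offset zero is rewritten as a VIEW_CONVERT_EXPR; any other access is
   rewritten as a BIT_FIELD_REF at the corresponding bit offset.  */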
8738 if (DECL_P (base)
8739 && !TREE_ADDRESSABLE (base)
8740 && DECL_MODE (base) != BLKmode
8741 && DECL_RTL_SET_P (base)
8742 && !MEM_P (DECL_RTL (base)))
8744 tree bftype;
8745 if (offset == 0
8746 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8747 && (GET_MODE_BITSIZE (DECL_MODE (base))
8748 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8749 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8750 TREE_TYPE (exp), base),
8751 target, tmode, modifier);
8752 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8753 bftype = TREE_TYPE (base);
8754 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8755 bftype = TREE_TYPE (exp);
8756 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8757 base,
8758 TYPE_SIZE (TREE_TYPE (exp)),
8759 bit_offset),
8760 target, tmode, modifier);
8763 address_mode = targetm.addr_space.address_mode (as);
8764 base = TREE_OPERAND (exp, 0);
8765 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8766 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8767 gimple_assign_rhs1 (def_stmt),
8768 gimple_assign_rhs2 (def_stmt));
8769 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8770 base = build2 (POINTER_PLUS_EXPR, TREE_TYPE (base),
8771 base, double_int_to_tree (sizetype,
8772 mem_ref_offset (exp)));
8773 op0 = expand_expr (base, NULL_RTX, address_mode, EXPAND_SUM);
8774 op0 = memory_address_addr_space (mode, op0, as);
8775 temp = gen_rtx_MEM (mode, op0);
8776 set_mem_attributes (temp, exp, 0);
8777 set_mem_addr_space (temp, as);
8778 if (TREE_THIS_VOLATILE (exp))
8779 MEM_VOLATILE_P (temp) = 1;
8780 return temp;
8783 case ARRAY_REF:
8786 tree array = treeop0;
8787 tree index = treeop1;
8789 /* Fold an expression like: "foo"[2].
8790 This is not done in fold so it won't happen inside &.
8791 Don't fold if this is for wide characters since it's too
8792 difficult to do correctly and this is a very rare case. */
8794 if (modifier != EXPAND_CONST_ADDRESS
8795 && modifier != EXPAND_INITIALIZER
8796 && modifier != EXPAND_MEMORY)
8798 tree t = fold_read_from_constant_string (exp);
8800 if (t)
8801 return expand_expr (t, target, tmode, modifier);
8804 /* If this is a constant index into a constant array,
8805 just get the value from the array. Handle both the cases when
8806 we have an explicit constructor and when our operand is a variable
8807 that was declared const. */
8809 if (modifier != EXPAND_CONST_ADDRESS
8810 && modifier != EXPAND_INITIALIZER
8811 && modifier != EXPAND_MEMORY
8812 && TREE_CODE (array) == CONSTRUCTOR
8813 && ! TREE_SIDE_EFFECTS (array)
8814 && TREE_CODE (index) == INTEGER_CST)
8816 unsigned HOST_WIDE_INT ix;
8817 tree field, value;
8819 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8820 field, value)
8821 if (tree_int_cst_equal (field, index))
8823 if (!TREE_SIDE_EFFECTS (value))
8824 return expand_expr (fold (value), target, tmode, modifier);
8825 break;
8829 else if (optimize >= 1
8830 && modifier != EXPAND_CONST_ADDRESS
8831 && modifier != EXPAND_INITIALIZER
8832 && modifier != EXPAND_MEMORY
8833 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8834 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8835 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8836 && targetm.binds_local_p (array))
8838 if (TREE_CODE (index) == INTEGER_CST)
8840 tree init = DECL_INITIAL (array);
8842 if (TREE_CODE (init) == CONSTRUCTOR)
8844 unsigned HOST_WIDE_INT ix;
8845 tree field, value;
8847 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8848 field, value)
8849 if (tree_int_cst_equal (field, index))
8851 if (TREE_SIDE_EFFECTS (value))
8852 break;
8854 if (TREE_CODE (value) == CONSTRUCTOR)
8856 /* If VALUE is a CONSTRUCTOR, this
8857 optimization is only useful if
8858 this doesn't store the CONSTRUCTOR
8859 into memory. If it does, it is more
8860 efficient to just load the data from
8861 the array directly. */
8862 rtx ret = expand_constructor (value, target,
8863 modifier, true);
8864 if (ret == NULL_RTX)
8865 break;
8868 return expand_expr (fold (value), target, tmode,
8869 modifier);
8872 else if (TREE_CODE (init) == STRING_CST)
8874 tree index1 = index;
8875 tree low_bound = array_ref_low_bound (exp);
8876 index1 = fold_convert_loc (loc, sizetype,
8877 treeop1);
8879 /* Optimize the special case of a zero lower bound.
8881 We convert the low_bound to sizetype to avoid some problems
8882 with constant folding. (E.g. suppose the lower bound is 1,
8883 and its mode is QI. Without the conversion, (ARRAY
8884 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8885 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8887 if (! integer_zerop (low_bound))
8888 index1 = size_diffop_loc (loc, index1,
8889 fold_convert_loc (loc, sizetype,
8890 low_bound));
8892 if (0 > compare_tree_int (index1,
8893 TREE_STRING_LENGTH (init)))
8895 tree type = TREE_TYPE (TREE_TYPE (init));
8896 enum machine_mode mode = TYPE_MODE (type);
8898 if (GET_MODE_CLASS (mode) == MODE_INT
8899 && GET_MODE_SIZE (mode) == 1)
8900 return gen_int_mode (TREE_STRING_POINTER (init)
8901 [TREE_INT_CST_LOW (index1)],
8902 mode);
8908 goto normal_inner_ref;
8910 case COMPONENT_REF:
8911 /* If the operand is a CONSTRUCTOR, we can just extract the
8912 appropriate field if it is present. */
8913 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8915 unsigned HOST_WIDE_INT idx;
8916 tree field, value;
8918 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8919 idx, field, value)
8920 if (field == treeop1
8921 /* We can normally use the value of the field in the
8922 CONSTRUCTOR. However, if this is a bitfield in
8923 an integral mode that we can fit in a HOST_WIDE_INT,
8924 we must mask only the number of bits in the bitfield,
8925 since this is done implicitly by the constructor. If
8926 the bitfield does not meet either of those conditions,
8927 we can't do this optimization. */
8928 && (! DECL_BIT_FIELD (field)
8929 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8930 && (GET_MODE_BITSIZE (DECL_MODE (field))
8931 <= HOST_BITS_PER_WIDE_INT))))
8933 if (DECL_BIT_FIELD (field)
8934 && modifier == EXPAND_STACK_PARM)
8935 target = 0;
8936 op0 = expand_expr (value, target, tmode, modifier);
8937 if (DECL_BIT_FIELD (field))
8939 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8940 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8942 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8944 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8945 op0 = expand_and (imode, op0, op1, target);
8947 else
8949 tree count
8950 = build_int_cst (NULL_TREE,
8951 GET_MODE_BITSIZE (imode) - bitsize);
8953 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8954 target, 0);
8955 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8956 target, 0);
8960 return op0;
8963 goto normal_inner_ref;
8965 case BIT_FIELD_REF:
8966 case ARRAY_RANGE_REF:
8967 normal_inner_ref:
8969 enum machine_mode mode1, mode2;
8970 HOST_WIDE_INT bitsize, bitpos;
8971 tree offset;
8972 int volatilep = 0, must_force_mem;
8973 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8974 &mode1, &unsignedp, &volatilep, true);
8975 rtx orig_op0, memloc;
8977 /* If we got back the original object, something is wrong. Perhaps
8978 we are evaluating an expression too early. In any event, don't
8979 infinitely recurse. */
8980 gcc_assert (tem != exp);
8982 /* If TEM's type is a union of variable size, pass TARGET to the inner
8983 computation, since it will need a temporary and TARGET is known
8984 to be suitable. This occurs in unchecked conversion in Ada. */
8985 orig_op0 = op0
8986 = expand_expr (tem,
8987 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8988 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8989 != INTEGER_CST)
8990 && modifier != EXPAND_STACK_PARM
8991 ? target : NULL_RTX),
8992 VOIDmode,
8993 (modifier == EXPAND_INITIALIZER
8994 || modifier == EXPAND_CONST_ADDRESS
8995 || modifier == EXPAND_STACK_PARM)
8996 ? modifier : EXPAND_NORMAL);
8999 /* If the bitfield is volatile, we want to access it in the
9000 field's mode, not the computed mode. */
9001 if (volatilep
9002 && GET_CODE (op0) == MEM
9003 && flag_strict_volatile_bitfields > 0)
9004 op0 = adjust_address (op0, mode1, 0);
9006 mode2
9007 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9009 /* If we have either an offset, a BLKmode result, or a reference
9010 outside the underlying object, we must force it to memory.
9011 Such a case can occur in Ada if we have unchecked conversion
9012 of an expression from a scalar type to an aggregate type or
9013 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9014 passed a partially uninitialized object or a view-conversion
9015 to a larger size. */
9016 must_force_mem = (offset
9017 || mode1 == BLKmode
9018 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9020 /* Handle CONCAT first. */
9021 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9023 if (bitpos == 0
9024 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9025 return op0;
9026 if (bitpos == 0
9027 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9028 && bitsize)
9030 op0 = XEXP (op0, 0);
9031 mode2 = GET_MODE (op0);
9033 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9034 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9035 && bitpos
9036 && bitsize)
9038 op0 = XEXP (op0, 1);
9039 bitpos = 0;
9040 mode2 = GET_MODE (op0);
9042 else
9043 /* Otherwise force into memory. */
9044 must_force_mem = 1;
9047 /* If this is a constant, put it in a register if it is a legitimate
9048 constant and we don't need a memory reference. */
9049 if (CONSTANT_P (op0)
9050 && mode2 != BLKmode
9051 && LEGITIMATE_CONSTANT_P (op0)
9052 && !must_force_mem)
9053 op0 = force_reg (mode2, op0);
9055 /* Otherwise, if this is a constant, try to force it to the constant
9056 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9057 is a legitimate constant. */
9058 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9059 op0 = validize_mem (memloc);
9061 /* Otherwise, if this is a constant or the object is not in memory
9062 and need be, put it there. */
9063 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9065 tree nt = build_qualified_type (TREE_TYPE (tem),
9066 (TYPE_QUALS (TREE_TYPE (tem))
9067 | TYPE_QUAL_CONST));
9068 memloc = assign_temp (nt, 1, 1, 1);
9069 emit_move_insn (memloc, op0);
9070 op0 = memloc;
9073 if (offset)
9075 enum machine_mode address_mode;
9076 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9077 EXPAND_SUM);
9079 gcc_assert (MEM_P (op0));
9081 address_mode
9082 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9083 if (GET_MODE (offset_rtx) != address_mode)
9084 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9086 if (GET_MODE (op0) == BLKmode
9087 /* A constant address in OP0 can have VOIDmode, we must
9088 not try to call force_reg in that case. */
9089 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9090 && bitsize != 0
9091 && (bitpos % bitsize) == 0
9092 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9093 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9095 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9096 bitpos = 0;
9099 op0 = offset_address (op0, offset_rtx,
9100 highest_pow2_factor (offset));
9103 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9104 record its alignment as BIGGEST_ALIGNMENT. */
9105 if (MEM_P (op0) && bitpos == 0 && offset != 0
9106 && is_aligning_offset (offset, tem))
9107 set_mem_align (op0, BIGGEST_ALIGNMENT);
9109 /* Don't forget about volatility even if this is a bitfield. */
9110 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9112 if (op0 == orig_op0)
9113 op0 = copy_rtx (op0);
9115 MEM_VOLATILE_P (op0) = 1;
9118 /* In cases where an aligned union has an unaligned object
9119 as a field, we might be extracting a BLKmode value from
9120 an integer-mode (e.g., SImode) object. Handle this case
9121 by doing the extract into an object as wide as the field
9122 (which we know to be the width of a basic mode), then
9123 storing into memory, and changing the mode to BLKmode. */
9124 if (mode1 == VOIDmode
9125 || REG_P (op0) || GET_CODE (op0) == SUBREG
9126 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9127 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9128 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9129 && modifier != EXPAND_CONST_ADDRESS
9130 && modifier != EXPAND_INITIALIZER)
9131 /* If the field is volatile, we always want an aligned
9132 access. */
9133 || (volatilep && flag_strict_volatile_bitfields > 0)
9134 /* If the field isn't aligned enough to fetch as a memref,
9135 fetch it as a bit field. */
9136 || (mode1 != BLKmode
9137 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9138 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9139 || (MEM_P (op0)
9140 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9141 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9142 && ((modifier == EXPAND_CONST_ADDRESS
9143 || modifier == EXPAND_INITIALIZER)
9144 ? STRICT_ALIGNMENT
9145 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9146 || (bitpos % BITS_PER_UNIT != 0)))
9147 /* If the type and the field are a constant size and the
9148 size of the type isn't the same size as the bitfield,
9149 we must use bitfield operations. */
9150 || (bitsize >= 0
9151 && TYPE_SIZE (TREE_TYPE (exp))
9152 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9153 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9154 bitsize)))
9156 enum machine_mode ext_mode = mode;
9158 if (ext_mode == BLKmode
9159 && ! (target != 0 && MEM_P (op0)
9160 && MEM_P (target)
9161 && bitpos % BITS_PER_UNIT == 0))
9162 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9164 if (ext_mode == BLKmode)
9166 if (target == 0)
9167 target = assign_temp (type, 0, 1, 1);
9169 if (bitsize == 0)
9170 return target;
9172 /* In this case, BITPOS must start at a byte boundary and
9173 TARGET, if specified, must be a MEM. */
9174 gcc_assert (MEM_P (op0)
9175 && (!target || MEM_P (target))
9176 && !(bitpos % BITS_PER_UNIT));
9178 emit_block_move (target,
9179 adjust_address (op0, VOIDmode,
9180 bitpos / BITS_PER_UNIT),
9181 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9182 / BITS_PER_UNIT),
9183 (modifier == EXPAND_STACK_PARM
9184 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9186 return target;
9189 op0 = validize_mem (op0);
9191 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9192 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9194 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9195 (modifier == EXPAND_STACK_PARM
9196 ? NULL_RTX : target),
9197 ext_mode, ext_mode);
9199 /* If the result is a record type and BITSIZE is narrower than
9200 the mode of OP0, an integral mode, and this is a big endian
9201 machine, we must put the field into the high-order bits. */
9202 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9203 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9204 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9205 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9206 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9207 - bitsize),
9208 op0, 1);
9210 /* If the result type is BLKmode, store the data into a temporary
9211 of the appropriate type, but with the mode corresponding to the
9212 mode for the data we have (op0's mode). It's tempting to make
9213 this a constant type, since we know it's only being stored once,
9214 but that can cause problems if we are taking the address of this
9215 COMPONENT_REF because the MEM of any reference via that address
9216 will have flags corresponding to the type, which will not
9217 necessarily be constant. */
9218 if (mode == BLKmode)
9220 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9221 rtx new_rtx;
9223 /* If the reference doesn't use the alias set of its type,
9224 we cannot create the temporary using that type. */
9225 if (component_uses_parent_alias_set (exp))
9227 new_rtx = assign_stack_local (ext_mode, size, 0);
9228 set_mem_alias_set (new_rtx, get_alias_set (exp));
9230 else
9231 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9233 emit_move_insn (new_rtx, op0);
9234 op0 = copy_rtx (new_rtx);
9235 PUT_MODE (op0, BLKmode);
9236 set_mem_attributes (op0, exp, 1);
9239 return op0;
9242 /* If the result is BLKmode, use that to access the object
9243 now as well. */
9244 if (mode == BLKmode)
9245 mode1 = BLKmode;
9247 /* Get a reference to just this component. */
9248 if (modifier == EXPAND_CONST_ADDRESS
9249 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9250 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9251 else
9252 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9254 if (op0 == orig_op0)
9255 op0 = copy_rtx (op0);
9257 set_mem_attributes (op0, exp, 0);
9258 if (REG_P (XEXP (op0, 0)))
9259 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9261 MEM_VOLATILE_P (op0) |= volatilep;
9262 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9263 || modifier == EXPAND_CONST_ADDRESS
9264 || modifier == EXPAND_INITIALIZER)
9265 return op0;
9266 else if (target == 0)
9267 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9269 convert_move (target, op0, unsignedp);
9270 return target;
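/* An illustrative, hypothetical source-level case for the bit-field path
   above (type and field names are invented, not taken from a testcase):

       union u { int whole; struct { char c[3]; } part; } x;
       ... = x.part;

   PART is a BLKmode value living inside an SImode object, so it is pulled
   out with extract_bit_field in an integer mode wide enough for the field
   and, when a BLKmode result is required, spilled to a stack temporary
   whose mode is then rewritten to BLKmode before the MEM is returned.  */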
9273 case OBJ_TYPE_REF:
9274 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9276 case CALL_EXPR:
9277 /* All valid uses of __builtin_va_arg_pack () are removed during
9278 inlining. */
9279 if (CALL_EXPR_VA_ARG_PACK (exp))
9280 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9282 tree fndecl = get_callee_fndecl (exp), attr;
9284 if (fndecl
9285 && (attr = lookup_attribute ("error",
9286 DECL_ATTRIBUTES (fndecl))) != NULL)
9287 error ("%Kcall to %qs declared with attribute error: %s",
9288 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9289 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9290 if (fndecl
9291 && (attr = lookup_attribute ("warning",
9292 DECL_ATTRIBUTES (fndecl))) != NULL)
9293 warning_at (tree_nonartificial_location (exp),
9294 0, "%Kcall to %qs declared with attribute warning: %s",
9295 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9296 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9298 /* Check for a built-in function. */
9299 if (fndecl && DECL_BUILT_IN (fndecl))
9301 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9302 return expand_builtin (exp, target, subtarget, tmode, ignore);
9305 return expand_call (exp, target, ignore);
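/* Hypothetical user-level example (not from the testsuite) of the
   "error"/"warning" attribute diagnostics issued just above:

       extern void bad (void) __attribute__ ((error ("do not call bad")));
       void f (void) { bad (); }

   Expanding the call to BAD reports the attached message; the "warning"
   attribute behaves the same way but only warns.  Built-in functions are
   dispatched to expand_builtin, everything else to expand_call.  */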
9307 case VIEW_CONVERT_EXPR:
9308 op0 = NULL_RTX;
9310 /* If we are converting to BLKmode, try to avoid an intermediate
9311 temporary by fetching an inner memory reference. */
9312 if (mode == BLKmode
9313 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9314 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9315 && handled_component_p (treeop0))
9317 enum machine_mode mode1;
9318 HOST_WIDE_INT bitsize, bitpos;
9319 tree offset;
9320 int unsignedp;
9321 int volatilep = 0;
9322 tree tem
9323 = get_inner_reference (treeop0, &bitsize, &bitpos,
9324 &offset, &mode1, &unsignedp, &volatilep,
9325 true);
9326 rtx orig_op0;
9328 /* ??? We should work harder and deal with non-zero offsets. */
9329 if (!offset
9330 && (bitpos % BITS_PER_UNIT) == 0
9331 && bitsize >= 0
9332 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9334 /* See the normal_inner_ref case for the rationale. */
9335 orig_op0
9336 = expand_expr (tem,
9337 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9338 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9339 != INTEGER_CST)
9340 && modifier != EXPAND_STACK_PARM
9341 ? target : NULL_RTX),
9342 VOIDmode,
9343 (modifier == EXPAND_INITIALIZER
9344 || modifier == EXPAND_CONST_ADDRESS
9345 || modifier == EXPAND_STACK_PARM)
9346 ? modifier : EXPAND_NORMAL);
9348 if (MEM_P (orig_op0))
9350 op0 = orig_op0;
9352 /* Get a reference to just this component. */
9353 if (modifier == EXPAND_CONST_ADDRESS
9354 || modifier == EXPAND_SUM
9355 || modifier == EXPAND_INITIALIZER)
9356 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9357 else
9358 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9360 if (op0 == orig_op0)
9361 op0 = copy_rtx (op0);
9363 set_mem_attributes (op0, treeop0, 0);
9364 if (REG_P (XEXP (op0, 0)))
9365 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9367 MEM_VOLATILE_P (op0) |= volatilep;
9372 if (!op0)
9373 op0 = expand_expr (treeop0,
9374 NULL_RTX, VOIDmode, modifier);
9376 /* If the input and output modes are both the same, we are done. */
9377 if (mode == GET_MODE (op0))
9379 /* If neither mode is BLKmode, and both modes are the same size
9380 then we can use gen_lowpart. */
9381 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9382 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9383 && !COMPLEX_MODE_P (GET_MODE (op0)))
9385 if (GET_CODE (op0) == SUBREG)
9386 op0 = force_reg (GET_MODE (op0), op0);
9387 op0 = gen_lowpart (mode, op0);
9389 /* If both types are integral, convert from one mode to the other. */
9390 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9391 op0 = convert_modes (mode, GET_MODE (op0), op0,
9392 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9393 /* As a last resort, spill op0 to memory, and reload it in a
9394 different mode. */
9395 else if (!MEM_P (op0))
9397 /* If the operand is not a MEM, force it into memory. Since we
9398 are going to be changing the mode of the MEM, don't call
9399 force_const_mem for constants because we don't allow pool
9400 constants to change mode. */
9401 tree inner_type = TREE_TYPE (treeop0);
9403 gcc_assert (!TREE_ADDRESSABLE (exp));
9405 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9406 target
9407 = assign_stack_temp_for_type
9408 (TYPE_MODE (inner_type),
9409 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9411 emit_move_insn (target, op0);
9412 op0 = target;
9415 /* At this point, OP0 is in the correct mode. If the output type is
9416 such that the operand is known to be aligned, indicate that it is.
9417 Otherwise, we need only be concerned about alignment for non-BLKmode
9418 results. */
9419 if (MEM_P (op0))
9421 op0 = copy_rtx (op0);
9423 if (TYPE_ALIGN_OK (type))
9424 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9425 else if (STRICT_ALIGNMENT
9426 && mode != BLKmode
9427 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9429 tree inner_type = TREE_TYPE (treeop0);
9430 HOST_WIDE_INT temp_size
9431 = MAX (int_size_in_bytes (inner_type),
9432 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9433 rtx new_rtx
9434 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9435 rtx new_with_op0_mode
9436 = adjust_address (new_rtx, GET_MODE (op0), 0);
9438 gcc_assert (!TREE_ADDRESSABLE (exp));
9440 if (GET_MODE (op0) == BLKmode)
9441 emit_block_move (new_with_op0_mode, op0,
9442 GEN_INT (GET_MODE_SIZE (mode)),
9443 (modifier == EXPAND_STACK_PARM
9444 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9445 else
9446 emit_move_insn (new_with_op0_mode, op0);
9448 op0 = new_rtx;
9451 op0 = adjust_address (op0, mode, 0);
9454 return op0;
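/* A sketch of the mode-change strategy above, using a hypothetical
   same-size reinterpretation (assuming a target where SFmode and SImode
   have equal size):

       float f;  ... VIEW_CONVERT_EXPR<int>(f) ...

   Here gen_lowpart is enough; when neither the same-size case nor the
   integral-conversion case applies, the operand is instead spilled to a
   stack temporary and re-read in the result mode.  */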
9456 /* Use a compare and a jump for BLKmode comparisons, or for function
9457 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */
9459 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9460 are occasionally created by folding during expansion. */
9461 case TRUTH_ANDIF_EXPR:
9462 case TRUTH_ORIF_EXPR:
9463 if (! ignore
9464 && (target == 0
9465 || modifier == EXPAND_STACK_PARM
9466 || ! safe_from_p (target, treeop0, 1)
9467 || ! safe_from_p (target, treeop1, 1)
9468 /* Make sure we don't have a hard reg (such as function's return
9469 value) live across basic blocks, if not optimizing. */
9470 || (!optimize && REG_P (target)
9471 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9472 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9474 if (target)
9475 emit_move_insn (target, const0_rtx);
9477 op1 = gen_label_rtx ();
9478 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9480 if (target)
9481 emit_move_insn (target, const1_rtx);
9483 emit_label (op1);
9484 return ignore ? const0_rtx : target;
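/* The expansion above amounts to the following sketch (TARGET is the
   result register; the label name is hypothetical):

       target = 0;
       if (!(a && b))          /* or (a || b) for TRUTH_ORIF_EXPR */
         goto done;
       target = 1;
     done:;
*/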
9486 case STATEMENT_LIST:
9488 tree_stmt_iterator iter;
9490 gcc_assert (ignore);
9492 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9493 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9495 return const0_rtx;
9497 case COND_EXPR:
9498 /* A COND_EXPR with its type being VOID_TYPE represents a
9499 conditional jump and is handled in
9500 expand_gimple_cond_expr. */
9501 gcc_assert (!VOID_TYPE_P (type));
9503 /* Note that COND_EXPRs whose type is a structure or union
9504 are required to be constructed to contain assignments of
9505 a temporary variable, so that we can evaluate them here
9506 for side effect only. If type is void, we must do likewise. */
9508 gcc_assert (!TREE_ADDRESSABLE (type)
9509 && !ignore
9510 && TREE_TYPE (treeop1) != void_type_node
9511 && TREE_TYPE (treeop2) != void_type_node);
9513 /* If we are not to produce a result, we have no target. Otherwise,
9514 if a target was specified use it; it will not be used as an
9515 intermediate target unless it is safe. If no target, use a
9516 temporary. */
9518 if (modifier != EXPAND_STACK_PARM
9519 && original_target
9520 && safe_from_p (original_target, treeop0, 1)
9521 && GET_MODE (original_target) == mode
9522 #ifdef HAVE_conditional_move
9523 && (! can_conditionally_move_p (mode)
9524 || REG_P (original_target))
9525 #endif
9526 && !MEM_P (original_target))
9527 temp = original_target;
9528 else
9529 temp = assign_temp (type, 0, 0, 1);
9531 do_pending_stack_adjust ();
9532 NO_DEFER_POP;
9533 op0 = gen_label_rtx ();
9534 op1 = gen_label_rtx ();
9535 jumpifnot (treeop0, op0, -1);
9536 store_expr (treeop1, temp,
9537 modifier == EXPAND_STACK_PARM,
9538 false);
9540 emit_jump_insn (gen_jump (op1));
9541 emit_barrier ();
9542 emit_label (op0);
9543 store_expr (treeop2, temp,
9544 modifier == EXPAND_STACK_PARM,
9545 false);
9547 emit_label (op1);
9548 OK_DEFER_POP;
9549 return temp;
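/* Control-flow sketch of the COND_EXPR expansion above (OP0 and OP1 are
   the two labels generated in the code above; the names here are only for
   illustration):

       if (!cond) goto else_part;
       temp = then_value;
       goto done;
     else_part:
       temp = else_value;
     done:
       return temp;
*/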
9551 case VEC_COND_EXPR:
9552 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9553 return target;
9555 case MODIFY_EXPR:
9557 tree lhs = treeop0;
9558 tree rhs = treeop1;
9559 gcc_assert (ignore);
9561 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9562 of size 1. In this case, unless we need the result of the
9563 assignment, we can do this more efficiently with a
9564 test followed by an assignment, if necessary.
9566 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9567 things change so we do, this code should be enhanced to
9568 support it. */
9569 if (TREE_CODE (lhs) == COMPONENT_REF
9570 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9571 || TREE_CODE (rhs) == BIT_AND_EXPR)
9572 && TREE_OPERAND (rhs, 0) == lhs
9573 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9574 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9575 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9577 rtx label = gen_label_rtx ();
9578 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9579 do_jump (TREE_OPERAND (rhs, 1),
9580 value ? label : 0,
9581 value ? 0 : label, -1);
9582 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9583 MOVE_NONTEMPORAL (exp));
9584 do_pending_stack_adjust ();
9585 emit_label (label);
9586 return const0_rtx;
9589 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9590 return const0_rtx;
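/* Hypothetical source-level example of the single-bit |=/&= shortcut
   above (struct and field names invented for illustration):

       struct s { unsigned a : 1, b : 1; } x;
       x.a |= x.b;    in effect becomes    if (x.b) x.a = 1;
       x.a &= x.b;    in effect becomes    if (!x.b) x.a = 0;
*/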
9593 case ADDR_EXPR:
9594 return expand_expr_addr_expr (exp, target, tmode, modifier);
9596 case REALPART_EXPR:
9597 op0 = expand_normal (treeop0);
9598 return read_complex_part (op0, false);
9600 case IMAGPART_EXPR:
9601 op0 = expand_normal (treeop0);
9602 return read_complex_part (op0, true);
9604 case RETURN_EXPR:
9605 case LABEL_EXPR:
9606 case GOTO_EXPR:
9607 case SWITCH_EXPR:
9608 case ASM_EXPR:
9609 /* Expanded in cfgexpand.c. */
9610 gcc_unreachable ();
9612 case TRY_CATCH_EXPR:
9613 case CATCH_EXPR:
9614 case EH_FILTER_EXPR:
9615 case TRY_FINALLY_EXPR:
9616 /* Lowered by tree-eh.c. */
9617 gcc_unreachable ();
9619 case WITH_CLEANUP_EXPR:
9620 case CLEANUP_POINT_EXPR:
9621 case TARGET_EXPR:
9622 case CASE_LABEL_EXPR:
9623 case VA_ARG_EXPR:
9624 case BIND_EXPR:
9625 case INIT_EXPR:
9626 case CONJ_EXPR:
9627 case COMPOUND_EXPR:
9628 case PREINCREMENT_EXPR:
9629 case PREDECREMENT_EXPR:
9630 case POSTINCREMENT_EXPR:
9631 case POSTDECREMENT_EXPR:
9632 case LOOP_EXPR:
9633 case EXIT_EXPR:
9634 /* Lowered by gimplify.c. */
9635 gcc_unreachable ();
9637 case FDESC_EXPR:
9638 /* Function descriptors are not valid except as
9639 initialization constants, and should not be expanded. */
9640 gcc_unreachable ();
9642 case WITH_SIZE_EXPR:
9643 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9644 have pulled out the size to use in whatever context it needed. */
9645 return expand_expr_real (treeop0, original_target, tmode,
9646 modifier, alt_rtl);
9648 case REALIGN_LOAD_EXPR:
9650 tree oprnd0 = treeop0;
9651 tree oprnd1 = treeop1;
9652 tree oprnd2 = treeop2;
9653 rtx op2;
9655 this_optab = optab_for_tree_code (code, type, optab_default);
9656 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9657 op2 = expand_normal (oprnd2);
9658 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9659 target, unsignedp);
9660 gcc_assert (temp);
9661 return temp;
9664 case DOT_PROD_EXPR:
9666 tree oprnd0 = treeop0;
9667 tree oprnd1 = treeop1;
9668 tree oprnd2 = treeop2;
9669 rtx op2;
9671 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9672 op2 = expand_normal (oprnd2);
9673 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9674 target, unsignedp);
9675 return target;
9678 case COMPOUND_LITERAL_EXPR:
9680 /* Initialize the anonymous variable declared in the compound
9681 literal, then return the variable. */
9682 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9684 /* Create RTL for this variable. */
9685 if (!DECL_RTL_SET_P (decl))
9687 if (DECL_HARD_REGISTER (decl))
9688 /* The user specified an assembler name for this variable.
9689 Set that up now. */
9690 rest_of_decl_compilation (decl, 0, 0);
9691 else
9692 expand_decl (decl);
9695 return expand_expr_real (decl, original_target, tmode,
9696 modifier, alt_rtl);
9699 default:
9700 return expand_expr_real_2 (&ops, target, tmode, modifier);
9704 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9705 signedness of TYPE), possibly returning the result in TARGET. */
9706 static rtx
9707 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9709 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9710 if (target && GET_MODE (target) != GET_MODE (exp))
9711 target = 0;
9712 /* For constant values, reduce using build_int_cst_type. */
9713 if (CONST_INT_P (exp))
9715 HOST_WIDE_INT value = INTVAL (exp);
9716 tree t = build_int_cst_type (type, value);
9717 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9719 else if (TYPE_UNSIGNED (type))
9721 rtx mask = immed_double_int_const (double_int_mask (prec),
9722 GET_MODE (exp));
9723 return expand_and (GET_MODE (exp), exp, mask, target);
9725 else
9727 tree count = build_int_cst (NULL_TREE,
9728 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9729 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9730 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
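/* Worked example (numbers invented): reducing to a 3-bit signed
   bit-field precision in a 32-bit mode computes (x << 29) >> 29 with an
   arithmetic right shift, so 5 becomes -3; for an unsigned 3-bit field
   the value is simply masked with 0x7.  */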
9734 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
9735 when applied to the address of EXP, produces an address known to be
9736 aligned to more than BIGGEST_ALIGNMENT. */
9738 static int
9739 is_aligning_offset (const_tree offset, const_tree exp)
9741 /* Strip off any conversions. */
9742 while (CONVERT_EXPR_P (offset))
9743 offset = TREE_OPERAND (offset, 0);
9745 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9746 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9747 if (TREE_CODE (offset) != BIT_AND_EXPR
9748 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9749 || compare_tree_int (TREE_OPERAND (offset, 1),
9750 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9751 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9752 return 0;
9754 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9755 It must be NEGATE_EXPR. Then strip any more conversions. */
9756 offset = TREE_OPERAND (offset, 0);
9757 while (CONVERT_EXPR_P (offset))
9758 offset = TREE_OPERAND (offset, 0);
9760 if (TREE_CODE (offset) != NEGATE_EXPR)
9761 return 0;
9763 offset = TREE_OPERAND (offset, 0);
9764 while (CONVERT_EXPR_P (offset))
9765 offset = TREE_OPERAND (offset, 0);
9767 /* This must now be the address of EXP. */
9768 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
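/* The pattern recognized above corresponds to the usual round-up idiom,
   written here as a hypothetical source sketch (ALIGN is a power-of-2
   byte count larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT):

       offset = (-(uintptr_t) &exp) & (ALIGN - 1);

   so that &exp + offset is ALIGN-byte aligned.  */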
9771 /* Return the tree node if ARG corresponds to a string constant, or zero
9772 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9773 in bytes within the string that ARG is accessing. The type of the
9774 offset will be `sizetype'. */
9776 tree
9777 string_constant (tree arg, tree *ptr_offset)
9779 tree array, offset, lower_bound;
9780 STRIP_NOPS (arg);
9782 if (TREE_CODE (arg) == ADDR_EXPR)
9784 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9786 *ptr_offset = size_zero_node;
9787 return TREE_OPERAND (arg, 0);
9789 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9791 array = TREE_OPERAND (arg, 0);
9792 offset = size_zero_node;
9794 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9796 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9797 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9798 if (TREE_CODE (array) != STRING_CST
9799 && TREE_CODE (array) != VAR_DECL)
9800 return 0;
9802 /* Check if the array has a nonzero lower bound. */
9803 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9804 if (!integer_zerop (lower_bound))
9806 /* If the offset and base aren't both constants, return 0. */
9807 if (TREE_CODE (lower_bound) != INTEGER_CST)
9808 return 0;
9809 if (TREE_CODE (offset) != INTEGER_CST)
9810 return 0;
9811 /* Adjust offset by the lower bound. */
9812 offset = size_diffop (fold_convert (sizetype, offset),
9813 fold_convert (sizetype, lower_bound));
9816 else
9817 return 0;
9819 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9821 tree arg0 = TREE_OPERAND (arg, 0);
9822 tree arg1 = TREE_OPERAND (arg, 1);
9824 STRIP_NOPS (arg0);
9825 STRIP_NOPS (arg1);
9827 if (TREE_CODE (arg0) == ADDR_EXPR
9828 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9829 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9831 array = TREE_OPERAND (arg0, 0);
9832 offset = arg1;
9834 else if (TREE_CODE (arg1) == ADDR_EXPR
9835 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9836 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9838 array = TREE_OPERAND (arg1, 0);
9839 offset = arg0;
9841 else
9842 return 0;
9844 else
9845 return 0;
9847 if (TREE_CODE (array) == STRING_CST)
9849 *ptr_offset = fold_convert (sizetype, offset);
9850 return array;
9852 else if (TREE_CODE (array) == VAR_DECL)
9854 int length;
9856 /* Variables initialized to string literals can be handled too. */
9857 if (DECL_INITIAL (array) == NULL_TREE
9858 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9859 return 0;
9861 /* Only handle them if they are read-only, non-volatile and bind locally. */
9862 if (! TREE_READONLY (array)
9863 || TREE_SIDE_EFFECTS (array)
9864 || ! targetm.binds_local_p (array))
9865 return 0;
9867 /* Avoid const char foo[4] = "abcde"; */
9868 if (DECL_SIZE_UNIT (array) == NULL_TREE
9869 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9870 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9871 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9872 return 0;
9874 /* If the variable is bigger than the string literal, OFFSET must be
9875 constant and within the bounds of the string literal. */
9876 offset = fold_convert (sizetype, offset);
9877 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9878 && (! host_integerp (offset, 1)
9879 || compare_tree_int (offset, length) >= 0))
9880 return 0;
9882 *ptr_offset = offset;
9883 return DECL_INITIAL (array);
9886 return 0;
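/* Conceptual examples (the real arguments are trees; C expressions are
   shown only for illustration):

       string_constant (&"hello"[2], &off)  returns the STRING_CST "hello"
                                            with *off == 2;
       static const char buf[6] = "hello";
       string_constant (buf + 1, &off)      returns the initializer "hello"
                                            with *off == 1, provided BUF is
                                            read-only and binds locally.  */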
9889 /* Generate code to calculate OPS, an exploded expression,
9890 using a store-flag instruction and return an rtx for the result.
9891 OPS reflects a comparison.
9893 If TARGET is nonzero, store the result there if convenient.
9895 Return zero if there is no suitable set-flag instruction
9896 available on this machine.
9898 Once expand_expr has been called on the arguments of the comparison,
9899 we are committed to doing the store flag, since it is not safe to
9900 re-evaluate the expression. We emit the store-flag insn by calling
9901 emit_store_flag, but only expand the arguments if we have a reason
9902 to believe that emit_store_flag will be successful. If we think that
9903 it will, but it isn't, we have to simulate the store-flag with a
9904 set/jump/set sequence. */
9906 static rtx
9907 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9909 enum rtx_code code;
9910 tree arg0, arg1, type;
9911 tree tem;
9912 enum machine_mode operand_mode;
9913 int unsignedp;
9914 rtx op0, op1;
9915 rtx subtarget = target;
9916 location_t loc = ops->location;
9918 arg0 = ops->op0;
9919 arg1 = ops->op1;
9921 /* Don't crash if the comparison was erroneous. */
9922 if (arg0 == error_mark_node || arg1 == error_mark_node)
9923 return const0_rtx;
9925 type = TREE_TYPE (arg0);
9926 operand_mode = TYPE_MODE (type);
9927 unsignedp = TYPE_UNSIGNED (type);
9929 /* We won't bother with BLKmode store-flag operations because it would mean
9930 passing a lot of information to emit_store_flag. */
9931 if (operand_mode == BLKmode)
9932 return 0;
9934 /* We won't bother with store-flag operations involving function pointers
9935 when function pointers must be canonicalized before comparisons. */
9936 #ifdef HAVE_canonicalize_funcptr_for_compare
9937 if (HAVE_canonicalize_funcptr_for_compare
9938 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9939 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9940 == FUNCTION_TYPE))
9941 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9942 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9943 == FUNCTION_TYPE))))
9944 return 0;
9945 #endif
9947 STRIP_NOPS (arg0);
9948 STRIP_NOPS (arg1);
9950 /* Get the rtx comparison code to use. We know that EXP is a comparison
9951 operation of some type. Some comparisons against 1 and -1 can be
9952 converted to comparisons with zero. Do so here so that the tests
9953 below will be aware that we have a comparison with zero. These
9954 tests will not catch constants in the first operand, but constants
9955 are rarely passed as the first operand. */
9957 switch (ops->code)
9959 case EQ_EXPR:
9960 code = EQ;
9961 break;
9962 case NE_EXPR:
9963 code = NE;
9964 break;
9965 case LT_EXPR:
9966 if (integer_onep (arg1))
9967 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9968 else
9969 code = unsignedp ? LTU : LT;
9970 break;
9971 case LE_EXPR:
9972 if (! unsignedp && integer_all_onesp (arg1))
9973 arg1 = integer_zero_node, code = LT;
9974 else
9975 code = unsignedp ? LEU : LE;
9976 break;
9977 case GT_EXPR:
9978 if (! unsignedp && integer_all_onesp (arg1))
9979 arg1 = integer_zero_node, code = GE;
9980 else
9981 code = unsignedp ? GTU : GT;
9982 break;
9983 case GE_EXPR:
9984 if (integer_onep (arg1))
9985 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9986 else
9987 code = unsignedp ? GEU : GE;
9988 break;
9990 case UNORDERED_EXPR:
9991 code = UNORDERED;
9992 break;
9993 case ORDERED_EXPR:
9994 code = ORDERED;
9995 break;
9996 case UNLT_EXPR:
9997 code = UNLT;
9998 break;
9999 case UNLE_EXPR:
10000 code = UNLE;
10001 break;
10002 case UNGT_EXPR:
10003 code = UNGT;
10004 break;
10005 case UNGE_EXPR:
10006 code = UNGE;
10007 break;
10008 case UNEQ_EXPR:
10009 code = UNEQ;
10010 break;
10011 case LTGT_EXPR:
10012 code = LTGT;
10013 break;
10015 default:
10016 gcc_unreachable ();
10019 /* Put a constant second. */
10020 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10021 || TREE_CODE (arg0) == FIXED_CST)
10023 tem = arg0; arg0 = arg1; arg1 = tem;
10024 code = swap_condition (code);
10027 /* If this is an equality or inequality test of a single bit, we can
10028 do this by shifting the bit being tested to the low-order bit and
10029 masking the result with the constant 1. If the condition was EQ,
10030 we xor it with 1. This does not require an scc insn and is faster
10031 than an scc insn even if we have it.
10033 The code to make this transformation was moved into fold_single_bit_test,
10034 so we just call into the folder and expand its result. */
10036 if ((code == NE || code == EQ)
10037 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10038 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10040 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10041 return expand_expr (fold_single_bit_test (loc,
10042 code == NE ? NE_EXPR : EQ_EXPR,
10043 arg0, arg1, type),
10044 target, VOIDmode, EXPAND_NORMAL);
10047 if (! get_subtarget (target)
10048 || GET_MODE (subtarget) != operand_mode)
10049 subtarget = 0;
10051 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10053 if (target == 0)
10054 target = gen_reg_rtx (mode);
10056 /* Try a cstore if possible. */
10057 return emit_store_flag_force (target, code, op0, op1,
10058 operand_mode, unsignedp, 1);
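/* Hypothetical example of the single-bit rewrite mentioned above:
   (x & 8) != 0 is folded by fold_single_bit_test into the equivalent of
   (x >> 3) & 1, and (x & 8) == 0 into ((x >> 3) & 1) ^ 1, so no scc
   instruction is needed.  */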
10062 /* Stubs in case we haven't got a casesi insn. */
10063 #ifndef HAVE_casesi
10064 # define HAVE_casesi 0
10065 # define gen_casesi(a, b, c, d, e) (0)
10066 # define CODE_FOR_casesi CODE_FOR_nothing
10067 #endif
10069 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10070 0 otherwise (i.e. if there is no casesi instruction). */
10071 int
10072 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10073 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10074 rtx fallback_label ATTRIBUTE_UNUSED)
10076 enum machine_mode index_mode = SImode;
10077 int index_bits = GET_MODE_BITSIZE (index_mode);
10078 rtx op1, op2, index;
10079 enum machine_mode op_mode;
10081 if (! HAVE_casesi)
10082 return 0;
10084 /* Convert the index to SImode. */
10085 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10087 enum machine_mode omode = TYPE_MODE (index_type);
10088 rtx rangertx = expand_normal (range);
10090 /* We must handle the endpoints in the original mode. */
10091 index_expr = build2 (MINUS_EXPR, index_type,
10092 index_expr, minval);
10093 minval = integer_zero_node;
10094 index = expand_normal (index_expr);
10095 if (default_label)
10096 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10097 omode, 1, default_label);
10098 /* Now we can safely truncate. */
10099 index = convert_to_mode (index_mode, index, 0);
10101 else
10103 if (TYPE_MODE (index_type) != index_mode)
10105 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10106 index_expr = fold_convert (index_type, index_expr);
10109 index = expand_normal (index_expr);
10112 do_pending_stack_adjust ();
10114 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10115 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10116 (index, op_mode))
10117 index = copy_to_mode_reg (op_mode, index);
10119 op1 = expand_normal (minval);
10121 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10122 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10123 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10124 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10125 (op1, op_mode))
10126 op1 = copy_to_mode_reg (op_mode, op1);
10128 op2 = expand_normal (range);
10130 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10131 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10132 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10133 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10134 (op2, op_mode))
10135 op2 = copy_to_mode_reg (op_mode, op2);
10137 emit_jump_insn (gen_casesi (index, op1, op2,
10138 table_label, !default_label
10139 ? fallback_label : default_label));
10140 return 1;
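/* Sketch of the wide-index handling above (illustrative): with, say, a
   DImode index on a target whose casesi wants SImode, the expansion
   computes INDEX_EXPR - MINVAL in DImode, jumps to DEFAULT_LABEL when the
   result exceeds RANGE, and only then truncates to SImode, so no bits are
   lost before the bounds check.  */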
10143 /* Attempt to generate a tablejump instruction; same concept. */
10144 #ifndef HAVE_tablejump
10145 #define HAVE_tablejump 0
10146 #define gen_tablejump(x, y) (0)
10147 #endif
10149 /* Subroutine of the next function.
10151 INDEX is the value being switched on, with the lowest value
10152 in the table already subtracted.
10153 MODE is its expected mode (needed if INDEX is constant).
10154 RANGE is the length of the jump table.
10155 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10157 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10158 index value is out of range. */
10160 static void
10161 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10162 rtx default_label)
10164 rtx temp, vector;
10166 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10167 cfun->cfg->max_jumptable_ents = INTVAL (range);
10169 /* Do an unsigned comparison (in the proper mode) between the index
10170 expression and the value which represents the length of the range.
10171 Since we just finished subtracting the lower bound of the range
10172 from the index expression, this comparison allows us to simultaneously
10173 check that the original index expression value is both greater than
10174 or equal to the minimum value of the range and less than or equal to
10175 the maximum value of the range. */
10177 if (default_label)
10178 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10179 default_label);
10181 /* If index is in range, it must fit in Pmode.
10182 Convert to Pmode so we can index with it. */
10183 if (mode != Pmode)
10184 index = convert_to_mode (Pmode, index, 1);
10186 /* Don't let a MEM slip through, because then INDEX that comes
10187 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10188 and break_out_memory_refs will go to work on it and mess it up. */
10189 #ifdef PIC_CASE_VECTOR_ADDRESS
10190 if (flag_pic && !REG_P (index))
10191 index = copy_to_mode_reg (Pmode, index);
10192 #endif
10194 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10195 GET_MODE_SIZE, because this indicates how large insns are. The other
10196 uses should all be Pmode, because they are addresses. This code
10197 could fail if addresses and insns are not the same size. */
10198 index = gen_rtx_PLUS (Pmode,
10199 gen_rtx_MULT (Pmode, index,
10200 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10201 gen_rtx_LABEL_REF (Pmode, table_label));
10202 #ifdef PIC_CASE_VECTOR_ADDRESS
10203 if (flag_pic)
10204 index = PIC_CASE_VECTOR_ADDRESS (index);
10205 else
10206 #endif
10207 index = memory_address (CASE_VECTOR_MODE, index);
10208 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10209 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10210 convert_move (temp, vector, 0);
10212 emit_jump_insn (gen_tablejump (temp, table_label));
10214 /* If we are generating PIC code or if the table is PC-relative, the
10215 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10216 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10217 emit_barrier ();
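/* Worked example of the unsigned range check above (numbers invented):
   for case values 3..7 the caller passes INDEX already lowered to i - 3
   and RANGE == 4, so the single unsigned test (unsigned) (i - 3) > 4
   rejects both i < 3 (which wraps around to a large value) and i > 7 in
   one comparison.  */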
10220 int
10221 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10222 rtx table_label, rtx default_label)
10224 rtx index;
10226 if (! HAVE_tablejump)
10227 return 0;
10229 index_expr = fold_build2 (MINUS_EXPR, index_type,
10230 fold_convert (index_type, index_expr),
10231 fold_convert (index_type, minval));
10232 index = expand_normal (index_expr);
10233 do_pending_stack_adjust ();
10235 do_tablejump (index, TYPE_MODE (index_type),
10236 convert_modes (TYPE_MODE (index_type),
10237 TYPE_MODE (TREE_TYPE (range)),
10238 expand_normal (range),
10239 TYPE_UNSIGNED (TREE_TYPE (range))),
10240 table_label, default_label);
10241 return 1;
10244 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10245 static rtx
10246 const_vector_from_tree (tree exp)
10248 rtvec v;
10249 int units, i;
10250 tree link, elt;
10251 enum machine_mode inner, mode;
10253 mode = TYPE_MODE (TREE_TYPE (exp));
10255 if (initializer_zerop (exp))
10256 return CONST0_RTX (mode);
10258 units = GET_MODE_NUNITS (mode);
10259 inner = GET_MODE_INNER (mode);
10261 v = rtvec_alloc (units);
10263 link = TREE_VECTOR_CST_ELTS (exp);
10264 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10266 elt = TREE_VALUE (link);
10268 if (TREE_CODE (elt) == REAL_CST)
10269 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10270 inner);
10271 else if (TREE_CODE (elt) == FIXED_CST)
10272 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10273 inner);
10274 else
10275 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10276 inner);
10279 /* Initialize remaining elements to 0. */
10280 for (; i < units; ++i)
10281 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10283 return gen_rtx_CONST_VECTOR (mode, v);
10287 /* Build a decl for an EH personality function named NAME. */
10289 tree
10290 build_personality_function (const char *name)
10292 tree decl, type;
10294 type = build_function_type_list (integer_type_node, integer_type_node,
10295 long_long_unsigned_type_node,
10296 ptr_type_node, ptr_type_node, NULL_TREE);
10297 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10298 get_identifier (name), type);
10299 DECL_ARTIFICIAL (decl) = 1;
10300 DECL_EXTERNAL (decl) = 1;
10301 TREE_PUBLIC (decl) = 1;
10303 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10304 are the flags assigned by targetm.encode_section_info. */
10305 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10307 return decl;
10310 /* Extracts the personality function of DECL and returns the corresponding
10311 libfunc. */
10313 rtx
10314 get_personality_function (tree decl)
10316 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10317 enum eh_personality_kind pk;
10319 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10320 if (pk == eh_personality_none)
10321 return NULL;
10323 if (!personality
10324 && pk == eh_personality_any)
10325 personality = lang_hooks.eh_personality ();
10327 if (pk == eh_personality_lang)
10328 gcc_assert (personality != NULL_TREE);
10330 return XEXP (DECL_RTL (personality), 0);
10333 #include "gt-expr.h"