/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
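
/* Illustrative example (added for exposition, not target code from this
   file): with STACK_GROWS_DOWNWARD defined, STACK_PUSH_CODE is PRE_DEC,
   so a push of an SImode value SRC is represented as

       (set (mem:SI (pre_dec (reg sp))) src)

   i.e. the stack pointer is decremented by the operand size before the
   store; on an upward-growing stack PRE_INC is used instead.  */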
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
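
/* A worked example of the heuristic above, using hypothetical target
   values (MOVE_MAX_PIECES == 8, MOVE_RATIO == 3, 64-bit alignment):
   a 16-byte copy costs move_by_pieces_ninsns (16, 64, 9) == 2 DImode
   moves, and 2 < 3, so MOVE_BY_PIECES_P is true and the copy is
   expanded inline; a 32-byte copy would cost 4 moves and instead
   falls through to a movmem pattern or a memcpy call.  */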
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
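
/* Usage sketch (an illustration, not a call site in this file): to
   sign-extend a QImode register QI_REG into a fresh SImode register:

       rtx dst = gen_reg_rtx (SImode);
       convert_move (dst, qi_reg, 0);

   Passing a nonzero UNSIGNEDP instead makes the same call emit a
   zero-extension.  */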
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into a vector mode is always
     equivalent to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
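
/* A worked example of the constant-narrowing path above: converting
   (const_int 300) from HImode to QImode satisfies the CONST_INT test,
   goes through gen_lowpart, and yields (const_int 44) -- the low-order
   byte, since 300 & 0xff == 44 -- without emitting any insns.  */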
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
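
/* For example, on a host with a 64-bit HOST_WIDE_INT the second operand
   of the MIN is 16, so STORE_MAX_PIECES equals MOVE_MAX_PIECES on any
   target where MOVE_MAX_PIECES <= 16.  */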
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
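
/* Usage sketch (hypothetical, e.g. when expanding __builtin_mempcpy):

       rtx end = move_by_pieces (dst_mem, src_mem, 10, align, 1);

   copies 10 bytes and returns a QImode MEM addressing the byte just
   past the copied block; with ENDP == 2 the returned MEM addresses
   the last byte written, as stpcpy wants.  */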
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
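
/* A worked example: with l == 7 and an alignment that permits SImode
   but nothing wider (hypothetically MOVE_MAX_PIECES == 4), the loop
   counts one SImode move (l becomes 3), one HImode move (l becomes 1)
   and one QImode move, returning 3.  */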
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
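
/* Usage sketch (an illustration; the caller is typically the expander
   for a struct assignment or a string builtin):

       retval = emit_block_move (x, y, GEN_INT (5), BLOCK_OP_NORMAL);

   A small constant size like this normally takes the move_by_pieces
   path; larger or variable sizes fall through to a movmem pattern, a
   memcpy libcall, or the byte-copy loop, in that order.  */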
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
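
/* A worked example of the size check above: for a pattern whose size
   operand has QImode, GET_MODE_MASK (QImode) >> 1 is 127, so only
   constant sizes up to 127 are accepted via that clause; counter modes
   at least a word wide bypass the constant-size restriction.  */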
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
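
/* The loop emitted above is equivalent to this C sketch (the real code
   keeps the iterator in ITER_MODE and compares unsigned):

       for (i = 0; i < size; i++)
         ((char *) x)[i] = ((char *) y)[i];  */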
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
1975 if (MEM_P (dest)
1976 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1977 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1978 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1979 && bytelen == GET_MODE_SIZE (mode))
1980 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1981 else
1982 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1983 mode, tmps[i]);
1986 /* Copy from the pseudo into the (probable) hard reg. */
1987 if (orig_dst != dst)
1988 emit_move_insn (orig_dst, dst);
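/* Editorial sketch, not part of the original source: spilling a
   multi-register return value SRC (a PARALLEL) of aggregate type TYPE
   into a fresh stack temporary:

     HOST_WIDE_INT size = int_size_in_bytes (type);
     rtx mem = assign_stack_temp (BLKmode, size, 0);
     emit_group_store (mem, src, type, size);

   SRC and TYPE are assumed to be in hand; the BLKmode temporary takes
   the MEM_P destination path handled above.  */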
1991 /* Generate code to copy a BLKmode object of TYPE out of a
1992 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1993 is null, a stack temporary is created. TGTBLK is returned.
1995 The purpose of this routine is to handle functions that return
1996 BLKmode structures in registers. Some machines (the PA for example)
1997 want to return all small structures in registers regardless of the
1998 structure's alignment. */
2000 rtx
2001 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2003 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2004 rtx src = NULL, dst = NULL;
2005 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2006 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2008 if (tgtblk == 0)
2010 tgtblk = assign_temp (build_qualified_type (type,
2011 (TYPE_QUALS (type)
2012 | TYPE_QUAL_CONST)),
2013 0, 1, 1);
2014 preserve_temp_slots (tgtblk);
2017 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2018 into a new pseudo which is a full word. */
2020 if (GET_MODE (srcreg) != BLKmode
2021 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2022 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2024 /* If the structure doesn't take up a whole number of words, see whether
2025 SRCREG is padded on the left or on the right. If it's on the left,
2026 set PADDING_CORRECTION to the number of bits to skip.
2028 In most ABIs, the structure will be returned at the least significant end of
2029 the register, which translates to right padding on little-endian
2030 targets and left padding on big-endian targets. The opposite
2031 holds if the structure is returned at the most significant
2032 end of the register. */
2033 if (bytes % UNITS_PER_WORD != 0
2034 && (targetm.calls.return_in_msb (type)
2035 ? !BYTES_BIG_ENDIAN
2036 : BYTES_BIG_ENDIAN))
2037 padding_correction
2038 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2040 /* Copy the structure BITSIZE bits at a time.
2042 We could probably emit more efficient code for machines which do not use
2043 strict alignment, but it doesn't seem worth the effort at the current
2044 time. */
2045 for (bitpos = 0, xbitpos = padding_correction;
2046 bitpos < bytes * BITS_PER_UNIT;
2047 bitpos += bitsize, xbitpos += bitsize)
2049 /* We need a new source operand each time xbitpos is on a
2050 word boundary and when xbitpos == padding_correction
2051 (the first time through). */
2052 if (xbitpos % BITS_PER_WORD == 0
2053 || xbitpos == padding_correction)
2054 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2055 GET_MODE (srcreg));
2057 /* We need a new destination operand each time bitpos is on
2058 a word boundary. */
2059 if (bitpos % BITS_PER_WORD == 0)
2060 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2062 /* Use xbitpos for the source extraction (right justified) and
2063 bitpos for the destination store (left justified). */
2064 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2065 extract_bit_field (src, bitsize,
2066 xbitpos % BITS_PER_WORD, 1,
2067 NULL_RTX, word_mode, word_mode));
2070 return tgtblk;
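/* Editorial sketch, not part of the original source: when expanding a
   call whose value comes back in registers as a BLKmode struct, a caller
   can simply do

     rtx blk = copy_blkmode_from_reg (NULL_RTX, valreg, TREE_TYPE (exp));

   where VALREG and EXP are hypothetical; the null TGTBLK makes the
   routine allocate and return the stack temporary itself.  */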
2073 /* Add a USE expression for REG to the (possibly empty) list pointed
2074 to by CALL_FUSAGE. REG must denote a hard register. */
2076 void
2077 use_reg (rtx *call_fusage, rtx reg)
2079 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2081 *call_fusage
2082 = gen_rtx_EXPR_LIST (VOIDmode,
2083 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2086 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2087 starting at REGNO. All of these registers must be hard registers. */
2089 void
2090 use_regs (rtx *call_fusage, int regno, int nregs)
2092 int i;
2094 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2096 for (i = 0; i < nregs; i++)
2097 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2100 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2101 PARALLEL REGS. This is for calls that pass values in multiple
2102 non-contiguous locations. The Irix 6 ABI has examples of this. */
2104 void
2105 use_group_regs (rtx *call_fusage, rtx regs)
2107 int i;
2109 for (i = 0; i < XVECLEN (regs, 0); i++)
2111 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2113 /* A NULL entry means the parameter goes both on the stack and in
2114 registers. This can also be a MEM for targets that pass values
2115 partially on the stack and partially in registers. */
2116 if (reg != 0 && REG_P (reg))
2117 use_reg (call_fusage, reg);
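/* Editorial sketch, not part of the original source: recording register
   uses for a call, e.g. one scalar argument in hard register 3 (a
   hypothetical register number) plus a PARALLEL-described argument:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 3));
     use_group_regs (&call_fusage, parallel_arg);

   The resulting CALL_FUSAGE list is then attached to the CALL_INSN by
   the code that emits the call.  */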
2122 /* Determine whether the LEN bytes generated by CONSTFUN can be
2123 stored to memory using several move instructions. CONSTFUNDATA is
2124 a pointer which will be passed as argument in every CONSTFUN call.
2125 ALIGN is maximum alignment we can assume. Return nonzero if a
2126 call to store_by_pieces should succeed. */
2128 int
2129 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2130 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2131 void *constfundata, unsigned int align)
2133 unsigned HOST_WIDE_INT l;
2134 unsigned int max_size;
2135 HOST_WIDE_INT offset = 0;
2136 enum machine_mode mode, tmode;
2137 enum insn_code icode;
2138 int reverse;
2139 rtx cst;
2141 if (len == 0)
2142 return 1;
2144 if (! STORE_BY_PIECES_P (len, align))
2145 return 0;
2147 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2148 if (align >= GET_MODE_ALIGNMENT (tmode))
2149 align = GET_MODE_ALIGNMENT (tmode);
2150 else
2152 enum machine_mode xmode;
2154 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2155 tmode != VOIDmode;
2156 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2157 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2158 || SLOW_UNALIGNED_ACCESS (tmode, align))
2159 break;
2161 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2164 /* We would first store what we can in the largest integer mode, then go to
2165 successively smaller modes. */
2167 for (reverse = 0;
2168 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2169 reverse++)
2171 l = len;
2172 mode = VOIDmode;
2173 max_size = STORE_MAX_PIECES + 1;
2174 while (max_size > 1)
2176 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2177 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2178 if (GET_MODE_SIZE (tmode) < max_size)
2179 mode = tmode;
2181 if (mode == VOIDmode)
2182 break;
2184 icode = mov_optab->handlers[(int) mode].insn_code;
2185 if (icode != CODE_FOR_nothing
2186 && align >= GET_MODE_ALIGNMENT (mode))
2188 unsigned int size = GET_MODE_SIZE (mode);
2190 while (l >= size)
2192 if (reverse)
2193 offset -= size;
2195 cst = (*constfun) (constfundata, offset, mode);
2196 if (!LEGITIMATE_CONSTANT_P (cst))
2197 return 0;
2199 if (!reverse)
2200 offset += size;
2202 l -= size;
2206 max_size = GET_MODE_SIZE (mode);
2209 /* The code above should have handled everything. */
2210 gcc_assert (!l);
2213 return 1;
2216 /* Generate several move instructions to store LEN bytes generated by
2217 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2218 pointer which will be passed as argument in every CONSTFUN call.
2219 ALIGN is maximum alignment we can assume.
2220 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
2221 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la
2222 stpcpy.  */
2224 rtx
2225 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2226 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2227 void *constfundata, unsigned int align, int endp)
2229 struct store_by_pieces data;
2231 if (len == 0)
2233 gcc_assert (endp != 2);
2234 return to;
2237 gcc_assert (STORE_BY_PIECES_P (len, align));
2238 data.constfun = constfun;
2239 data.constfundata = constfundata;
2240 data.len = len;
2241 data.to = to;
2242 store_by_pieces_1 (&data, align);
2243 if (endp)
2245 rtx to1;
2247 gcc_assert (!data.reverse);
2248 if (data.autinc_to)
2250 if (endp == 2)
2252 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2253 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2254 else
2255 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2256 -1));
2258 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2259 data.offset);
2261 else
2263 if (endp == 2)
2264 --data.offset;
2265 to1 = adjust_address (data.to, QImode, data.offset);
2267 return to1;
2269 else
2270 return data.to;
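/* Editorial sketch, not part of the original source: the intended calling
   pattern is to probe with can_store_by_pieces and only then commit,
   passing the same callback to both.  A minimal callback that yields a
   zero of whatever mode is requested (clear_by_pieces_1 below is exactly
   this) would be:

     static rtx
     zeros_read_str (void *data ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                     enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zeros_read_str, NULL, align))
       store_by_pieces (dest, len, zeros_read_str, NULL, align, 0);

   Callbacks that supply real data instead build a constant from the
   bytes at OFFSET in MODE, as the *_read_str helpers in builtins.c do.  */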
2273 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2274 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2276 static void
2277 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2279 struct store_by_pieces data;
2281 if (len == 0)
2282 return;
2284 data.constfun = clear_by_pieces_1;
2285 data.constfundata = NULL;
2286 data.len = len;
2287 data.to = to;
2288 store_by_pieces_1 (&data, align);
2291 /* Callback routine for clear_by_pieces.
2292 Return const0_rtx unconditionally. */
2294 static rtx
2295 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2296 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2297 enum machine_mode mode ATTRIBUTE_UNUSED)
2299 return const0_rtx;
2302 /* Subroutine of clear_by_pieces and store_by_pieces.
2303 Generate several move instructions to store LEN bytes of block TO. (A MEM
2304 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2306 static void
2307 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2308 unsigned int align ATTRIBUTE_UNUSED)
2310 rtx to_addr = XEXP (data->to, 0);
2311 unsigned int max_size = STORE_MAX_PIECES + 1;
2312 enum machine_mode mode = VOIDmode, tmode;
2313 enum insn_code icode;
2315 data->offset = 0;
2316 data->to_addr = to_addr;
2317 data->autinc_to
2318 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2319 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2321 data->explicit_inc_to = 0;
2322 data->reverse
2323 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2324 if (data->reverse)
2325 data->offset = data->len;
2327 /* If storing requires more than two move insns,
2328 copy addresses to registers (to make displacements shorter)
2329 and use post-increment if available. */
2330 if (!data->autinc_to
2331 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2333 /* Determine the main mode we'll be using. */
2334 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2335 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2336 if (GET_MODE_SIZE (tmode) < max_size)
2337 mode = tmode;
2339 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2341 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2342 data->autinc_to = 1;
2343 data->explicit_inc_to = -1;
2346 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2347 && ! data->autinc_to)
2349 data->to_addr = copy_addr_to_reg (to_addr);
2350 data->autinc_to = 1;
2351 data->explicit_inc_to = 1;
2354 if ( !data->autinc_to && CONSTANT_P (to_addr))
2355 data->to_addr = copy_addr_to_reg (to_addr);
2358 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2359 if (align >= GET_MODE_ALIGNMENT (tmode))
2360 align = GET_MODE_ALIGNMENT (tmode);
2361 else
2363 enum machine_mode xmode;
2365 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2366 tmode != VOIDmode;
2367 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2368 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2369 || SLOW_UNALIGNED_ACCESS (tmode, align))
2370 break;
2372 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2375 /* First store what we can in the largest integer mode, then go to
2376 successively smaller modes. */
2378 while (max_size > 1)
2380 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2381 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2382 if (GET_MODE_SIZE (tmode) < max_size)
2383 mode = tmode;
2385 if (mode == VOIDmode)
2386 break;
2388 icode = mov_optab->handlers[(int) mode].insn_code;
2389 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2390 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2392 max_size = GET_MODE_SIZE (mode);
2395 /* The code above should have handled everything. */
2396 gcc_assert (!data->len);
2399 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2400 with move instructions for mode MODE. GENFUN is the gen_... function
2401 to make a move insn for that mode. DATA has all the other info. */
2403 static void
2404 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2405 struct store_by_pieces *data)
2407 unsigned int size = GET_MODE_SIZE (mode);
2408 rtx to1, cst;
2410 while (data->len >= size)
2412 if (data->reverse)
2413 data->offset -= size;
2415 if (data->autinc_to)
2416 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2417 data->offset);
2418 else
2419 to1 = adjust_address (data->to, mode, data->offset);
2421 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2422 emit_insn (gen_add2_insn (data->to_addr,
2423 GEN_INT (-(HOST_WIDE_INT) size)));
2425 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2426 emit_insn ((*genfun) (to1, cst));
2428 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2429 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2431 if (! data->reverse)
2432 data->offset += size;
2434 data->len -= size;
2438 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2439 its length in bytes. */
2441 rtx
2442 clear_storage (rtx object, rtx size, enum block_op_methods method)
2444 enum machine_mode mode = GET_MODE (object);
2445 unsigned int align;
2447 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2449 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2450 just move a zero. Otherwise, do this a piece at a time. */
2451 if (mode != BLKmode
2452 && GET_CODE (size) == CONST_INT
2453 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2455 rtx zero = CONST0_RTX (mode);
2456 if (zero != NULL)
2458 emit_move_insn (object, zero);
2459 return NULL;
2462 if (COMPLEX_MODE_P (mode))
2464 zero = CONST0_RTX (GET_MODE_INNER (mode));
2465 if (zero != NULL)
2467 write_complex_part (object, zero, 0);
2468 write_complex_part (object, zero, 1);
2469 return NULL;
2474 if (size == const0_rtx)
2475 return NULL;
2477 align = MEM_ALIGN (object);
2479 if (GET_CODE (size) == CONST_INT
2480 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2481 clear_by_pieces (object, INTVAL (size), align);
2482 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2484 else
2485 return clear_storage_via_libcall (object, size,
2486 method == BLOCK_OP_TAILCALL);
2488 return NULL;
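/* Editorial sketch, not part of the original source: zeroing a 64-byte
   BLKmode object (the size is an arbitrary example):

     rtx mem = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);

   Depending on the size, the alignment and the target, this ends up as
   inline stores via clear_by_pieces, a setmem pattern, or a libcall to
   memset.  */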
2491 /* A subroutine of clear_storage. Expand a call to memset.
2492 Return the return value of memset, 0 otherwise. */
2494 static rtx
2495 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2497 tree call_expr, arg_list, fn, object_tree, size_tree;
2498 enum machine_mode size_mode;
2499 rtx retval;
2501 /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2502 wrap those pseudos in tree nodes and use them later.  */
2504 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2506 size_mode = TYPE_MODE (sizetype);
2507 size = convert_to_mode (size_mode, size, 1);
2508 size = copy_to_mode_reg (size_mode, size);
2510 /* It is incorrect to use the libcall calling conventions to call
2511 memset in this context. This could be a user call to memset and
2512 the user may wish to examine the return value from memset. For
2513 targets where libcalls and normal calls have different conventions
2514 for returning pointers, we could end up generating incorrect code. */
2516 object_tree = make_tree (ptr_type_node, object);
2517 size_tree = make_tree (sizetype, size);
2519 fn = clear_storage_libcall_fn (true);
2520 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2521 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2522 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2524 /* Now we have to build up the CALL_EXPR itself. */
2525 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2526 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2527 call_expr, arg_list, NULL_TREE);
2528 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2530 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2532 return retval;
2535 /* A subroutine of clear_storage_via_libcall. Create the tree node
2536 for the function we use for block clears. The first time FOR_CALL
2537 is true, we call assemble_external. */
2539 static GTY(()) tree block_clear_fn;
2541 void
2542 init_block_clear_fn (const char *asmspec)
2544 if (!block_clear_fn)
2546 tree fn, args;
2548 fn = get_identifier ("memset");
2549 args = build_function_type_list (ptr_type_node, ptr_type_node,
2550 integer_type_node, sizetype,
2551 NULL_TREE);
2553 fn = build_decl (FUNCTION_DECL, fn, args);
2554 DECL_EXTERNAL (fn) = 1;
2555 TREE_PUBLIC (fn) = 1;
2556 DECL_ARTIFICIAL (fn) = 1;
2557 TREE_NOTHROW (fn) = 1;
2559 block_clear_fn = fn;
2562 if (asmspec)
2563 set_user_assembler_name (block_clear_fn, asmspec);
2566 static tree
2567 clear_storage_libcall_fn (int for_call)
2569 static bool emitted_extern;
2571 if (!block_clear_fn)
2572 init_block_clear_fn (NULL);
2574 if (for_call && !emitted_extern)
2576 emitted_extern = true;
2577 make_decl_rtl (block_clear_fn);
2578 assemble_external (block_clear_fn);
2581 return block_clear_fn;
2584 /* Expand a setmem pattern; return true if successful. */
2586 bool
2587 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2589 /* Try the most limited insn first, because there's no point
2590 including more than one in the machine description unless
2591 the more limited one has some advantage. */
2593 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2594 enum machine_mode mode;
2596 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2597 mode = GET_MODE_WIDER_MODE (mode))
2599 enum insn_code code = setmem_optab[(int) mode];
2600 insn_operand_predicate_fn pred;
2602 if (code != CODE_FOR_nothing
2603 /* We don't need MODE to be narrower than
2604 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2605 the mode mask, as it is returned by the macro, it will
2606 definitely be less than the actual mode mask. */
2607 && ((GET_CODE (size) == CONST_INT
2608 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2609 <= (GET_MODE_MASK (mode) >> 1)))
2610 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2611 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2612 || (*pred) (object, BLKmode))
2613 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2614 || (*pred) (opalign, VOIDmode)))
2616 rtx opsize, opchar;
2617 enum machine_mode char_mode;
2618 rtx last = get_last_insn ();
2619 rtx pat;
2621 opsize = convert_to_mode (mode, size, 1);
2622 pred = insn_data[(int) code].operand[1].predicate;
2623 if (pred != 0 && ! (*pred) (opsize, mode))
2624 opsize = copy_to_mode_reg (mode, opsize);
2626 opchar = val;
2627 char_mode = insn_data[(int) code].operand[2].mode;
2628 if (char_mode != VOIDmode)
2630 opchar = convert_to_mode (char_mode, opchar, 1);
2631 pred = insn_data[(int) code].operand[2].predicate;
2632 if (pred != 0 && ! (*pred) (opchar, char_mode))
2633 opchar = copy_to_mode_reg (char_mode, opchar);
2636 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2637 if (pat)
2639 emit_insn (pat);
2640 return true;
2642 else
2643 delete_insns_since (last);
2647 return false;
2651 /* Write to one of the components of the complex value CPLX. Write VAL to
2652 the real part if IMAG_P is false, and the imaginary part if it's true. */
2654 static void
2655 write_complex_part (rtx cplx, rtx val, bool imag_p)
2657 enum machine_mode cmode;
2658 enum machine_mode imode;
2659 unsigned ibitsize;
2661 if (GET_CODE (cplx) == CONCAT)
2663 emit_move_insn (XEXP (cplx, imag_p), val);
2664 return;
2667 cmode = GET_MODE (cplx);
2668 imode = GET_MODE_INNER (cmode);
2669 ibitsize = GET_MODE_BITSIZE (imode);
2671 /* For MEMs simplify_gen_subreg may generate an invalid new address
2672 because, e.g., the original address is considered mode-dependent
2673 by the target, which restricts simplify_subreg from invoking
2674 adjust_address_nv. Instead of preparing fallback support for an
2675 invalid address, we call adjust_address_nv directly. */
2676 if (MEM_P (cplx))
2678 emit_move_insn (adjust_address_nv (cplx, imode,
2679 imag_p ? GET_MODE_SIZE (imode) : 0),
2680 val);
2681 return;
2684 /* If the sub-object is at least word sized, then we know that subregging
2685 will work. This special case is important, since store_bit_field
2686 wants to operate on integer modes, and there's rarely an OImode to
2687 correspond to TCmode. */
2688 if (ibitsize >= BITS_PER_WORD
2689 /* For hard regs we have exact predicates. Assume we can split
2690 the original object if it spans an even number of hard regs.
2691 This special case is important for SCmode on 64-bit platforms
2692 where the natural size of floating-point regs is 32-bit. */
2693 || (REG_P (cplx)
2694 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2695 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2697 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2698 imag_p ? GET_MODE_SIZE (imode) : 0);
2699 if (part)
2701 emit_move_insn (part, val);
2702 return;
2704 else
2705 /* simplify_gen_subreg may fail for sub-word MEMs. */
2706 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2709 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2712 /* Extract one of the components of the complex value CPLX. Extract the
2713 real part if IMAG_P is false, and the imaginary part if it's true. */
2715 static rtx
2716 read_complex_part (rtx cplx, bool imag_p)
2718 enum machine_mode cmode, imode;
2719 unsigned ibitsize;
2721 if (GET_CODE (cplx) == CONCAT)
2722 return XEXP (cplx, imag_p);
2724 cmode = GET_MODE (cplx);
2725 imode = GET_MODE_INNER (cmode);
2726 ibitsize = GET_MODE_BITSIZE (imode);
2728 /* Special case reads from complex constants that got spilled to memory. */
2729 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2731 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2732 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2734 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2735 if (CONSTANT_CLASS_P (part))
2736 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2740 /* For MEMs simplify_gen_subreg may generate an invalid new address
2741 because, e.g., the original address is considered mode-dependent
2742 by the target, which restricts simplify_subreg from invoking
2743 adjust_address_nv. Instead of preparing fallback support for an
2744 invalid address, we call adjust_address_nv directly. */
2745 if (MEM_P (cplx))
2746 return adjust_address_nv (cplx, imode,
2747 imag_p ? GET_MODE_SIZE (imode) : 0);
2749 /* If the sub-object is at least word sized, then we know that subregging
2750 will work. This special case is important, since extract_bit_field
2751 wants to operate on integer modes, and there's rarely an OImode to
2752 correspond to TCmode. */
2753 if (ibitsize >= BITS_PER_WORD
2754 /* For hard regs we have exact predicates. Assume we can split
2755 the original object if it spans an even number of hard regs.
2756 This special case is important for SCmode on 64-bit platforms
2757 where the natural size of floating-point regs is 32-bit. */
2758 || (REG_P (cplx)
2759 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2760 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2762 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2763 imag_p ? GET_MODE_SIZE (imode) : 0);
2764 if (ret)
2765 return ret;
2766 else
2767 /* simplify_gen_subreg may fail for sub-word MEMs. */
2768 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2771 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2772 true, NULL_RTX, imode, imode);
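/* Editorial sketch, not part of the original source: a by-parts complex
   copy pairs the two helpers above, exactly as emit_move_complex does
   further down when no combined move is available:

     write_complex_part (x, read_complex_part (y, false), false);
     write_complex_part (x, read_complex_part (y, true), true);

   For CONCATs the helpers reduce to XEXP accesses, for MEMs to
   adjust_address_nv, and otherwise to the bit-field routines.  */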
2775 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2776 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2777 represented in NEW_MODE. If FORCE is true, this will never happen, as
2778 we'll force-create a SUBREG if needed. */
2780 static rtx
2781 emit_move_change_mode (enum machine_mode new_mode,
2782 enum machine_mode old_mode, rtx x, bool force)
2784 rtx ret;
2786 if (reload_in_progress && MEM_P (x))
2788 /* We can't use gen_lowpart here because it may call change_address
2789 which is not appropriate if we were called when a reload was in
2790 progress. We don't have to worry about changing the address since
2791 the size in bytes is supposed to be the same. Copy the MEM to
2792 change the mode and move any substitutions from the old MEM to
2793 the new one. */
2795 ret = adjust_address_nv (x, new_mode, 0);
2796 copy_replacements (x, ret);
2798 else
2800 /* Note that we do want simplify_subreg's behavior of validating
2801 that the new mode is ok for a hard register. If we were to use
2802 simplify_gen_subreg, we would create the subreg, but would
2803 probably run into the target not being able to implement it. */
2804 /* Except, of course, when FORCE is true, when this is exactly what
2805 we want. Which is needed for CCmodes on some targets. */
2806 if (force)
2807 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2808 else
2809 ret = simplify_subreg (new_mode, x, old_mode, 0);
2812 return ret;
2815 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2816 an integer mode of the same size as MODE. Returns the instruction
2817 emitted, or NULL if such a move could not be generated. */
2819 static rtx
2820 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2822 enum machine_mode imode;
2823 enum insn_code code;
2825 /* There must exist a mode of the exact size we require. */
2826 imode = int_mode_for_mode (mode);
2827 if (imode == BLKmode)
2828 return NULL_RTX;
2830 /* The target must support moves in this mode. */
2831 code = mov_optab->handlers[imode].insn_code;
2832 if (code == CODE_FOR_nothing)
2833 return NULL_RTX;
2835 x = emit_move_change_mode (imode, mode, x, force);
2836 if (x == NULL_RTX)
2837 return NULL_RTX;
2838 y = emit_move_change_mode (imode, mode, y, force);
2839 if (y == NULL_RTX)
2840 return NULL_RTX;
2841 return emit_insn (GEN_FCN (code) (x, y));
2844 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2845 Return an equivalent MEM that does not use an auto-increment. */
2847 static rtx
2848 emit_move_resolve_push (enum machine_mode mode, rtx x)
2850 enum rtx_code code = GET_CODE (XEXP (x, 0));
2851 HOST_WIDE_INT adjust;
2852 rtx temp;
2854 adjust = GET_MODE_SIZE (mode);
2855 #ifdef PUSH_ROUNDING
2856 adjust = PUSH_ROUNDING (adjust);
2857 #endif
2858 if (code == PRE_DEC || code == POST_DEC)
2859 adjust = -adjust;
2860 else if (code == PRE_MODIFY || code == POST_MODIFY)
2862 rtx expr = XEXP (XEXP (x, 0), 1);
2863 HOST_WIDE_INT val;
2865 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2866 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2867 val = INTVAL (XEXP (expr, 1));
2868 if (GET_CODE (expr) == MINUS)
2869 val = -val;
2870 gcc_assert (adjust == val || adjust == -val);
2871 adjust = val;
2874 /* Do not use anti_adjust_stack, since we don't want to update
2875 stack_pointer_delta. */
2876 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2877 GEN_INT (adjust), stack_pointer_rtx,
2878 0, OPTAB_LIB_WIDEN);
2879 if (temp != stack_pointer_rtx)
2880 emit_move_insn (stack_pointer_rtx, temp);
2882 switch (code)
2884 case PRE_INC:
2885 case PRE_DEC:
2886 case PRE_MODIFY:
2887 temp = stack_pointer_rtx;
2888 break;
2889 case POST_INC:
2890 case POST_DEC:
2891 case POST_MODIFY:
2892 temp = plus_constant (stack_pointer_rtx, -adjust);
2893 break;
2894 default:
2895 gcc_unreachable ();
2898 return replace_equiv_address (x, temp);
2901 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2902 X is known to satisfy push_operand, and MODE is known to be complex.
2903 Returns the last instruction emitted. */
2905 static rtx
2906 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2908 enum machine_mode submode = GET_MODE_INNER (mode);
2909 bool imag_first;
2911 #ifdef PUSH_ROUNDING
2912 unsigned int submodesize = GET_MODE_SIZE (submode);
2914 /* If we are pushing to the stack but the size is smaller than what the
2915 machine can push exactly, we need to use move instructions. */
2916 if (PUSH_ROUNDING (submodesize) != submodesize)
2918 x = emit_move_resolve_push (mode, x);
2919 return emit_move_insn (x, y);
2921 #endif
2923 /* Note that the real part always precedes the imag part in memory
2924 regardless of the machine's endianness. */
2925 switch (GET_CODE (XEXP (x, 0)))
2927 case PRE_DEC:
2928 case POST_DEC:
2929 imag_first = true;
2930 break;
2931 case PRE_INC:
2932 case POST_INC:
2933 imag_first = false;
2934 break;
2935 default:
2936 gcc_unreachable ();
2939 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2940 read_complex_part (y, imag_first));
2941 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 read_complex_part (y, !imag_first));
2945 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2946 MODE is known to be complex. Returns the last instruction emitted. */
2948 static rtx
2949 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2951 bool try_int;
2953 /* Need to take special care for pushes, to maintain proper ordering
2954 of the data, and possibly extra padding. */
2955 if (push_operand (x, mode))
2956 return emit_move_complex_push (mode, x, y);
2958 /* See if we can coerce the target into moving both values at once. */
2960 /* Move floating point as parts. */
2961 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2962 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2963 try_int = false;
2964 /* Not possible if the values are inherently not adjacent. */
2965 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2966 try_int = false;
2967 /* Is possible if both are registers (or subregs of registers). */
2968 else if (register_operand (x, mode) && register_operand (y, mode))
2969 try_int = true;
2970 /* If one of the operands is a memory, and alignment constraints
2971 are friendly enough, we may be able to do combined memory operations.
2972 We do not attempt this if Y is a constant because that combination is
2973 usually better with the by-parts thing below. */
2974 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2975 && (!STRICT_ALIGNMENT
2976 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2977 try_int = true;
2978 else
2979 try_int = false;
2981 if (try_int)
2983 rtx ret;
2985 /* For memory to memory moves, optimal behavior can be had with the
2986 existing block move logic. */
2987 if (MEM_P (x) && MEM_P (y))
2989 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2990 BLOCK_OP_NO_LIBCALL);
2991 return get_last_insn ();
2994 ret = emit_move_via_integer (mode, x, y, true);
2995 if (ret)
2996 return ret;
2999 /* Show the output dies here. This is necessary for SUBREGs
3000 of pseudos since we cannot track their lifetimes correctly;
3001 hard regs shouldn't appear here except as return values. */
3002 if (!reload_completed && !reload_in_progress
3003 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3004 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3006 write_complex_part (x, read_complex_part (y, false), false);
3007 write_complex_part (x, read_complex_part (y, true), true);
3008 return get_last_insn ();
3011 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3012 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3014 static rtx
3015 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3017 rtx ret;
3019 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3020 if (mode != CCmode)
3022 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3023 if (code != CODE_FOR_nothing)
3025 x = emit_move_change_mode (CCmode, mode, x, true);
3026 y = emit_move_change_mode (CCmode, mode, y, true);
3027 return emit_insn (GEN_FCN (code) (x, y));
3031 /* Otherwise, find the MODE_INT mode of the same width. */
3032 ret = emit_move_via_integer (mode, x, y, false);
3033 gcc_assert (ret != NULL);
3034 return ret;
3037 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3038 MODE is any multi-word or full-word mode that lacks a move_insn
3039 pattern. Note that you will get better code if you define such
3040 patterns, even if they must turn into multiple assembler instructions. */
3042 static rtx
3043 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3045 rtx last_insn = 0;
3046 rtx seq, inner;
3047 bool need_clobber;
3048 int i;
3050 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3052 /* If X is a push on the stack, do the push now and replace
3053 X with a reference to the stack pointer. */
3054 if (push_operand (x, mode))
3055 x = emit_move_resolve_push (mode, x);
3057 /* If we are in reload, see if either operand is a MEM whose address
3058 is scheduled for replacement. */
3059 if (reload_in_progress && MEM_P (x)
3060 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3061 x = replace_equiv_address_nv (x, inner);
3062 if (reload_in_progress && MEM_P (y)
3063 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3064 y = replace_equiv_address_nv (y, inner);
3066 start_sequence ();
3068 need_clobber = false;
3069 for (i = 0;
3070 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3071 i++)
3073 rtx xpart = operand_subword (x, i, 1, mode);
3074 rtx ypart = operand_subword (y, i, 1, mode);
3076 /* If we can't get a part of Y, put Y into memory if it is a
3077 constant. Otherwise, force it into a register. Then we must
3078 be able to get a part of Y. */
3079 if (ypart == 0 && CONSTANT_P (y))
3081 y = force_const_mem (mode, y);
3082 ypart = operand_subword (y, i, 1, mode);
3084 else if (ypart == 0)
3085 ypart = operand_subword_force (y, i, mode);
3087 gcc_assert (xpart && ypart);
3089 need_clobber |= (GET_CODE (xpart) == SUBREG);
3091 last_insn = emit_move_insn (xpart, ypart);
3094 seq = get_insns ();
3095 end_sequence ();
3097 /* Show the output dies here. This is necessary for SUBREGs
3098 of pseudos since we cannot track their lifetimes correctly;
3099 hard regs shouldn't appear here except as return values.
3100 We never want to emit such a clobber after reload. */
3101 if (x != y
3102 && ! (reload_in_progress || reload_completed)
3103 && need_clobber != 0)
3104 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3106 emit_insn (seq);
3108 return last_insn;
3111 /* Low level part of emit_move_insn.
3112 Called just like emit_move_insn, but assumes X and Y
3113 are basically valid. */
3115 rtx
3116 emit_move_insn_1 (rtx x, rtx y)
3118 enum machine_mode mode = GET_MODE (x);
3119 enum insn_code code;
3121 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3123 code = mov_optab->handlers[mode].insn_code;
3124 if (code != CODE_FOR_nothing)
3125 return emit_insn (GEN_FCN (code) (x, y));
3127 /* Expand complex moves by moving real part and imag part. */
3128 if (COMPLEX_MODE_P (mode))
3129 return emit_move_complex (mode, x, y);
3131 if (GET_MODE_CLASS (mode) == MODE_CC)
3132 return emit_move_ccmode (mode, x, y);
3134 /* Try using a move pattern for the corresponding integer mode. This is
3135 only safe when simplify_subreg can convert MODE constants into integer
3136 constants. At present, it can only do this reliably if the value
3137 fits within a HOST_WIDE_INT. */
3138 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3140 rtx ret = emit_move_via_integer (mode, x, y, false);
3141 if (ret)
3142 return ret;
3145 return emit_move_multi_word (mode, x, y);
3148 /* Generate code to copy Y into X.
3149 Both Y and X must have the same mode, except that
3150 Y can be a constant with VOIDmode.
3151 This mode cannot be BLKmode; use emit_block_move for that.
3153 Return the last instruction emitted. */
3155 rtx
3156 emit_move_insn (rtx x, rtx y)
3158 enum machine_mode mode = GET_MODE (x);
3159 rtx y_cst = NULL_RTX;
3160 rtx last_insn, set;
3162 gcc_assert (mode != BLKmode
3163 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3165 if (CONSTANT_P (y))
3167 if (optimize
3168 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3169 && (last_insn = compress_float_constant (x, y)))
3170 return last_insn;
3172 y_cst = y;
3174 if (!LEGITIMATE_CONSTANT_P (y))
3176 y = force_const_mem (mode, y);
3178 /* If the target's cannot_force_const_mem prevented the spill,
3179 assume that the target's move expanders will also take care
3180 of the non-legitimate constant. */
3181 if (!y)
3182 y = y_cst;
3186 /* If X or Y are memory references, verify that their addresses are valid
3187 for the machine. */
3188 if (MEM_P (x)
3189 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3190 && ! push_operand (x, GET_MODE (x)))
3191 || (flag_force_addr
3192 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3193 x = validize_mem (x);
3195 if (MEM_P (y)
3196 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3197 || (flag_force_addr
3198 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3199 y = validize_mem (y);
3201 gcc_assert (mode != BLKmode);
3203 last_insn = emit_move_insn_1 (x, y);
3205 if (y_cst && REG_P (x)
3206 && (set = single_set (last_insn)) != NULL_RTX
3207 && SET_DEST (set) == x
3208 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3209 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3211 return last_insn;
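/* Editorial sketch, not part of the original source: the canonical use
   of emit_move_insn is simply

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   i.e. the destination already has the desired mode and the source is
   either of the same mode or a VOIDmode constant, per the comment above;
   emit_move_insn_1 then chooses the actual move strategy.  */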
3214 /* If Y is representable exactly in a narrower mode, and the target can
3215 perform the extension directly from constant or memory, then emit the
3216 move as an extension. */
3218 static rtx
3219 compress_float_constant (rtx x, rtx y)
3221 enum machine_mode dstmode = GET_MODE (x);
3222 enum machine_mode orig_srcmode = GET_MODE (y);
3223 enum machine_mode srcmode;
3224 REAL_VALUE_TYPE r;
3225 int oldcost, newcost;
3227 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3229 if (LEGITIMATE_CONSTANT_P (y))
3230 oldcost = rtx_cost (y, SET);
3231 else
3232 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3234 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3235 srcmode != orig_srcmode;
3236 srcmode = GET_MODE_WIDER_MODE (srcmode))
3238 enum insn_code ic;
3239 rtx trunc_y, last_insn;
3241 /* Skip if the target can't extend this way. */
3242 ic = can_extend_p (dstmode, srcmode, 0);
3243 if (ic == CODE_FOR_nothing)
3244 continue;
3246 /* Skip if the narrowed value isn't exact. */
3247 if (! exact_real_truncate (srcmode, &r))
3248 continue;
3250 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3252 if (LEGITIMATE_CONSTANT_P (trunc_y))
3254 /* Skip if the target needs extra instructions to perform
3255 the extension. */
3256 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3257 continue;
3258 /* This is valid, but may not be cheaper than the original. */
3259 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3260 if (oldcost < newcost)
3261 continue;
3263 else if (float_extend_from_mem[dstmode][srcmode])
3265 trunc_y = force_const_mem (srcmode, trunc_y);
3266 /* This is valid, but may not be cheaper than the original. */
3267 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3268 if (oldcost < newcost)
3269 continue;
3270 trunc_y = validize_mem (trunc_y);
3272 else
3273 continue;
3275 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3276 last_insn = get_last_insn ();
3278 if (REG_P (x))
3279 set_unique_reg_note (last_insn, REG_EQUAL, y);
3281 return last_insn;
3284 return NULL_RTX;
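/* Editorial note, not part of the original source: as a worked example,
   on a target that can extend SFmode to DFmode, the DFmode constant 1.5
   truncates to SFmode exactly, so the load can become

     (set (reg:DF d) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   reading only four bytes of constant pool.  The DFmode value nearest
   0.1, by contrast, does not survive the round trip through SFmode, so
   exact_real_truncate rejects it and the full constant is used.  */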
3287 /* Pushing data onto the stack. */
3289 /* Push a block of length SIZE (perhaps variable)
3290 and return an rtx to address the beginning of the block.
3291 The value may be virtual_outgoing_args_rtx.
3293 EXTRA is the number of bytes of padding to push in addition to SIZE.
3294 BELOW nonzero means this padding comes at low addresses;
3295 otherwise, the padding comes at high addresses. */
3297 rtx
3298 push_block (rtx size, int extra, int below)
3300 rtx temp;
3302 size = convert_modes (Pmode, ptr_mode, size, 1);
3303 if (CONSTANT_P (size))
3304 anti_adjust_stack (plus_constant (size, extra));
3305 else if (REG_P (size) && extra == 0)
3306 anti_adjust_stack (size);
3307 else
3309 temp = copy_to_mode_reg (Pmode, size);
3310 if (extra != 0)
3311 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3312 temp, 0, OPTAB_LIB_WIDEN);
3313 anti_adjust_stack (temp);
3316 #ifndef STACK_GROWS_DOWNWARD
3317 if (0)
3318 #else
3319 if (1)
3320 #endif
3322 temp = virtual_outgoing_args_rtx;
3323 if (extra != 0 && below)
3324 temp = plus_constant (temp, extra);
3326 else
3328 if (GET_CODE (size) == CONST_INT)
3329 temp = plus_constant (virtual_outgoing_args_rtx,
3330 -INTVAL (size) - (below ? 0 : extra));
3331 else if (extra != 0 && !below)
3332 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3333 negate_rtx (Pmode, plus_constant (size, extra)));
3334 else
3335 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3336 negate_rtx (Pmode, size));
3339 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
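/* Editorial sketch, not part of the original source: allocating a
   16-byte block (an arbitrary example size) and addressing it:

     rtx addr = push_block (GEN_INT (16), 0, 0);
     rtx blk = gen_rtx_MEM (BLKmode, addr);

   which mirrors how the BLKmode branch of emit_push_insn below uses
   push_block for its stack space.  */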
3342 #ifdef PUSH_ROUNDING
3344 /* Emit single push insn. */
3346 static void
3347 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3349 rtx dest_addr;
3350 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3351 rtx dest;
3352 enum insn_code icode;
3353 insn_operand_predicate_fn pred;
3355 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3356 /* If there is a push pattern, use it.  Otherwise try the old way of
3357 throwing a MEM representing the push operation to the move expander. */
3358 icode = push_optab->handlers[(int) mode].insn_code;
3359 if (icode != CODE_FOR_nothing)
3361 if (((pred = insn_data[(int) icode].operand[0].predicate)
3362 && !((*pred) (x, mode))))
3363 x = force_reg (mode, x);
3364 emit_insn (GEN_FCN (icode) (x));
3365 return;
3367 if (GET_MODE_SIZE (mode) == rounded_size)
3368 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3369 /* If we are to pad downward, adjust the stack pointer first and
3370 then store X into the stack location using an offset. This is
3371 because emit_move_insn does not know how to pad; it does not have
3372 access to type. */
3373 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3375 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3376 HOST_WIDE_INT offset;
3378 emit_move_insn (stack_pointer_rtx,
3379 expand_binop (Pmode,
3380 #ifdef STACK_GROWS_DOWNWARD
3381 sub_optab,
3382 #else
3383 add_optab,
3384 #endif
3385 stack_pointer_rtx,
3386 GEN_INT (rounded_size),
3387 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3389 offset = (HOST_WIDE_INT) padding_size;
3390 #ifdef STACK_GROWS_DOWNWARD
3391 if (STACK_PUSH_CODE == POST_DEC)
3392 /* We have already decremented the stack pointer, so get the
3393 previous value. */
3394 offset += (HOST_WIDE_INT) rounded_size;
3395 #else
3396 if (STACK_PUSH_CODE == POST_INC)
3397 /* We have already incremented the stack pointer, so get the
3398 previous value. */
3399 offset -= (HOST_WIDE_INT) rounded_size;
3400 #endif
3401 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3403 else
3405 #ifdef STACK_GROWS_DOWNWARD
3406 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3407 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3408 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3409 #else
3410 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3411 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3412 GEN_INT (rounded_size));
3413 #endif
3414 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3417 dest = gen_rtx_MEM (mode, dest_addr);
3419 if (type != 0)
3421 set_mem_attributes (dest, type, 1);
3423 if (flag_optimize_sibling_calls)
3424 /* Function incoming arguments may overlap with sibling call
3425 outgoing arguments and we cannot allow reordering of reads
3426 from function arguments with stores to outgoing arguments
3427 of sibling calls. */
3428 set_mem_alias_set (dest, 0);
3430 emit_move_insn (dest, x);
3432 #endif
3434 /* Generate code to push X onto the stack, assuming it has mode MODE and
3435 type TYPE.
3436 MODE is redundant except when X is a CONST_INT (since they don't
3437 carry mode info).
3438 SIZE is an rtx for the size of data to be copied (in bytes),
3439 needed only if X is BLKmode.
3441 ALIGN (in bits) is maximum alignment we can assume.
3443 If PARTIAL and REG are both nonzero, then copy that many of the first
3444 bytes of X into registers starting with REG, and push the rest of X.
3445 The amount of space pushed is decreased by PARTIAL bytes.
3446 REG must be a hard register in this case.
3447 If REG is zero but PARTIAL is not, take all other actions for an
3448 argument partially in registers, but do not actually load any
3449 registers.
3451 EXTRA is the amount in bytes of extra space to leave next to this arg.
3452 This is ignored if an argument block has already been allocated.
3454 On a machine that lacks real push insns, ARGS_ADDR is the address of
3455 the bottom of the argument block for this call. We use indexing off there
3456 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3457 argument block has not been preallocated.
3459 ARGS_SO_FAR is the size of args previously pushed for this call.
3461 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3462 for arguments passed in registers. If nonzero, it will be the number
3463 of bytes required. */
3465 void
3466 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3467 unsigned int align, int partial, rtx reg, int extra,
3468 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3469 rtx alignment_pad)
3471 rtx xinner;
3472 enum direction stack_direction
3473 #ifdef STACK_GROWS_DOWNWARD
3474 = downward;
3475 #else
3476 = upward;
3477 #endif
3479 /* Decide where to pad the argument: `downward' for below,
3480 `upward' for above, or `none' for don't pad it.
3481 Default is below for small data on big-endian machines; else above. */
3482 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3484 /* Invert direction if stack is post-decrement.
3485 FIXME: why? */
3486 if (STACK_PUSH_CODE == POST_DEC)
3487 if (where_pad != none)
3488 where_pad = (where_pad == downward ? upward : downward);
3490 xinner = x;
3492 if (mode == BLKmode)
3494 /* Copy a block into the stack, entirely or partially. */
3496 rtx temp;
3497 int used;
3498 int offset;
3499 int skip;
3501 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3502 used = partial - offset;
3504 gcc_assert (size);
3506 /* USED is now the # of bytes we need not copy to the stack
3507 because registers will take care of them. */
3509 if (partial != 0)
3510 xinner = adjust_address (xinner, BLKmode, used);
3512 /* If the partial register-part of the arg counts in its stack size,
3513 skip the part of stack space corresponding to the registers.
3514 Otherwise, start copying to the beginning of the stack space,
3515 by setting SKIP to 0. */
3516 skip = (reg_parm_stack_space == 0) ? 0 : used;
3518 #ifdef PUSH_ROUNDING
3519 /* Do it with several push insns if that doesn't take lots of insns
3520 and if there is no difficulty with push insns that skip bytes
3521 on the stack for alignment purposes. */
3522 if (args_addr == 0
3523 && PUSH_ARGS
3524 && GET_CODE (size) == CONST_INT
3525 && skip == 0
3526 && MEM_ALIGN (xinner) >= align
3527 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3528 /* Here we avoid the case of a structure whose weak alignment
3529 forces many pushes of a small amount of data,
3530 and such small pushes do rounding that causes trouble. */
3531 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3532 || align >= BIGGEST_ALIGNMENT
3533 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3534 == (align / BITS_PER_UNIT)))
3535 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3537 /* Push padding now if padding above and stack grows down,
3538 or if padding below and stack grows up.
3539 But if space already allocated, this has already been done. */
3540 if (extra && args_addr == 0
3541 && where_pad != none && where_pad != stack_direction)
3542 anti_adjust_stack (GEN_INT (extra));
3544 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3546 else
3547 #endif /* PUSH_ROUNDING */
3549 rtx target;
3551 /* Otherwise make space on the stack and copy the data
3552 to the address of that space. */
3554 /* Deduct words put into registers from the size we must copy. */
3555 if (partial != 0)
3557 if (GET_CODE (size) == CONST_INT)
3558 size = GEN_INT (INTVAL (size) - used);
3559 else
3560 size = expand_binop (GET_MODE (size), sub_optab, size,
3561 GEN_INT (used), NULL_RTX, 0,
3562 OPTAB_LIB_WIDEN);
3565 /* Get the address of the stack space.
3566 In this case, we do not deal with EXTRA separately.
3567 A single stack adjust will do. */
3568 if (! args_addr)
3570 temp = push_block (size, extra, where_pad == downward);
3571 extra = 0;
3573 else if (GET_CODE (args_so_far) == CONST_INT)
3574 temp = memory_address (BLKmode,
3575 plus_constant (args_addr,
3576 skip + INTVAL (args_so_far)));
3577 else
3578 temp = memory_address (BLKmode,
3579 plus_constant (gen_rtx_PLUS (Pmode,
3580 args_addr,
3581 args_so_far),
3582 skip));
3584 if (!ACCUMULATE_OUTGOING_ARGS)
3586 /* If the source is referenced relative to the stack pointer,
3587 copy it to another register to stabilize it. We do not need
3588 to do this if we know that we won't be changing sp. */
3590 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3591 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3592 temp = copy_to_reg (temp);
3595 target = gen_rtx_MEM (BLKmode, temp);
3597 /* We do *not* set_mem_attributes here, because incoming arguments
3598 may overlap with sibling call outgoing arguments and we cannot
3599 allow reordering of reads from function arguments with stores
3600 to outgoing arguments of sibling calls. We do, however, want
3601 to record the alignment of the stack slot. */
3602 /* ALIGN may well be better aligned than TYPE, e.g. due to
3603 PARM_BOUNDARY. Assume the caller isn't lying. */
3604 set_mem_align (target, align);
3606 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3609 else if (partial > 0)
3611 /* Scalar partly in registers. */
3613 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3614 int i;
3615 int not_stack;
3616 /* # bytes of start of argument
3617 that we must make space for but need not store. */
3618 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3619 int args_offset = INTVAL (args_so_far);
3620 int skip;
3622 /* Push padding now if padding above and stack grows down,
3623 or if padding below and stack grows up.
3624 But if space already allocated, this has already been done. */
3625 if (extra && args_addr == 0
3626 && where_pad != none && where_pad != stack_direction)
3627 anti_adjust_stack (GEN_INT (extra));
3629 /* If we make space by pushing it, we might as well push
3630 the real data. Otherwise, we can leave OFFSET nonzero
3631 and leave the space uninitialized. */
3632 if (args_addr == 0)
3633 offset = 0;
3635 /* Now NOT_STACK gets the number of words that we don't need to
3636 allocate on the stack. Convert OFFSET to words too. */
3637 not_stack = (partial - offset) / UNITS_PER_WORD;
3638 offset /= UNITS_PER_WORD;
3640 /* If the partial register-part of the arg counts in its stack size,
3641 skip the part of stack space corresponding to the registers.
3642 Otherwise, start copying to the beginning of the stack space,
3643 by setting SKIP to 0. */
3644 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3646 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3647 x = validize_mem (force_const_mem (mode, x));
3649 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3650 SUBREGs of such registers are not allowed. */
3651 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3652 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3653 x = copy_to_reg (x);
3655 /* Loop over all the words allocated on the stack for this arg. */
3656 /* We can do it by words, because any scalar bigger than a word
3657 has a size a multiple of a word. */
3658 #ifndef PUSH_ARGS_REVERSED
3659 for (i = not_stack; i < size; i++)
3660 #else
3661 for (i = size - 1; i >= not_stack; i--)
3662 #endif
3663 if (i >= not_stack + offset)
3664 emit_push_insn (operand_subword_force (x, i, mode),
3665 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3666 0, args_addr,
3667 GEN_INT (args_offset + ((i - not_stack + skip)
3668 * UNITS_PER_WORD)),
3669 reg_parm_stack_space, alignment_pad);
3671 else
3673 rtx addr;
3674 rtx dest;
3676 /* Push padding now if padding above and stack grows down,
3677 or if padding below and stack grows up.
3678 But if space already allocated, this has already been done. */
3679 if (extra && args_addr == 0
3680 && where_pad != none && where_pad != stack_direction)
3681 anti_adjust_stack (GEN_INT (extra));
3683 #ifdef PUSH_ROUNDING
3684 if (args_addr == 0 && PUSH_ARGS)
3685 emit_single_push_insn (mode, x, type);
3686 else
3687 #endif
3689 if (GET_CODE (args_so_far) == CONST_INT)
3690 addr
3691 = memory_address (mode,
3692 plus_constant (args_addr,
3693 INTVAL (args_so_far)));
3694 else
3695 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3696 args_so_far));
3697 dest = gen_rtx_MEM (mode, addr);
3699 /* We do *not* set_mem_attributes here, because incoming arguments
3700 may overlap with sibling call outgoing arguments and we cannot
3701 allow reordering of reads from function arguments with stores
3702 to outgoing arguments of sibling calls. We do, however, want
3703 to record the alignment of the stack slot. */
3704 /* ALIGN may well be better aligned than TYPE, e.g. due to
3705 PARM_BOUNDARY. Assume the caller isn't lying. */
3706 set_mem_align (dest, align);
3708 emit_move_insn (dest, x);
3712 /* If part should go in registers, copy that part
3713 into the appropriate registers. Do this now, at the end,
3714 since mem-to-mem copies above may do function calls. */
3715 if (partial > 0 && reg != 0)
3717 /* Handle calls that pass values in multiple non-contiguous locations.
3718 The Irix 6 ABI has examples of this. */
3719 if (GET_CODE (reg) == PARALLEL)
3720 emit_group_load (reg, x, type, -1);
3721 else
3723 gcc_assert (partial % UNITS_PER_WORD == 0);
3724 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3728 if (extra && args_addr == 0 && where_pad == stack_direction)
3729 anti_adjust_stack (GEN_INT (extra));
3731 if (alignment_pad && args_addr == 0)
3732 anti_adjust_stack (alignment_pad);
3735 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3736 operations. */
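/* That is, X is usable as a subtarget only when we are not
   optimizing and X is already a pseudo register; the 0 result in
   every other case tells the caller to let the expander choose its
   own target.  */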
3738 static rtx
3739 get_subtarget (rtx x)
3741 return (optimize
3742 || x == 0
3743 /* Only registers can be subtargets. */
3744 || !REG_P (x)
3745 /* Don't use hard regs to avoid extending their life. */
3746 || REGNO (x) < FIRST_PSEUDO_REGISTER
3747 ? 0 : x);
3750 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3751 FIELD is a bitfield. Returns true if the optimization was successful,
3752 and there's nothing else to do. */
3754 static bool
3755 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3756 unsigned HOST_WIDE_INT bitpos,
3757 enum machine_mode mode1, rtx str_rtx,
3758 tree to, tree src)
3760 enum machine_mode str_mode = GET_MODE (str_rtx);
3761 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3762 tree op0, op1;
3763 rtx value, result;
3764 optab binop;
3766 if (mode1 != VOIDmode
3767 || bitsize >= BITS_PER_WORD
3768 || str_bitsize > BITS_PER_WORD
3769 || TREE_SIDE_EFFECTS (to)
3770 || TREE_THIS_VOLATILE (to))
3771 return false;
3773 STRIP_NOPS (src);
3774 if (!BINARY_CLASS_P (src)
3775 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3776 return false;
3778 op0 = TREE_OPERAND (src, 0);
3779 op1 = TREE_OPERAND (src, 1);
3780 STRIP_NOPS (op0);
3782 if (!operand_equal_p (to, op0, 0))
3783 return false;
3785 if (MEM_P (str_rtx))
3787 unsigned HOST_WIDE_INT offset1;
3789 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3790 str_mode = word_mode;
3791 str_mode = get_best_mode (bitsize, bitpos,
3792 MEM_ALIGN (str_rtx), str_mode, 0);
3793 if (str_mode == VOIDmode)
3794 return false;
3795 str_bitsize = GET_MODE_BITSIZE (str_mode);
3797 offset1 = bitpos;
3798 bitpos %= str_bitsize;
3799 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3800 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3802 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3803 return false;
3805 /* If the bit field covers the whole REG/MEM, store_field
3806 will likely generate better code. */
3807 if (bitsize >= str_bitsize)
3808 return false;
3810 /* We can't handle fields split across multiple entities. */
3811 if (bitpos + bitsize > str_bitsize)
3812 return false;
3814 if (BYTES_BIG_ENDIAN)
3815 bitpos = str_bitsize - bitpos - bitsize;
3817 switch (TREE_CODE (src))
3819 case PLUS_EXPR:
3820 case MINUS_EXPR:
3821 /* For now, just optimize the case of the topmost bitfield
3822 where we don't need to do any masking and also
3823 1-bit bitfields where xor can be used.
3824 We might win by one instruction for the other bitfields
3825 too if insv/extv instructions aren't used, so that
3826 can be added later. */
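/* An illustrative case (a sketch, not taken from real sources):

     struct S { unsigned int pad : 24; unsigned int top : 8; } s;
     s.top += 3;

   touches only the topmost bits of the containing word, so the
   addition can be done as a plain word add of (3 << 24) with no
   masking: carries simply fall off the top.  Likewise a 1-bit field
   updated by an odd constant reduces to an xor of the shifted bit.  */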
3827 if (bitpos + bitsize != str_bitsize
3828 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3829 break;
3831 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3832 value = convert_modes (str_mode,
3833 TYPE_MODE (TREE_TYPE (op1)), value,
3834 TYPE_UNSIGNED (TREE_TYPE (op1)));
3836 /* We may be accessing data outside the field, which means
3837 we can alias adjacent data. */
3838 if (MEM_P (str_rtx))
3840 str_rtx = shallow_copy_rtx (str_rtx);
3841 set_mem_alias_set (str_rtx, 0);
3842 set_mem_expr (str_rtx, 0);
3845 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3846 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3848 value = expand_and (str_mode, value, const1_rtx, NULL);
3849 binop = xor_optab;
3851 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3852 build_int_cst (NULL_TREE, bitpos),
3853 NULL_RTX, 1);
3854 result = expand_binop (str_mode, binop, str_rtx,
3855 value, str_rtx, 1, OPTAB_WIDEN);
3856 if (result != str_rtx)
3857 emit_move_insn (str_rtx, result);
3858 return true;
3860 case BIT_IOR_EXPR:
3861 case BIT_XOR_EXPR:
3862 if (TREE_CODE (op1) != INTEGER_CST)
3863 break;
3864 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3865 value = convert_modes (GET_MODE (str_rtx),
3866 TYPE_MODE (TREE_TYPE (op1)), value,
3867 TYPE_UNSIGNED (TREE_TYPE (op1)));
3869 /* We may be accessing data outside the field, which means
3870 we can alias adjacent data. */
3871 if (MEM_P (str_rtx))
3873 str_rtx = shallow_copy_rtx (str_rtx);
3874 set_mem_alias_set (str_rtx, 0);
3875 set_mem_expr (str_rtx, 0);
3878 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3879 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3881 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3882 - 1);
3883 value = expand_and (GET_MODE (str_rtx), value, mask,
3884 NULL_RTX);
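/* E.g. (illustrative) a 3-bit field keeps only
   (1 << 3) - 1 == 0x7 of VALUE here, before it is shifted up to
   BITPOS below.  */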
3886 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3887 build_int_cst (NULL_TREE, bitpos),
3888 NULL_RTX, 1);
3889 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3890 value, str_rtx, 1, OPTAB_WIDEN);
3891 if (result != str_rtx)
3892 emit_move_insn (str_rtx, result);
3893 return true;
3895 default:
3896 break;
3899 return false;
3903 /* Expand an assignment that stores the value of FROM into TO. */
3905 void
3906 expand_assignment (tree to, tree from)
3908 rtx to_rtx = 0;
3909 rtx result;
3911 /* Don't crash if the lhs of the assignment was erroneous. */
3913 if (TREE_CODE (to) == ERROR_MARK)
3915 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3916 return;
3919 /* Assignment of a structure component needs special treatment
3920 if the structure component's rtx is not simply a MEM.
3921 Assignment of an array element at a constant index, and assignment of
3922 an array element in an unaligned packed structure field, have the same
3923 problem. */
3924 if (handled_component_p (to)
3925 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3927 enum machine_mode mode1;
3928 HOST_WIDE_INT bitsize, bitpos;
3929 tree offset;
3930 int unsignedp;
3931 int volatilep = 0;
3932 tree tem;
3934 push_temp_slots ();
3935 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3936 &unsignedp, &volatilep, true);
3938 /* If we are going to use store_bit_field and extract_bit_field,
3939 make sure to_rtx will be safe for multiple use. */
3941 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3943 if (offset != 0)
3945 rtx offset_rtx;
3947 if (!MEM_P (to_rtx))
3949 /* We can get constant negative offsets into arrays with broken
3950 user code. Translate this to a trap instead of ICEing. */
3951 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3952 expand_builtin_trap ();
3953 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3956 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3957 #ifdef POINTERS_EXTEND_UNSIGNED
3958 if (GET_MODE (offset_rtx) != Pmode)
3959 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3960 #else
3961 if (GET_MODE (offset_rtx) != ptr_mode)
3962 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3963 #endif
3965 /* A constant address in TO_RTX can have VOIDmode; we must not try
3966 to call force_reg in that case, so avoid it. */
3967 if (MEM_P (to_rtx)
3968 && GET_MODE (to_rtx) == BLKmode
3969 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3970 && bitsize > 0
3971 && (bitpos % bitsize) == 0
3972 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3973 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3975 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3976 bitpos = 0;
3979 to_rtx = offset_address (to_rtx, offset_rtx,
3980 highest_pow2_factor_for_target (to,
3981 offset));
3984 /* Handle expand_expr of a complex value returning a CONCAT. */
3985 if (GET_CODE (to_rtx) == CONCAT)
3987 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3989 gcc_assert (bitpos == 0);
3990 result = store_expr (from, to_rtx, false);
3992 else
3994 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3995 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3998 else
4000 if (MEM_P (to_rtx))
4002 /* If the field is at offset zero, we could have been given the
4003 DECL_RTX of the parent struct. Don't munge it. */
4004 to_rtx = shallow_copy_rtx (to_rtx);
4006 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4008 /* Deal with volatile and readonly fields. The former is only
4009 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4010 if (volatilep)
4011 MEM_VOLATILE_P (to_rtx) = 1;
4012 if (component_uses_parent_alias_set (to))
4013 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4016 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4017 to_rtx, to, from))
4018 result = NULL;
4019 else
4020 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4021 TREE_TYPE (tem), get_alias_set (to));
4024 if (result)
4025 preserve_temp_slots (result);
4026 free_temp_slots ();
4027 pop_temp_slots ();
4028 return;
4031 /* If the rhs is a function call and its value is not an aggregate,
4032 call the function before we start to compute the lhs.
4033 This is needed for correct code for cases such as
4034 val = setjmp (buf) on machines where reference to val
4035 requires loading up part of an address in a separate insn.
4037 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4038 since it might be a promoted variable where the zero- or sign-extension
4039 needs to be done. Handling this in the normal way is safe because no
4040 computation is done before the call. */
4041 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4042 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4043 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4044 && REG_P (DECL_RTL (to))))
4046 rtx value;
4048 push_temp_slots ();
4049 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4050 if (to_rtx == 0)
4051 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4053 /* Handle calls that return values in multiple non-contiguous locations.
4054 The Irix 6 ABI has examples of this. */
4055 if (GET_CODE (to_rtx) == PARALLEL)
4056 emit_group_load (to_rtx, value, TREE_TYPE (from),
4057 int_size_in_bytes (TREE_TYPE (from)));
4058 else if (GET_MODE (to_rtx) == BLKmode)
4059 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4060 else
4062 if (POINTER_TYPE_P (TREE_TYPE (to)))
4063 value = convert_memory_address (GET_MODE (to_rtx), value);
4064 emit_move_insn (to_rtx, value);
4066 preserve_temp_slots (to_rtx);
4067 free_temp_slots ();
4068 pop_temp_slots ();
4069 return;
4072 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4073 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4075 if (to_rtx == 0)
4076 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4078 /* Don't move directly into a return register. */
4079 if (TREE_CODE (to) == RESULT_DECL
4080 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4082 rtx temp;
4084 push_temp_slots ();
4085 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4087 if (GET_CODE (to_rtx) == PARALLEL)
4088 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4089 int_size_in_bytes (TREE_TYPE (from)));
4090 else
4091 emit_move_insn (to_rtx, temp);
4093 preserve_temp_slots (to_rtx);
4094 free_temp_slots ();
4095 pop_temp_slots ();
4096 return;
4099 /* In case we are returning the contents of an object which overlaps
4100 the place the value is being stored, use a safe function when copying
4101 a value through a pointer into a structure value return block. */
4102 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4103 && current_function_returns_struct
4104 && !current_function_returns_pcc_struct)
4106 rtx from_rtx, size;
4108 push_temp_slots ();
4109 size = expr_size (from);
4110 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4112 emit_library_call (memmove_libfunc, LCT_NORMAL,
4113 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4114 XEXP (from_rtx, 0), Pmode,
4115 convert_to_mode (TYPE_MODE (sizetype),
4116 size, TYPE_UNSIGNED (sizetype)),
4117 TYPE_MODE (sizetype));
4119 preserve_temp_slots (to_rtx);
4120 free_temp_slots ();
4121 pop_temp_slots ();
4122 return;
4125 /* Compute FROM and store the value in the rtx we got. */
4127 push_temp_slots ();
4128 result = store_expr (from, to_rtx, 0);
4129 preserve_temp_slots (result);
4130 free_temp_slots ();
4131 pop_temp_slots ();
4132 return;
4135 /* Generate code for computing expression EXP,
4136 and storing the value into TARGET.
4138 If the mode is BLKmode then we may return TARGET itself.
4139 It turns out that in BLKmode it doesn't cause a problem,
4140 because C has no operators that could combine two different
4141 assignments into the same BLKmode object with different values
4142 with no sequence point. Will other languages need this to
4143 be more thorough?
4145 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4146 stack, and block moves may need to be treated specially. */
4148 rtx
4149 store_expr (tree exp, rtx target, int call_param_p)
4151 rtx temp;
4152 rtx alt_rtl = NULL_RTX;
4153 int dont_return_target = 0;
4155 if (VOID_TYPE_P (TREE_TYPE (exp)))
4157 /* C++ can generate ?: expressions with a throw expression in one
4158 branch and an rvalue in the other. Here, we resolve attempts to
4159 store the throw expression's nonexistent result. */
4160 gcc_assert (!call_param_p);
4161 expand_expr (exp, const0_rtx, VOIDmode, 0);
4162 return NULL_RTX;
4164 if (TREE_CODE (exp) == COMPOUND_EXPR)
4166 /* Perform first part of compound expression, then assign from second
4167 part. */
4168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4169 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4170 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4172 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4174 /* For conditional expression, get safe form of the target. Then
4175 test the condition, doing the appropriate assignment on either
4176 side. This avoids the creation of unnecessary temporaries.
4177 For non-BLKmode, it is more efficient not to do this. */
4179 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4181 do_pending_stack_adjust ();
4182 NO_DEFER_POP;
4183 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4184 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4185 emit_jump_insn (gen_jump (lab2));
4186 emit_barrier ();
4187 emit_label (lab1);
4188 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4189 emit_label (lab2);
4190 OK_DEFER_POP;
4192 return NULL_RTX;
4194 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4195 /* If this is a scalar in a register that is stored in a wider mode
4196 than the declared mode, compute the result into its declared mode
4197 and then convert to the wider mode. Our value is the computed
4198 expression. */
4200 rtx inner_target = 0;
4202 /* We can do the conversion inside EXP, which will often result
4203 in some optimizations. Do the conversion in two steps: first
4204 change the signedness, if needed, then the extend. But don't
4205 do this if the type of EXP is a subtype of something else
4206 since then the conversion might involve more than just
4207 converting modes. */
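/* A sketch of the intent (illustrative, assuming an ABI that
   promotes HImode variables to SImode registers): for

     short s;  long l;  ...  s = (short) l;

   where S lives in a promoted SImode register, we rewrite EXP so
   the narrowing happens at S's declared width, fixing up the
   signedness first if it disagrees with the promotion, and then
   widen to SImode so expand_expr can compute straight into the
   wide register.  */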
4208 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4209 && TREE_TYPE (TREE_TYPE (exp)) == 0
4210 && (!lang_hooks.reduce_bit_field_operations
4211 || (GET_MODE_PRECISION (GET_MODE (target))
4212 == TYPE_PRECISION (TREE_TYPE (exp)))))
4214 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4215 != SUBREG_PROMOTED_UNSIGNED_P (target))
4216 exp = convert
4217 (lang_hooks.types.signed_or_unsigned_type
4218 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4220 exp = convert (lang_hooks.types.type_for_mode
4221 (GET_MODE (SUBREG_REG (target)),
4222 SUBREG_PROMOTED_UNSIGNED_P (target)),
4223 exp);
4225 inner_target = SUBREG_REG (target);
4228 temp = expand_expr (exp, inner_target, VOIDmode,
4229 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4231 /* If TEMP is a VOIDmode constant, use convert_modes to make
4232 sure that we properly convert it. */
4233 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4235 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4236 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4237 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4238 GET_MODE (target), temp,
4239 SUBREG_PROMOTED_UNSIGNED_P (target));
4242 convert_move (SUBREG_REG (target), temp,
4243 SUBREG_PROMOTED_UNSIGNED_P (target));
4245 return NULL_RTX;
4247 else
4249 temp = expand_expr_real (exp, target, GET_MODE (target),
4250 (call_param_p
4251 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4252 &alt_rtl);
4253 /* Return TARGET if it's a specified hardware register.
4254 If TARGET is a volatile mem ref, either return TARGET
4255 or return a reg copied *from* TARGET; ANSI requires this.
4257 Otherwise, if TEMP is not TARGET, return TEMP
4258 if it is constant (for efficiency),
4259 or if we really want the correct value. */
4260 if (!(target && REG_P (target)
4261 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4262 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4263 && ! rtx_equal_p (temp, target)
4264 && CONSTANT_P (temp))
4265 dont_return_target = 1;
4268 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4269 the same as that of TARGET, adjust the constant. This is needed, for
4270 example, in case it is a CONST_DOUBLE and we want only a word-sized
4271 value. */
4272 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4273 && TREE_CODE (exp) != ERROR_MARK
4274 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4275 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4276 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4278 /* If value was not generated in the target, store it there.
4279 Convert the value to TARGET's type first if necessary and emit the
4280 pending incrementations that have been queued when expanding EXP.
4281 Note that we cannot emit the whole queue blindly because this will
4282 effectively disable the POST_INC optimization later.
4284 If TEMP and TARGET compare equal according to rtx_equal_p, but
4285 one or both of them are volatile memory refs, we have to distinguish
4286 two cases:
4287 - expand_expr has used TARGET. In this case, we must not generate
4288 another copy. This can be detected by TARGET being equal according
4289 to == .
4290 - expand_expr has not used TARGET - that means that the source just
4291 happens to have the same RTX form. Since temp will have been created
4292 by expand_expr, it will compare unequal according to == .
4293 We must generate a copy in this case, to reach the correct number
4294 of volatile memory references. */
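/* Concretely (an illustrative case): when EXP is the volatile read
   *P and TARGET is a volatile MEM for *P as well, expand_expr may
   have returned TARGET itself -- pointer-equal, no copy wanted --
   or a freshly built MEM of the same shape, which is rtx_equal_p
   but not ==, and then the move below is needed to get the second
   volatile access.  */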
4296 if ((! rtx_equal_p (temp, target)
4297 || (temp != target && (side_effects_p (temp)
4298 || side_effects_p (target))))
4299 && TREE_CODE (exp) != ERROR_MARK
4300 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4301 but TARGET is not valid memory reference, TEMP will differ
4302 from TARGET although it is really the same location. */
4303 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4304 /* If there's nothing to copy, don't bother. Don't call
4305 expr_size unless necessary, because the expr_size hook of some
4306 front ends (C++) must not be given objects that are not
4307 supposed to be bit-copied or bit-initialized. */
4308 && expr_size (exp) != const0_rtx)
4310 if (GET_MODE (temp) != GET_MODE (target)
4311 && GET_MODE (temp) != VOIDmode)
4313 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4314 if (dont_return_target)
4316 /* In this case, we will return TEMP,
4317 so make sure it has the proper mode.
4318 But don't forget to store the value into TARGET. */
4319 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4320 emit_move_insn (target, temp);
4322 else
4323 convert_move (target, temp, unsignedp);
4326 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4328 /* Handle copying a string constant into an array. The string
4329 constant may be shorter than the array. So copy just the string's
4330 actual length, and clear the rest. First get the size of the data
4331 type of the string, which is actually the size of the target. */
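/* E.g. (illustrative) for

     char buf[16] = "hi";

   the STRING_CST supplies 3 bytes including the terminating NUL,
   so we block-copy 3 bytes and clear the remaining 13.  */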
4332 rtx size = expr_size (exp);
4334 if (GET_CODE (size) == CONST_INT
4335 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4336 emit_block_move (target, temp, size,
4337 (call_param_p
4338 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4339 else
4341 /* Compute the size of the data to copy from the string. */
4342 tree copy_size
4343 = size_binop (MIN_EXPR,
4344 make_tree (sizetype, size),
4345 size_int (TREE_STRING_LENGTH (exp)));
4346 rtx copy_size_rtx
4347 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4348 (call_param_p
4349 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4350 rtx label = 0;
4352 /* Copy that much. */
4353 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4354 TYPE_UNSIGNED (sizetype));
4355 emit_block_move (target, temp, copy_size_rtx,
4356 (call_param_p
4357 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4359 /* Figure out how much is left in TARGET that we have to clear.
4360 Do all calculations in ptr_mode. */
4361 if (GET_CODE (copy_size_rtx) == CONST_INT)
4363 size = plus_constant (size, -INTVAL (copy_size_rtx));
4364 target = adjust_address (target, BLKmode,
4365 INTVAL (copy_size_rtx));
4367 else
4369 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4370 copy_size_rtx, NULL_RTX, 0,
4371 OPTAB_LIB_WIDEN);
4373 #ifdef POINTERS_EXTEND_UNSIGNED
4374 if (GET_MODE (copy_size_rtx) != Pmode)
4375 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4376 TYPE_UNSIGNED (sizetype));
4377 #endif
4379 target = offset_address (target, copy_size_rtx,
4380 highest_pow2_factor (copy_size));
4381 label = gen_label_rtx ();
4382 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4383 GET_MODE (size), 0, label);
4386 if (size != const0_rtx)
4387 clear_storage (target, size, BLOCK_OP_NORMAL);
4389 if (label)
4390 emit_label (label);
4393 /* Handle calls that return values in multiple non-contiguous locations.
4394 The Irix 6 ABI has examples of this. */
4395 else if (GET_CODE (target) == PARALLEL)
4396 emit_group_load (target, temp, TREE_TYPE (exp),
4397 int_size_in_bytes (TREE_TYPE (exp)));
4398 else if (GET_MODE (temp) == BLKmode)
4399 emit_block_move (target, temp, expr_size (exp),
4400 (call_param_p
4401 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4402 else
4404 temp = force_operand (temp, target);
4405 if (temp != target)
4406 emit_move_insn (target, temp);
4410 return NULL_RTX;
4413 /* Examine CTOR to discover:
4414 * how many scalar fields are set to nonzero values,
4415 and place it in *P_NZ_ELTS;
4416 * how many scalar fields are set to non-constant values,
4417 and place it in *P_NC_ELTS; and
4418 * how many scalar fields in total are in CTOR,
4419 and place it in *P_ELT_COUNT.
4420 * if a type is a union, and the initializer from the constructor
4421 is not the largest element in the union, then set *p_must_clear. */
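/* For example (illustrative), the initializer { 1, 0, 0, 2 } for an
   "int a[4]" yields *P_NZ_ELTS == 2, *P_NC_ELTS == 0 and
   *P_ELT_COUNT == 4.  */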
4423 static void
4424 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4425 HOST_WIDE_INT *p_nc_elts,
4426 HOST_WIDE_INT *p_elt_count,
4427 bool *p_must_clear)
4429 unsigned HOST_WIDE_INT idx;
4430 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4431 tree value, purpose;
4433 nz_elts = 0;
4434 nc_elts = 0;
4435 elt_count = 0;
4437 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4439 HOST_WIDE_INT mult;
4441 mult = 1;
4442 if (TREE_CODE (purpose) == RANGE_EXPR)
4444 tree lo_index = TREE_OPERAND (purpose, 0);
4445 tree hi_index = TREE_OPERAND (purpose, 1);
4447 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4448 mult = (tree_low_cst (hi_index, 1)
4449 - tree_low_cst (lo_index, 1) + 1);
4452 switch (TREE_CODE (value))
4454 case CONSTRUCTOR:
4456 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4457 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4458 nz_elts += mult * nz;
4459 nc_elts += mult * nc;
4460 elt_count += mult * ic;
4462 break;
4464 case INTEGER_CST:
4465 case REAL_CST:
4466 if (!initializer_zerop (value))
4467 nz_elts += mult;
4468 elt_count += mult;
4469 break;
4471 case STRING_CST:
4472 nz_elts += mult * TREE_STRING_LENGTH (value);
4473 elt_count += mult * TREE_STRING_LENGTH (value);
4474 break;
4476 case COMPLEX_CST:
4477 if (!initializer_zerop (TREE_REALPART (value)))
4478 nz_elts += mult;
4479 if (!initializer_zerop (TREE_IMAGPART (value)))
4480 nz_elts += mult;
4481 elt_count += mult;
4482 break;
4484 case VECTOR_CST:
4486 tree v;
4487 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4489 if (!initializer_zerop (TREE_VALUE (v)))
4490 nz_elts += mult;
4491 elt_count += mult;
4494 break;
4496 default:
4497 nz_elts += mult;
4498 elt_count += mult;
4499 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4500 nc_elts += mult;
4501 break;
4505 if (!*p_must_clear
4506 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4507 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4509 tree init_sub_type;
4510 bool clear_this = true;
4512 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4514 /* We don't expect more than one element of the union to be
4515 initialized. Not sure what we should do otherwise... */
4516 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4517 == 1);
4519 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4520 CONSTRUCTOR_ELTS (ctor),
4521 0)->value);
4523 /* ??? We could look at each element of the union, and find the
4524 largest element. Which would avoid comparing the size of the
4525 initialized element against any tail padding in the union.
4526 Doesn't seem worth the effort... */
4527 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4528 TYPE_SIZE (init_sub_type)) == 1)
4530 /* And now we have to find out if the element itself is fully
4531 constructed. E.g. for union { struct { int a, b; } s; } u
4532 = { .s = { .a = 1 } }. */
4533 if (elt_count == count_type_elements (init_sub_type, false))
4534 clear_this = false;
4538 *p_must_clear = clear_this;
4541 *p_nz_elts += nz_elts;
4542 *p_nc_elts += nc_elts;
4543 *p_elt_count += elt_count;
4546 void
4547 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4548 HOST_WIDE_INT *p_nc_elts,
4549 HOST_WIDE_INT *p_elt_count,
4550 bool *p_must_clear)
4552 *p_nz_elts = 0;
4553 *p_nc_elts = 0;
4554 *p_elt_count = 0;
4555 *p_must_clear = false;
4556 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4557 p_must_clear);
4560 /* Count the number of scalars in TYPE. Return -1 if TYPE is
4561 variable-sized or the count overflows. If ALLOW_FLEXARR is true,
4562 don't count a flexible array member at the end of the structure. */
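/* E.g. (illustrative) "struct { int a; double b[4]; }" counts as
   1 + 4 == 5 scalars, while an array with a variable bound yields
   -1.  */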
4564 HOST_WIDE_INT
4565 count_type_elements (tree type, bool allow_flexarr)
4567 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4568 switch (TREE_CODE (type))
4570 case ARRAY_TYPE:
4572 tree telts = array_type_nelts (type);
4573 if (telts && host_integerp (telts, 1))
4575 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4576 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4577 if (n == 0)
4578 return 0;
4579 else if (max / n > m)
4580 return n * m;
4582 return -1;
4585 case RECORD_TYPE:
4587 HOST_WIDE_INT n = 0, t;
4588 tree f;
4590 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4591 if (TREE_CODE (f) == FIELD_DECL)
4593 t = count_type_elements (TREE_TYPE (f), false);
4594 if (t < 0)
4596 /* Check for structures with flexible array member. */
4597 tree tf = TREE_TYPE (f);
4598 if (allow_flexarr
4599 && TREE_CHAIN (f) == NULL
4600 && TREE_CODE (tf) == ARRAY_TYPE
4601 && TYPE_DOMAIN (tf)
4602 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4603 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4604 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4605 && int_size_in_bytes (type) >= 0)
4606 break;
4608 return -1;
4610 n += t;
4613 return n;
4616 case UNION_TYPE:
4617 case QUAL_UNION_TYPE:
4619 /* Ho hum. How in the world do we guess here? Clearly it isn't
4620 right to count the fields. Guess based on the number of words. */
4621 HOST_WIDE_INT n = int_size_in_bytes (type);
4622 if (n < 0)
4623 return -1;
4624 return n / UNITS_PER_WORD;
4627 case COMPLEX_TYPE:
4628 return 2;
4630 case VECTOR_TYPE:
4631 return TYPE_VECTOR_SUBPARTS (type);
4633 case INTEGER_TYPE:
4634 case REAL_TYPE:
4635 case ENUMERAL_TYPE:
4636 case BOOLEAN_TYPE:
4637 case CHAR_TYPE:
4638 case POINTER_TYPE:
4639 case OFFSET_TYPE:
4640 case REFERENCE_TYPE:
4641 return 1;
4643 case VOID_TYPE:
4644 case METHOD_TYPE:
4645 case FUNCTION_TYPE:
4646 case LANG_TYPE:
4647 default:
4648 gcc_unreachable ();
4652 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
4654 static int
4655 mostly_zeros_p (tree exp)
4657 if (TREE_CODE (exp) == CONSTRUCTOR)
4660 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4661 bool must_clear;
4663 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4664 if (must_clear)
4665 return 1;
4667 elts = count_type_elements (TREE_TYPE (exp), false);
4669 return nz_elts < elts / 4;
4672 return initializer_zerop (exp);
4675 /* Return 1 if EXP contains all zeros. */
4677 static int
4678 all_zeros_p (tree exp)
4680 if (TREE_CODE (exp) == CONSTRUCTOR)
4683 HOST_WIDE_INT nz_elts, nc_elts, count;
4684 bool must_clear;
4686 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4687 return nz_elts == 0;
4690 return initializer_zerop (exp);
4693 /* Helper function for store_constructor.
4694 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4695 TYPE is the type of the CONSTRUCTOR, not the element type.
4696 CLEARED is as for store_constructor.
4697 ALIAS_SET is the alias set to use for any stores.
4699 This provides a recursive shortcut back to store_constructor when it isn't
4700 necessary to go through store_field. This is so that we can pass through
4701 the cleared field to let store_constructor know that we may not have to
4702 clear a substructure if the outer structure has already been cleared. */
4704 static void
4705 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4706 HOST_WIDE_INT bitpos, enum machine_mode mode,
4707 tree exp, tree type, int cleared, int alias_set)
4709 if (TREE_CODE (exp) == CONSTRUCTOR
4710 /* We can only call store_constructor recursively if the size and
4711 bit position are on a byte boundary. */
4712 && bitpos % BITS_PER_UNIT == 0
4713 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4714 /* If we have a nonzero bitpos for a register target, then we just
4715 let store_field do the bitfield handling. This is unlikely to
4716 generate unnecessary clear instructions anyway. */
4717 && (bitpos == 0 || MEM_P (target)))
4719 if (MEM_P (target))
4720 target
4721 = adjust_address (target,
4722 GET_MODE (target) == BLKmode
4723 || 0 != (bitpos
4724 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4725 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4728 /* Update the alias set, if required. */
4729 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4730 && MEM_ALIAS_SET (target) != 0)
4732 target = copy_rtx (target);
4733 set_mem_alias_set (target, alias_set);
4736 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4738 else
4739 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4742 /* Store the value of constructor EXP into the rtx TARGET.
4743 TARGET is either a REG or a MEM; we know it cannot conflict, since
4744 safe_from_p has been called.
4745 CLEARED is true if TARGET is known to have been zero'd.
4746 SIZE is the number of bytes of TARGET we are allowed to modify: this
4747 may not be the same as the size of EXP if we are assigning to a field
4748 which has been packed to exclude padding bits. */
4750 static void
4751 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4753 tree type = TREE_TYPE (exp);
4754 #ifdef WORD_REGISTER_OPERATIONS
4755 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4756 #endif
4758 switch (TREE_CODE (type))
4760 case RECORD_TYPE:
4761 case UNION_TYPE:
4762 case QUAL_UNION_TYPE:
4764 unsigned HOST_WIDE_INT idx;
4765 tree field, value;
4767 /* If size is zero or the target is already cleared, do nothing. */
4768 if (size == 0 || cleared)
4769 cleared = 1;
4770 /* We either clear the aggregate or indicate the value is dead. */
4771 else if ((TREE_CODE (type) == UNION_TYPE
4772 || TREE_CODE (type) == QUAL_UNION_TYPE)
4773 && ! CONSTRUCTOR_ELTS (exp))
4774 /* If the constructor is empty, clear the union. */
4776 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4777 cleared = 1;
4780 /* If we are building a static constructor into a register,
4781 set the initial value as zero so we can fold the value into
4782 a constant. But if more than one register is involved,
4783 this probably loses. */
4784 else if (REG_P (target) && TREE_STATIC (exp)
4785 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4787 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4788 cleared = 1;
4791 /* If the constructor has fewer fields than the structure or
4792 if we are initializing the structure to mostly zeros, clear
4793 the whole structure first. Don't do this if TARGET is a
4794 register whose mode size isn't equal to SIZE since
4795 clear_storage can't handle this case. */
4796 else if (size > 0
4797 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4798 != fields_length (type))
4799 || mostly_zeros_p (exp))
4800 && (!REG_P (target)
4801 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4802 == size)))
4804 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4805 cleared = 1;
4808 if (! cleared)
4809 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4811 /* Store each element of the constructor into the
4812 corresponding field of TARGET. */
4813 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4815 enum machine_mode mode;
4816 HOST_WIDE_INT bitsize;
4817 HOST_WIDE_INT bitpos = 0;
4818 tree offset;
4819 rtx to_rtx = target;
4821 /* Just ignore missing fields. We cleared the whole
4822 structure, above, if any fields are missing. */
4823 if (field == 0)
4824 continue;
4826 if (cleared && initializer_zerop (value))
4827 continue;
4829 if (host_integerp (DECL_SIZE (field), 1))
4830 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4831 else
4832 bitsize = -1;
4834 mode = DECL_MODE (field);
4835 if (DECL_BIT_FIELD (field))
4836 mode = VOIDmode;
4838 offset = DECL_FIELD_OFFSET (field);
4839 if (host_integerp (offset, 0)
4840 && host_integerp (bit_position (field), 0))
4842 bitpos = int_bit_position (field);
4843 offset = 0;
4845 else
4846 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4848 if (offset)
4850 rtx offset_rtx;
4852 offset
4853 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4854 make_tree (TREE_TYPE (exp),
4855 target));
4857 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4858 gcc_assert (MEM_P (to_rtx));
4860 #ifdef POINTERS_EXTEND_UNSIGNED
4861 if (GET_MODE (offset_rtx) != Pmode)
4862 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4863 #else
4864 if (GET_MODE (offset_rtx) != ptr_mode)
4865 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4866 #endif
4868 to_rtx = offset_address (to_rtx, offset_rtx,
4869 highest_pow2_factor (offset));
4872 #ifdef WORD_REGISTER_OPERATIONS
4873 /* If this initializes a field that is smaller than a
4874 word, at the start of a word, try to widen it to a full
4875 word. This special case allows us to output C++ member
4876 function initializations in a form that the optimizers
4877 can understand. */
4878 if (REG_P (target)
4879 && bitsize < BITS_PER_WORD
4880 && bitpos % BITS_PER_WORD == 0
4881 && GET_MODE_CLASS (mode) == MODE_INT
4882 && TREE_CODE (value) == INTEGER_CST
4883 && exp_size >= 0
4884 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4886 tree type = TREE_TYPE (value);
4888 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4890 type = lang_hooks.types.type_for_size
4891 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4892 value = convert (type, value);
4895 if (BYTES_BIG_ENDIAN)
4896 value
4897 = fold_build2 (LSHIFT_EXPR, type, value,
4898 build_int_cst (NULL_TREE,
4899 BITS_PER_WORD - bitsize));
4900 bitsize = BITS_PER_WORD;
4901 mode = word_mode;
4903 #endif
4905 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4906 && DECL_NONADDRESSABLE_P (field))
4908 to_rtx = copy_rtx (to_rtx);
4909 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4912 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4913 value, type, cleared,
4914 get_alias_set (TREE_TYPE (field)));
4916 break;
4918 case ARRAY_TYPE:
4920 tree value, index;
4921 unsigned HOST_WIDE_INT i;
4922 int need_to_clear;
4923 tree domain;
4924 tree elttype = TREE_TYPE (type);
4925 int const_bounds_p;
4926 HOST_WIDE_INT minelt = 0;
4927 HOST_WIDE_INT maxelt = 0;
4929 domain = TYPE_DOMAIN (type);
4930 const_bounds_p = (TYPE_MIN_VALUE (domain)
4931 && TYPE_MAX_VALUE (domain)
4932 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4933 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4935 /* If we have constant bounds for the range of the type, get them. */
4936 if (const_bounds_p)
4938 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4939 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4942 /* If the constructor has fewer elements than the array, clear
4943 the whole array first. Similarly if this is a static
4944 constructor of a non-BLKmode object. */
4945 if (cleared)
4946 need_to_clear = 0;
4947 else if (REG_P (target) && TREE_STATIC (exp))
4948 need_to_clear = 1;
4949 else
4951 unsigned HOST_WIDE_INT idx;
4952 tree index, value;
4953 HOST_WIDE_INT count = 0, zero_count = 0;
4954 need_to_clear = ! const_bounds_p;
4956 /* This loop is a more accurate version of the loop in
4957 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4958 is also needed to check for missing elements. */
4959 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4961 HOST_WIDE_INT this_node_count;
4963 if (need_to_clear)
4964 break;
4966 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4968 tree lo_index = TREE_OPERAND (index, 0);
4969 tree hi_index = TREE_OPERAND (index, 1);
4971 if (! host_integerp (lo_index, 1)
4972 || ! host_integerp (hi_index, 1))
4974 need_to_clear = 1;
4975 break;
4978 this_node_count = (tree_low_cst (hi_index, 1)
4979 - tree_low_cst (lo_index, 1) + 1);
4981 else
4982 this_node_count = 1;
4984 count += this_node_count;
4985 if (mostly_zeros_p (value))
4986 zero_count += this_node_count;
4989 /* Clear the entire array first if there are any missing
4990 elements, or if the incidence of zero elements is >=
4991 75%. */
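/* E.g. (illustrative) an initializer supplying all 10 elements of
   an array but with 8 of them zero has 4 * 8 >= 3 * 10, so the
   whole array is cleared and only the two nonzero elements are
   stored individually.  */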
4992 if (! need_to_clear
4993 && (count < maxelt - minelt + 1
4994 || 4 * zero_count >= 3 * count))
4995 need_to_clear = 1;
4998 if (need_to_clear && size > 0)
5000 if (REG_P (target))
5001 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5002 else
5003 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5004 cleared = 1;
5007 if (!cleared && REG_P (target))
5008 /* Inform later passes that the old value is dead. */
5009 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5011 /* Store each element of the constructor into the
5012 corresponding element of TARGET, determined by counting the
5013 elements. */
5014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5016 enum machine_mode mode;
5017 HOST_WIDE_INT bitsize;
5018 HOST_WIDE_INT bitpos;
5019 int unsignedp;
5020 rtx xtarget = target;
5022 if (cleared && initializer_zerop (value))
5023 continue;
5025 unsignedp = TYPE_UNSIGNED (elttype);
5026 mode = TYPE_MODE (elttype);
5027 if (mode == BLKmode)
5028 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5029 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5030 : -1);
5031 else
5032 bitsize = GET_MODE_BITSIZE (mode);
5034 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5036 tree lo_index = TREE_OPERAND (index, 0);
5037 tree hi_index = TREE_OPERAND (index, 1);
5038 rtx index_r, pos_rtx;
5039 HOST_WIDE_INT lo, hi, count;
5040 tree position;
5042 /* If the range is constant and "small", unroll the loop. */
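/* E.g. (illustrative, GNU C range syntax)

     int a[4] = { [0 ... 3] = v };

   is unrolled into four stores at constant offsets, while large or
   non-constant ranges take the runtime loop in the else arm
   below.  */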
5043 if (const_bounds_p
5044 && host_integerp (lo_index, 0)
5045 && host_integerp (hi_index, 0)
5046 && (lo = tree_low_cst (lo_index, 0),
5047 hi = tree_low_cst (hi_index, 0),
5048 count = hi - lo + 1,
5049 (!MEM_P (target)
5050 || count <= 2
5051 || (host_integerp (TYPE_SIZE (elttype), 1)
5052 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5053 <= 40 * 8)))))
5055 lo -= minelt; hi -= minelt;
5056 for (; lo <= hi; lo++)
5058 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5060 if (MEM_P (target)
5061 && !MEM_KEEP_ALIAS_SET_P (target)
5062 && TREE_CODE (type) == ARRAY_TYPE
5063 && TYPE_NONALIASED_COMPONENT (type))
5065 target = copy_rtx (target);
5066 MEM_KEEP_ALIAS_SET_P (target) = 1;
5069 store_constructor_field
5070 (target, bitsize, bitpos, mode, value, type, cleared,
5071 get_alias_set (elttype));
5074 else
5076 rtx loop_start = gen_label_rtx ();
5077 rtx loop_end = gen_label_rtx ();
5078 tree exit_cond;
5080 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5081 unsignedp = TYPE_UNSIGNED (domain);
5083 index = build_decl (VAR_DECL, NULL_TREE, domain);
5085 index_r
5086 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5087 &unsignedp, 0));
5088 SET_DECL_RTL (index, index_r);
5089 store_expr (lo_index, index_r, 0);
5091 /* Build the head of the loop. */
5092 do_pending_stack_adjust ();
5093 emit_label (loop_start);
5095 /* Assign value to element index. */
5096 position
5097 = convert (ssizetype,
5098 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5099 index, TYPE_MIN_VALUE (domain)));
5100 position = size_binop (MULT_EXPR, position,
5101 convert (ssizetype,
5102 TYPE_SIZE_UNIT (elttype)));
5104 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5105 xtarget = offset_address (target, pos_rtx,
5106 highest_pow2_factor (position));
5107 xtarget = adjust_address (xtarget, mode, 0);
5108 if (TREE_CODE (value) == CONSTRUCTOR)
5109 store_constructor (value, xtarget, cleared,
5110 bitsize / BITS_PER_UNIT);
5111 else
5112 store_expr (value, xtarget, 0);
5114 /* Generate a conditional jump to exit the loop. */
5115 exit_cond = build2 (LT_EXPR, integer_type_node,
5116 index, hi_index);
5117 jumpif (exit_cond, loop_end);
5119 /* Update the loop counter, and jump to the head of
5120 the loop. */
5121 expand_assignment (index,
5122 build2 (PLUS_EXPR, TREE_TYPE (index),
5123 index, integer_one_node));
5125 emit_jump (loop_start);
5127 /* Build the end of the loop. */
5128 emit_label (loop_end);
5131 else if ((index != 0 && ! host_integerp (index, 0))
5132 || ! host_integerp (TYPE_SIZE (elttype), 1))
5134 tree position;
5136 if (index == 0)
5137 index = ssize_int (1);
5139 if (minelt)
5140 index = fold_convert (ssizetype,
5141 fold_build2 (MINUS_EXPR,
5142 TREE_TYPE (index),
5143 index,
5144 TYPE_MIN_VALUE (domain)));
5146 position = size_binop (MULT_EXPR, index,
5147 convert (ssizetype,
5148 TYPE_SIZE_UNIT (elttype)));
5149 xtarget = offset_address (target,
5150 expand_expr (position, 0, VOIDmode, 0),
5151 highest_pow2_factor (position));
5152 xtarget = adjust_address (xtarget, mode, 0);
5153 store_expr (value, xtarget, 0);
5155 else
5157 if (index != 0)
5158 bitpos = ((tree_low_cst (index, 0) - minelt)
5159 * tree_low_cst (TYPE_SIZE (elttype), 1));
5160 else
5161 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5163 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5164 && TREE_CODE (type) == ARRAY_TYPE
5165 && TYPE_NONALIASED_COMPONENT (type))
5167 target = copy_rtx (target);
5168 MEM_KEEP_ALIAS_SET_P (target) = 1;
5170 store_constructor_field (target, bitsize, bitpos, mode, value,
5171 type, cleared, get_alias_set (elttype));
5174 break;
5177 case VECTOR_TYPE:
5179 unsigned HOST_WIDE_INT idx;
5180 constructor_elt *ce;
5181 int i;
5182 int need_to_clear;
5183 int icode = 0;
5184 tree elttype = TREE_TYPE (type);
5185 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5186 enum machine_mode eltmode = TYPE_MODE (elttype);
5187 HOST_WIDE_INT bitsize;
5188 HOST_WIDE_INT bitpos;
5189 rtvec vector = NULL;
5190 unsigned n_elts;
5192 gcc_assert (eltmode != BLKmode);
5194 n_elts = TYPE_VECTOR_SUBPARTS (type);
5195 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5197 enum machine_mode mode = GET_MODE (target);
5199 icode = (int) vec_init_optab->handlers[mode].insn_code;
5200 if (icode != CODE_FOR_nothing)
5202 unsigned int i;
5204 vector = rtvec_alloc (n_elts);
5205 for (i = 0; i < n_elts; i++)
5206 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5210 /* If the constructor has fewer elements than the vector,
5211 clear the whole array first. Similarly if this is a static
5212 constructor of a non-BLKmode object. */
5213 if (cleared)
5214 need_to_clear = 0;
5215 else if (REG_P (target) && TREE_STATIC (exp))
5216 need_to_clear = 1;
5217 else
5219 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5220 tree value;
5222 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5224 int n_elts_here = tree_low_cst
5225 (int_const_binop (TRUNC_DIV_EXPR,
5226 TYPE_SIZE (TREE_TYPE (value)),
5227 TYPE_SIZE (elttype), 0), 1);
5229 count += n_elts_here;
5230 if (mostly_zeros_p (value))
5231 zero_count += n_elts_here;
5234 /* Clear the entire vector first if there are any missing elements,
5235 or if the incidence of zero elements is >= 75%. */
5236 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5239 if (need_to_clear && size > 0 && !vector)
5241 if (REG_P (target))
5242 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5243 else
5244 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5245 cleared = 1;
5248 /* Inform later passes that the old value is dead. */
5249 if (!cleared && REG_P (target))
5250 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5252 /* Store each element of the constructor into the corresponding
5253 element of TARGET, determined by counting the elements. */
5254 for (idx = 0, i = 0;
5255 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5256 idx++, i += bitsize / elt_size)
5258 HOST_WIDE_INT eltpos;
5259 tree value = ce->value;
5261 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5262 if (cleared && initializer_zerop (value))
5263 continue;
5265 if (ce->index)
5266 eltpos = tree_low_cst (ce->index, 1);
5267 else
5268 eltpos = i;
5270 if (vector)
5272 /* Vector CONSTRUCTORs should only be built from smaller
5273 vectors in the case of BLKmode vectors. */
5274 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5275 RTVEC_ELT (vector, eltpos)
5276 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5278 else
5280 enum machine_mode value_mode =
5281 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5282 ? TYPE_MODE (TREE_TYPE (value))
5283 : eltmode;
5284 bitpos = eltpos * elt_size;
5285 store_constructor_field (target, bitsize, bitpos,
5286 value_mode, value, type,
5287 cleared, get_alias_set (elttype));
5291 if (vector)
5292 emit_insn (GEN_FCN (icode)
5293 (target,
5294 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5295 break;
5298 default:
5299 gcc_unreachable ();
5303 /* Store the value of EXP (an expression tree)
5304 into a subfield of TARGET which has mode MODE and occupies
5305 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5306 If MODE is VOIDmode, it means that we are storing into a bit-field.
5308 Always return const0_rtx unless we have something particular to
5309 return.
5311 TYPE is the type of the underlying object,
5313 ALIAS_SET is the alias set for the destination. This value will
5314 (in general) be different from that for TARGET, since TARGET is a
5315 reference to the containing structure. */
5317 static rtx
5318 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5319 enum machine_mode mode, tree exp, tree type, int alias_set)
5321 HOST_WIDE_INT width_mask = 0;
5323 if (TREE_CODE (exp) == ERROR_MARK)
5324 return const0_rtx;
5326 /* If we have nothing to store, do nothing unless the expression has
5327 side-effects. */
5328 if (bitsize == 0)
5329 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5330 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5331 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5333 /* If we are storing into an unaligned field of an aligned union that is
5334 in a register, we may have the mode of TARGET being an integer mode but
5335 MODE == BLKmode. In that case, get an aligned object whose size and
5336 alignment are the same as TARGET and store TARGET into it (we can avoid
5337 the store if the field being stored is the entire width of TARGET). Then
5338 call ourselves recursively to store the field into a BLKmode version of
5339 that object. Finally, load from the object into TARGET. This is not
5340 very efficient in general, but should only be slightly more expensive
5341 than the otherwise-required unaligned accesses. Perhaps this can be
5342 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5343 twice, once with emit_move_insn and once via store_field. */
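/* For instance (illustrative): storing a BLKmode value into a field
   of a union that lives in an integer register; we copy the register
   to a stack temporary of the same type, store the field through a
   BLKmode view of that temporary, and load the temporary back.  */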
5345 if (mode == BLKmode
5346 && (REG_P (target) || GET_CODE (target) == SUBREG))
5348 rtx object = assign_temp (type, 0, 1, 1);
5349 rtx blk_object = adjust_address (object, BLKmode, 0);
5351 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5352 emit_move_insn (object, target);
5354 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5356 emit_move_insn (target, object);
5358 /* We want to return the BLKmode version of the data. */
5359 return blk_object;
5362 if (GET_CODE (target) == CONCAT)
5364 /* We're storing into a struct containing a single __complex. */
5366 gcc_assert (!bitpos);
5367 return store_expr (exp, target, 0);
5370 /* If the structure is in a register or if the component
5371 is a bit field, we cannot use addressing to access it.
5372 Use bit-field techniques or SUBREG to store in it. */
5374 if (mode == VOIDmode
5375 || (mode != BLKmode && ! direct_store[(int) mode]
5376 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5377 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5378 || REG_P (target)
5379 || GET_CODE (target) == SUBREG
5380 /* If the field isn't aligned enough to store as an ordinary memref,
5381 store it as a bit field. */
5382 || (mode != BLKmode
5383 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5384 || bitpos % GET_MODE_ALIGNMENT (mode))
5385 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5386 || (bitpos % BITS_PER_UNIT != 0)))
5387 /* If the RHS and field are a constant size and the size of the
5388 RHS isn't the same size as the bitfield, we must use bitfield
5389 operations. */
5390 || (bitsize >= 0
5391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5392 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5394 rtx temp;
5396 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5397 implies a mask operation. If the precision is the same size as
5398 the field we're storing into, that mask is redundant. This is
5399 particularly common with bit field assignments generated by the
5400 C front end. */
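/* E.g. (illustrative) for "s.f = x;" with "unsigned int f : 7;" the
   front end wraps X in a conversion to a 7-bit type; the masking
   that conversion implies is subsumed by the 7-bit store itself, so
   we can strip it and store X's low-order bits directly.  */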
5401 if (TREE_CODE (exp) == NOP_EXPR)
5403 tree type = TREE_TYPE (exp);
5404 if (INTEGRAL_TYPE_P (type)
5405 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5406 && bitsize == TYPE_PRECISION (type))
5408 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5409 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5410 exp = TREE_OPERAND (exp, 0);
5414 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5416 /* If BITSIZE is narrower than the size of the type of EXP
5417 we will be narrowing TEMP. Normally, what's wanted are the
5418 low-order bits. However, if EXP's type is a record and this is
5419 big-endian machine, we want the upper BITSIZE bits. */
5420 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5421 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5422 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5423 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5424 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5425 - bitsize),
5426 NULL_RTX, 1);
5428 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5429 MODE. */
5430 if (mode != VOIDmode && mode != BLKmode
5431 && mode != TYPE_MODE (TREE_TYPE (exp)))
5432 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5434 /* If the modes of TARGET and TEMP are both BLKmode, both
5435 must be in memory and BITPOS must be aligned on a byte
5436 boundary. If so, we simply do a block copy. */
5437 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5439 gcc_assert (MEM_P (target) && MEM_P (temp)
5440 && !(bitpos % BITS_PER_UNIT));
5442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5443 emit_block_move (target, temp,
5444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5445 / BITS_PER_UNIT),
5446 BLOCK_OP_NORMAL);
5448 return const0_rtx;
5451 /* Store the value in the bitfield. */
5452 store_bit_field (target, bitsize, bitpos, mode, temp);
5454 return const0_rtx;
5456 else
5458 /* Now build a reference to just the desired component. */
5459 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5461 if (to_rtx == target)
5462 to_rtx = copy_rtx (to_rtx);
5464 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5465 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5466 set_mem_alias_set (to_rtx, alias_set);
5468 return store_expr (exp, to_rtx, 0);
5472 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5473 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5474 codes and find the ultimate containing object, which we return.
5476 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5477 bit position, and *PUNSIGNEDP to the signedness of the field.
5478 If the position of the field is variable, we store a tree
5479 giving the variable offset (in units) in *POFFSET.
5480 This offset is in addition to the bit position.
5481 If the position is not variable, we store 0 in *POFFSET.
5483 If any of the extraction expressions is volatile,
5484 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5486 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5487 is a mode that can be used to access the field. In that case, *PBITSIZE
5488 is redundant.
5490 If the field describes a variable-sized object, *PMODE is set to
5491 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5492 this case, but the address of the object can be found.
5494 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5495 look through nodes that serve as markers of a greater alignment than
5496 the one that can be deduced from the expression. These nodes make it
5497 possible for front-ends to prevent temporaries from being created by
5498 the middle-end on alignment considerations. For that purpose, the
5499 normal operating mode at high-level is to always pass FALSE so that
5500 the ultimate containing object is really returned; moreover, the
5501 associated predicate handled_component_p will always return TRUE
5502 on these nodes, thus indicating that they are essentially handled
5503 by get_inner_reference. TRUE should only be passed when the caller
5504 is scanning the expression in order to build another representation
5505 and specifically knows how to handle these nodes; as such, this is
5506 the normal operating mode in the RTL expanders. */
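/* For instance (illustrative): applied to "s.a[i].b" this walks down
   to the base object S, accumulates the variable array offset
   i * sizeof (element) into *POFFSET, folds the constant
   displacements of A and B into *PBITPOS, and sets *PBITSIZE and
   *PMODE from B's type.  */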
5508 tree
5509 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5510 HOST_WIDE_INT *pbitpos, tree *poffset,
5511 enum machine_mode *pmode, int *punsignedp,
5512 int *pvolatilep, bool keep_aligning)
5514 tree size_tree = 0;
5515 enum machine_mode mode = VOIDmode;
5516 tree offset = size_zero_node;
5517 tree bit_offset = bitsize_zero_node;
5518 tree tem;
5520 /* First get the mode, signedness, and size. We do this from just the
5521 outermost expression. */
5522 if (TREE_CODE (exp) == COMPONENT_REF)
5524 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5525 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5526 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5528 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5530 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5532 size_tree = TREE_OPERAND (exp, 1);
5533 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5535 else
5537 mode = TYPE_MODE (TREE_TYPE (exp));
5538 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5540 if (mode == BLKmode)
5541 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5542 else
5543 *pbitsize = GET_MODE_BITSIZE (mode);
5546 if (size_tree != 0)
5548 if (! host_integerp (size_tree, 1))
5549 mode = BLKmode, *pbitsize = -1;
5550 else
5551 *pbitsize = tree_low_cst (size_tree, 1);
5554 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5555 and find the ultimate containing object. */
5556 while (1)
5558 switch (TREE_CODE (exp))
5560 case BIT_FIELD_REF:
5561 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5562 TREE_OPERAND (exp, 2));
5563 break;
5565 case COMPONENT_REF:
5567 tree field = TREE_OPERAND (exp, 1);
5568 tree this_offset = component_ref_field_offset (exp);
5570 /* If this field hasn't been filled in yet, don't go past it.
5571 This should only happen when folding expressions made during
5572 type construction. */
5573 if (this_offset == 0)
5574 break;
5576 offset = size_binop (PLUS_EXPR, offset, this_offset);
5577 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5578 DECL_FIELD_BIT_OFFSET (field));
5580 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5582 break;
5584 case ARRAY_REF:
5585 case ARRAY_RANGE_REF:
5587 tree index = TREE_OPERAND (exp, 1);
5588 tree low_bound = array_ref_low_bound (exp);
5589 tree unit_size = array_ref_element_size (exp);
5591 /* We assume all arrays have sizes that are a multiple of a byte.
5592 First subtract the lower bound, if any, in the type of the
5593 index, then convert to sizetype and multiply by the size of
5594 the array element. */
5595 if (! integer_zerop (low_bound))
5596 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5597 index, low_bound);
5599 offset = size_binop (PLUS_EXPR, offset,
5600 size_binop (MULT_EXPR,
5601 convert (sizetype, index),
5602 unit_size));
5604 break;
5606 case REALPART_EXPR:
5607 break;
5609 case IMAGPART_EXPR:
5610 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5611 bitsize_int (*pbitsize));
5612 break;
5614 case VIEW_CONVERT_EXPR:
5615 if (keep_aligning && STRICT_ALIGNMENT
5616 && (TYPE_ALIGN (TREE_TYPE (exp))
5617 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5618 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5619 < BIGGEST_ALIGNMENT)
5620 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5621 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5622 goto done;
5623 break;
5625 default:
5626 goto done;
5629 /* If any reference in the chain is volatile, the effect is volatile. */
5630 if (TREE_THIS_VOLATILE (exp))
5631 *pvolatilep = 1;
5633 exp = TREE_OPERAND (exp, 0);
5635 done:
5637 /* If OFFSET is constant, see if we can return the whole thing as a
5638 constant bit position. Otherwise, split it up. */
5639 if (host_integerp (offset, 0)
5640 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5641 bitsize_unit_node))
5642 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5643 && host_integerp (tem, 0))
5644 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5645 else
5646 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5648 *pmode = mode;
5649 return exp;
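/* As an illustration (types and layout hypothetical): given

struct S { int a; int b : 3; } s;

a caller decomposing the COMPONENT_REF for s.b might write

HOST_WIDE_INT bitsize, bitpos;
tree offset;
enum machine_mode mode;
int unsignedp, volatilep = 0;
tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode, &unsignedp, &volatilep, false);

and, assuming a 32-bit int with no padding, get back the VAR_DECL for
s with bitsize == 3, bitpos == 32, offset == 0, and mode == VOIDmode,
since b is a bit-field. */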
5652 /* Return a tree of sizetype representing the size, in bytes, of the element
5653 of EXP, an ARRAY_REF. */
5655 tree
5656 array_ref_element_size (tree exp)
5658 tree aligned_size = TREE_OPERAND (exp, 3);
5659 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5661 /* If a size was specified in the ARRAY_REF, it's the size measured
5662 in alignment units of the element type. So multiply by that value. */
5663 if (aligned_size)
5665 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5666 sizetype from another type of the same width and signedness. */
5667 if (TREE_TYPE (aligned_size) != sizetype)
5668 aligned_size = fold_convert (sizetype, aligned_size);
5669 return size_binop (MULT_EXPR, aligned_size,
5670 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5673 /* Otherwise, take the size from that of the element type. Substitute
5674 any PLACEHOLDER_EXPR that we have. */
5675 else
5676 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
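/* For instance (numbers illustrative): if the element type has a 4-byte
alignment unit, an operand-3 value of 3 denotes 3 * 4 = 12 bytes,
i.e. size_binop (MULT_EXPR, aligned_size, size_int (4)); without
operand 3 the element's TYPE_SIZE_UNIT is used directly. */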
5679 /* Return a tree representing the lower bound of the array mentioned in
5680 EXP, an ARRAY_REF. */
5682 tree
5683 array_ref_low_bound (tree exp)
5685 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5687 /* If a lower bound is specified in EXP, use it. */
5688 if (TREE_OPERAND (exp, 2))
5689 return TREE_OPERAND (exp, 2);
5691 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5692 substituting for a PLACEHOLDER_EXPR as needed. */
5693 if (domain_type && TYPE_MIN_VALUE (domain_type))
5694 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5696 /* Otherwise, return a zero of the appropriate type. */
5697 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5700 /* Return a tree representing the upper bound of the array mentioned in
5701 EXP, an ARRAY_REF. */
5703 tree
5704 array_ref_up_bound (tree exp)
5706 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5708 /* If there is a domain type and it has an upper bound, use it, substituting
5709 for a PLACEHOLDER_EXPR as needed. */
5710 if (domain_type && TYPE_MAX_VALUE (domain_type))
5711 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5713 /* Otherwise fail. */
5714 return NULL_TREE;
5717 /* Return a tree representing the offset, in bytes, of the field referenced
5718 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5720 tree
5721 component_ref_field_offset (tree exp)
5723 tree aligned_offset = TREE_OPERAND (exp, 2);
5724 tree field = TREE_OPERAND (exp, 1);
5726 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5727 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5728 value. */
5729 if (aligned_offset)
5731 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5732 sizetype from another type of the same width and signedness. */
5733 if (TREE_TYPE (aligned_offset) != sizetype)
5734 aligned_offset = fold_convert (sizetype, aligned_offset);
5735 return size_binop (MULT_EXPR, aligned_offset,
5736 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5739 /* Otherwise, take the offset from that of the field. Substitute
5740 any PLACEHOLDER_EXPR that we have. */
5741 else
5742 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
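/* For instance (numbers illustrative): with DECL_OFFSET_ALIGN (field)
of 64 bits, operand 2 counts in units of 64 / BITS_PER_UNIT = 8
bytes, so an operand-2 value of 3 denotes a byte offset of 3 * 8 =
24; the field's DECL_FIELD_BIT_OFFSET must still be added on top. */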
5745 /* Return 1 if T is an expression that get_inner_reference handles. */
5747 int
5748 handled_component_p (tree t)
5750 switch (TREE_CODE (t))
5752 case BIT_FIELD_REF:
5753 case COMPONENT_REF:
5754 case ARRAY_REF:
5755 case ARRAY_RANGE_REF:
5756 case VIEW_CONVERT_EXPR:
5757 case REALPART_EXPR:
5758 case IMAGPART_EXPR:
5759 return 1;
5761 default:
5762 return 0;
5766 /* Given an rtx VALUE that may contain additions and multiplications, return
5767 an equivalent value that just refers to a register, memory, or constant.
5768 This is done by generating instructions to perform the arithmetic and
5769 returning a pseudo-register containing the value.
5771 The returned value may be a REG, SUBREG, MEM or constant. */
5773 rtx
5774 force_operand (rtx value, rtx target)
5776 rtx op1, op2;
5777 /* Use subtarget as the target for operand 0 of a binary operation. */
5778 rtx subtarget = get_subtarget (target);
5779 enum rtx_code code = GET_CODE (value);
5781 /* Check for a SUBREG applied to an expression produced by the loop optimizer. */
5782 if (code == SUBREG
5783 && !REG_P (SUBREG_REG (value))
5784 && !MEM_P (SUBREG_REG (value)))
5786 value = simplify_gen_subreg (GET_MODE (value),
5787 force_reg (GET_MODE (SUBREG_REG (value)),
5788 force_operand (SUBREG_REG (value),
5789 NULL_RTX)),
5790 GET_MODE (SUBREG_REG (value)),
5791 SUBREG_BYTE (value));
5792 code = GET_CODE (value);
5795 /* Check for a PIC address load. */
5796 if ((code == PLUS || code == MINUS)
5797 && XEXP (value, 0) == pic_offset_table_rtx
5798 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5799 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5800 || GET_CODE (XEXP (value, 1)) == CONST))
5802 if (!subtarget)
5803 subtarget = gen_reg_rtx (GET_MODE (value));
5804 emit_move_insn (subtarget, value);
5805 return subtarget;
5808 if (ARITHMETIC_P (value))
5810 op2 = XEXP (value, 1);
5811 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5812 subtarget = 0;
5813 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5815 code = PLUS;
5816 op2 = negate_rtx (GET_MODE (value), op2);
5819 /* Check for an addition with OP2 a constant integer and our first
5820 operand a PLUS of a virtual register and something else. In that
5821 case, we want to emit the sum of the virtual register and the
5822 constant first and then add the other value. This allows virtual
5823 register instantiation to simply modify the constant rather than
5824 creating another one around this addition. */
5825 if (code == PLUS && GET_CODE (op2) == CONST_INT
5826 && GET_CODE (XEXP (value, 0)) == PLUS
5827 && REG_P (XEXP (XEXP (value, 0), 0))
5828 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5829 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5831 rtx temp = expand_simple_binop (GET_MODE (value), code,
5832 XEXP (XEXP (value, 0), 0), op2,
5833 subtarget, 0, OPTAB_LIB_WIDEN);
5834 return expand_simple_binop (GET_MODE (value), code, temp,
5835 force_operand (XEXP (XEXP (value,
5836 0), 1), 0),
5837 target, 0, OPTAB_LIB_WIDEN);
5840 op1 = force_operand (XEXP (value, 0), subtarget);
5841 op2 = force_operand (op2, NULL_RTX);
5842 switch (code)
5844 case MULT:
5845 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5846 case DIV:
5847 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5848 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5849 target, 1, OPTAB_LIB_WIDEN);
5850 else
5851 return expand_divmod (0,
5852 FLOAT_MODE_P (GET_MODE (value))
5853 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5854 GET_MODE (value), op1, op2, target, 0);
5855 break;
5856 case MOD:
5857 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5858 target, 0);
5859 break;
5860 case UDIV:
5861 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5862 target, 1);
5863 break;
5864 case UMOD:
5865 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5866 target, 1);
5867 break;
5868 case ASHIFTRT:
5869 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5870 target, 0, OPTAB_LIB_WIDEN);
5871 break;
5872 default:
5873 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5874 target, 1, OPTAB_LIB_WIDEN);
5877 if (UNARY_P (value))
5879 if (!target)
5880 target = gen_reg_rtx (GET_MODE (value));
5881 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5882 switch (code)
5884 case ZERO_EXTEND:
5885 case SIGN_EXTEND:
5886 case TRUNCATE:
5887 convert_move (target, op1, code == ZERO_EXTEND);
5888 return target;
5890 case FIX:
5891 case UNSIGNED_FIX:
5892 expand_fix (target, op1, code == UNSIGNED_FIX);
5893 return target;
5895 case FLOAT:
5896 case UNSIGNED_FLOAT:
5897 expand_float (target, op1, code == UNSIGNED_FLOAT);
5898 return target;
5900 default:
5901 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5905 #ifdef INSN_SCHEDULING
5906 /* On machines that have insn scheduling, we want all memory references to be
5907 explicit, so we need to deal with such paradoxical SUBREGs. */
5908 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5909 && (GET_MODE_SIZE (GET_MODE (value))
5910 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5911 value
5912 = simplify_gen_subreg (GET_MODE (value),
5913 force_reg (GET_MODE (SUBREG_REG (value)),
5914 force_operand (SUBREG_REG (value),
5915 NULL_RTX)),
5916 GET_MODE (SUBREG_REG (value)),
5917 SUBREG_BYTE (value));
5918 #endif
5920 return value;
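/* A typical use, sketched with hypothetical RTL: given
VALUE == (plus:SI (reg:SI 58) (const_int 4)), the call

rtx addr = force_operand (XEXP (mem, 0), NULL_RTX);

emits an add into a fresh pseudo and returns that pseudo, suitable
wherever a general operand is required. Passing NULL_RTX as TARGET
lets the routine choose the register itself. */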
5923 /* Subroutine of expand_expr: return nonzero iff there is no way that
5924 EXP can reference X, which is being modified. TOP_P is nonzero if this
5925 call is going to be used to determine whether we need a temporary
5926 for EXP, as opposed to a recursive call to this function.
5928 It is always safe for this routine to return zero since it merely
5929 searches for optimization opportunities. */
5931 static int
5932 safe_from_p (rtx x, tree exp, int top_p)
5934 rtx exp_rtl = 0;
5935 int i, nops;
5937 if (x == 0
5938 /* If EXP has varying size, we MUST use a target since we currently
5939 have no way of allocating temporaries of variable size
5940 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5941 So we assume here that something at a higher level has prevented a
5942 clash. This is somewhat bogus, but the best we can do. Only
5943 do this when X is BLKmode and when we are at the top level. */
5944 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5945 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5946 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5947 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5948 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5949 != INTEGER_CST)
5950 && GET_MODE (x) == BLKmode)
5951 /* If X is in the outgoing argument area, it is always safe. */
5952 || (MEM_P (x)
5953 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5954 || (GET_CODE (XEXP (x, 0)) == PLUS
5955 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5956 return 1;
5958 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5959 find the underlying pseudo. */
5960 if (GET_CODE (x) == SUBREG)
5962 x = SUBREG_REG (x);
5963 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5964 return 0;
5967 /* Now look at our tree code and possibly recurse. */
5968 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5970 case tcc_declaration:
5971 exp_rtl = DECL_RTL_IF_SET (exp);
5972 break;
5974 case tcc_constant:
5975 return 1;
5977 case tcc_exceptional:
5978 if (TREE_CODE (exp) == TREE_LIST)
5980 while (1)
5982 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5983 return 0;
5984 exp = TREE_CHAIN (exp);
5985 if (!exp)
5986 return 1;
5987 if (TREE_CODE (exp) != TREE_LIST)
5988 return safe_from_p (x, exp, 0);
5991 else if (TREE_CODE (exp) == ERROR_MARK)
5992 return 1; /* An already-visited SAVE_EXPR? */
5993 else
5994 return 0;
5996 case tcc_statement:
5997 /* The only case we look at here is the DECL_INITIAL inside a
5998 DECL_EXPR. */
5999 return (TREE_CODE (exp) != DECL_EXPR
6000 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6001 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6002 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6004 case tcc_binary:
6005 case tcc_comparison:
6006 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6007 return 0;
6008 /* Fall through. */
6010 case tcc_unary:
6011 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6013 case tcc_expression:
6014 case tcc_reference:
6015 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6016 the expression. If it is set, we conflict iff we are that rtx or
6017 both are in memory. Otherwise, we check all operands of the
6018 expression recursively. */
6020 switch (TREE_CODE (exp))
6022 case ADDR_EXPR:
6023 /* If the operand is static or we are static, we can't conflict.
6024 Likewise if we don't conflict with the operand at all. */
6025 if (staticp (TREE_OPERAND (exp, 0))
6026 || TREE_STATIC (exp)
6027 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6028 return 1;
6030 /* Otherwise, the only way this can conflict is if we are taking
6031 the address of a DECL whose address is part of X, which is
6032 very rare. */
6033 exp = TREE_OPERAND (exp, 0);
6034 if (DECL_P (exp))
6036 if (!DECL_RTL_SET_P (exp)
6037 || !MEM_P (DECL_RTL (exp)))
6038 return 0;
6039 else
6040 exp_rtl = XEXP (DECL_RTL (exp), 0);
6042 break;
6044 case MISALIGNED_INDIRECT_REF:
6045 case ALIGN_INDIRECT_REF:
6046 case INDIRECT_REF:
6047 if (MEM_P (x)
6048 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6049 get_alias_set (exp)))
6050 return 0;
6051 break;
6053 case CALL_EXPR:
6054 /* Assume that the call will clobber all hard registers and
6055 all of memory. */
6056 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6057 || MEM_P (x))
6058 return 0;
6059 break;
6061 case WITH_CLEANUP_EXPR:
6062 case CLEANUP_POINT_EXPR:
6063 /* Lowered by gimplify.c. */
6064 gcc_unreachable ();
6066 case SAVE_EXPR:
6067 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6069 default:
6070 break;
6073 /* If we have an rtx, we do not need to scan our operands. */
6074 if (exp_rtl)
6075 break;
6077 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6078 for (i = 0; i < nops; i++)
6079 if (TREE_OPERAND (exp, i) != 0
6080 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6081 return 0;
6083 /* If this is a language-specific tree code, it may require
6084 special handling. */
6085 if ((unsigned int) TREE_CODE (exp)
6086 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6087 && !lang_hooks.safe_from_p (x, exp))
6088 return 0;
6089 break;
6091 case tcc_type:
6092 /* Should never get a type here. */
6093 gcc_unreachable ();
6096 /* If we have an rtl, find any enclosed object. Then see if we conflict
6097 with it. */
6098 if (exp_rtl)
6100 if (GET_CODE (exp_rtl) == SUBREG)
6102 exp_rtl = SUBREG_REG (exp_rtl);
6103 if (REG_P (exp_rtl)
6104 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6105 return 0;
6108 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
6109 are memory and they conflict. */
6110 return ! (rtx_equal_p (x, exp_rtl)
6111 || (MEM_P (x) && MEM_P (exp_rtl)
6112 && true_dependence (exp_rtl, VOIDmode, x,
6113 rtx_addr_varies_p)));
6116 /* If we reach here, it is safe. */
6117 return 1;
6121 /* Return the highest power of two that EXP is known to be a multiple of.
6122 This is used in updating alignment of MEMs in array references. */
6124 unsigned HOST_WIDE_INT
6125 highest_pow2_factor (tree exp)
6127 unsigned HOST_WIDE_INT c0, c1;
6129 switch (TREE_CODE (exp))
6131 case INTEGER_CST:
6132 /* We can find the lowest bit that's a one. If the low
6133 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6134 We need to handle this case since we can find it in a COND_EXPR,
6135 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6136 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6137 later ICE. */
6138 if (TREE_CONSTANT_OVERFLOW (exp))
6139 return BIGGEST_ALIGNMENT;
6140 else
6142 /* Note: tree_low_cst is intentionally not used here,
6143 we don't care about the upper bits. */
6144 c0 = TREE_INT_CST_LOW (exp);
6145 c0 &= -c0;
6146 return c0 ? c0 : BIGGEST_ALIGNMENT;
6148 break;
6150 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6151 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6152 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6153 return MIN (c0, c1);
6155 case MULT_EXPR:
6156 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6157 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6158 return c0 * c1;
6160 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6161 case CEIL_DIV_EXPR:
6162 if (integer_pow2p (TREE_OPERAND (exp, 1))
6163 && host_integerp (TREE_OPERAND (exp, 1), 1))
6165 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6166 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6167 return MAX (1, c0 / c1);
6169 break;
6171 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6172 case SAVE_EXPR:
6173 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6175 case COMPOUND_EXPR:
6176 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6178 case COND_EXPR:
6179 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6180 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6181 return MIN (c0, c1);
6183 default:
6184 break;
6187 return 1;
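/* Worked example (tree illustrative): for (i * 4) + 8 with i an
arbitrary variable, the MULT_EXPR case yields 1 * 4 = 4, the
constant 8 yields 8, and the PLUS_EXPR case returns MIN (4, 8) = 4,
since i itself only guarantees a factor of 1. */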
6190 /* Similar, except that the alignment requirements of TARGET are
6191 taken into account. Assume it is at least as aligned as its
6192 type, unless it is a COMPONENT_REF in which case the layout of
6193 the structure gives the alignment. */
6195 static unsigned HOST_WIDE_INT
6196 highest_pow2_factor_for_target (tree target, tree exp)
6198 unsigned HOST_WIDE_INT target_align, factor;
6200 factor = highest_pow2_factor (exp);
6201 if (TREE_CODE (target) == COMPONENT_REF)
6202 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6203 else
6204 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6205 return MAX (factor, target_align);
6208 /* Expands variable VAR. */
6210 void
6211 expand_var (tree var)
6213 if (DECL_EXTERNAL (var))
6214 return;
6216 if (TREE_STATIC (var))
6217 /* If this is an inlined copy of a static local variable,
6218 look up the original decl. */
6219 var = DECL_ORIGIN (var);
6221 if (TREE_STATIC (var)
6222 ? !TREE_ASM_WRITTEN (var)
6223 : !DECL_RTL_SET_P (var))
6225 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6226 /* Should be ignored. */;
6227 else if (lang_hooks.expand_decl (var))
6228 /* OK. */;
6229 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6230 expand_decl (var);
6231 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6232 rest_of_decl_compilation (var, 0, 0);
6233 else
6234 /* No expansion needed. */
6235 gcc_assert (TREE_CODE (var) == TYPE_DECL
6236 || TREE_CODE (var) == CONST_DECL
6237 || TREE_CODE (var) == FUNCTION_DECL
6238 || TREE_CODE (var) == LABEL_DECL);
6242 /* Subroutine of expand_expr. Expand the two operands of a binary
6243 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6244 The value may be stored in TARGET if TARGET is nonzero. The
6245 MODIFIER argument is as documented by expand_expr. */
6247 static void
6248 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6249 enum expand_modifier modifier)
6251 if (! safe_from_p (target, exp1, 1))
6252 target = 0;
6253 if (operand_equal_p (exp0, exp1, 0))
6255 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6256 *op1 = copy_rtx (*op0);
6258 else
6260 /* If we need to preserve evaluation order, copy exp0 into its own
6261 temporary variable so that it can't be clobbered by exp1. */
6262 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6263 exp0 = save_expr (exp0);
6264 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6265 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6270 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6271 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6273 static rtx
6274 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6275 enum expand_modifier modifier)
6277 rtx result, subtarget;
6278 tree inner, offset;
6279 HOST_WIDE_INT bitsize, bitpos;
6280 int volatilep, unsignedp;
6281 enum machine_mode mode1;
6283 /* If we are taking the address of a constant and are at the top level,
6284 we have to use output_constant_def since we can't call force_const_mem
6285 at top level. */
6286 /* ??? This should be considered a front-end bug. We should not be
6287 generating ADDR_EXPR of something that isn't an LVALUE. The only
6288 exception here is STRING_CST. */
6289 if (TREE_CODE (exp) == CONSTRUCTOR
6290 || CONSTANT_CLASS_P (exp))
6291 return XEXP (output_constant_def (exp, 0), 0);
6293 /* Everything must be something allowed by is_gimple_addressable. */
6294 switch (TREE_CODE (exp))
6296 case INDIRECT_REF:
6297 /* This case will happen via recursion for &a->b. */
6298 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6300 case CONST_DECL:
6301 /* Recurse and make the output_constant_def clause above handle this. */
6302 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6303 tmode, modifier);
6305 case REALPART_EXPR:
6306 /* The real part of the complex number is always first, therefore
6307 the address is the same as the address of the parent object. */
6308 offset = 0;
6309 bitpos = 0;
6310 inner = TREE_OPERAND (exp, 0);
6311 break;
6313 case IMAGPART_EXPR:
6314 /* The imaginary part of the complex number is always second.
6315 The expression is therefore always offset by the size of the
6316 scalar type. */
6317 offset = 0;
6318 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6319 inner = TREE_OPERAND (exp, 0);
6320 break;
6322 default:
6323 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6324 expand_expr, as that can have various side effects; LABEL_DECLs, for
6325 example, may not have their DECL_RTL set yet. Assume language
6326 specific tree nodes can be expanded in some interesting way. */
6327 if (DECL_P (exp)
6328 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6330 result = expand_expr (exp, target, tmode,
6331 modifier == EXPAND_INITIALIZER
6332 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6334 /* If the DECL isn't in memory, then the DECL wasn't properly
6335 marked TREE_ADDRESSABLE, which will be either a front-end
6336 or a tree optimizer bug. */
6337 gcc_assert (MEM_P (result));
6338 result = XEXP (result, 0);
6340 /* ??? Is this needed anymore? */
6341 if (DECL_P (exp) && !TREE_USED (exp))
6343 assemble_external (exp);
6344 TREE_USED (exp) = 1;
6347 if (modifier != EXPAND_INITIALIZER
6348 && modifier != EXPAND_CONST_ADDRESS)
6349 result = force_operand (result, target);
6350 return result;
6353 /* Pass FALSE as the last argument to get_inner_reference although
6354 we are expanding to RTL. The rationale is that we know how to
6355 handle "aligning nodes" here: we can just bypass them because
6356 they won't change the final object whose address will be returned
6357 (they actually exist only for that purpose). */
6358 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6359 &mode1, &unsignedp, &volatilep, false);
6360 break;
6363 /* We must have made progress. */
6364 gcc_assert (inner != exp);
6366 subtarget = offset || bitpos ? NULL_RTX : target;
6367 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6369 if (offset)
6371 rtx tmp;
6373 if (modifier != EXPAND_NORMAL)
6374 result = force_operand (result, NULL);
6375 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6377 result = convert_memory_address (tmode, result);
6378 tmp = convert_memory_address (tmode, tmp);
6380 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6381 result = gen_rtx_PLUS (tmode, result, tmp);
6382 else
6384 subtarget = bitpos ? NULL_RTX : target;
6385 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6386 1, OPTAB_LIB_WIDEN);
6390 if (bitpos)
6392 /* Someone beforehand should have rejected taking the address
6393 of such an object. */
6394 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6396 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6397 if (modifier < EXPAND_SUM)
6398 result = force_operand (result, target);
6401 return result;
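/* For instance, taking the address of __imag__ z for a hypothetical
_Complex double z reaches the IMAGPART_EXPR case above with
bitpos == GET_MODE_BITSIZE (DFmode) == 64, so plus_constant offsets
the address of z by 64 / BITS_PER_UNIT = 8 bytes. */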
6404 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6405 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6407 static rtx
6408 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6409 enum expand_modifier modifier)
6411 enum machine_mode rmode;
6412 rtx result;
6414 /* Target mode of VOIDmode says "whatever's natural". */
6415 if (tmode == VOIDmode)
6416 tmode = TYPE_MODE (TREE_TYPE (exp));
6418 /* We can get called with some Weird Things if the user does silliness
6419 like "(short) &a". In that case, convert_memory_address won't do
6420 the right thing, so ignore the given target mode. */
6421 if (tmode != Pmode && tmode != ptr_mode)
6422 tmode = Pmode;
6424 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6425 tmode, modifier);
6427 /* Despite expand_expr's claims about ignoring TMODE when not
6428 strictly convenient, things break if we don't honor it. Note
6429 that combined with the above, we only do this for pointer modes. */
6430 rmode = GET_MODE (result);
6431 if (rmode == VOIDmode)
6432 rmode = tmode;
6433 if (rmode != tmode)
6434 result = convert_memory_address (tmode, result);
6436 return result;
6440 /* expand_expr: generate code for computing expression EXP.
6441 An rtx for the computed value is returned. The value is never null.
6442 In the case of a void EXP, const0_rtx is returned.
6444 The value may be stored in TARGET if TARGET is nonzero.
6445 TARGET is just a suggestion; callers must assume that
6446 the rtx returned may not be the same as TARGET.
6448 If TARGET is CONST0_RTX, it means that the value will be ignored.
6450 If TMODE is not VOIDmode, it suggests generating the
6451 result in mode TMODE. But this is done only when convenient.
6452 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6453 TMODE is just a suggestion; callers must assume that
6454 the rtx returned may not have mode TMODE.
6456 Note that TARGET may have neither TMODE nor MODE. In that case, it
6457 probably will not be used.
6459 If MODIFIER is EXPAND_SUM then when EXP is an addition
6460 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6461 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6462 products as above, or REG or MEM, or constant.
6463 Ordinarily in such cases we would output mul or add instructions
6464 and then return a pseudo reg containing the sum.
6466 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6467 it also marks a label as absolutely required (it can't be dead).
6468 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6469 This is used for outputting expressions used in initializers.
6471 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6472 with a constant address even if that address is not normally legitimate.
6473 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6475 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6476 a call parameter. Such targets require special care as we haven't yet
6477 marked TARGET so that it's safe from being trashed by libcalls. We
6478 don't want to use TARGET for anything but the final result;
6479 intermediate values must go elsewhere. Additionally, calls to
6480 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6482 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6483 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6484 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6485 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6486 recursively. */
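/* A sketch of the common call shapes (all illustrative): a caller that
just wants the value in its natural mode writes

rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

an initializer expander passes EXPAND_INITIALIZER as MODIFIER, and a
caller that will ignore the value passes const0_rtx as TARGET. */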
6488 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6489 enum expand_modifier, rtx *);
6491 rtx
6492 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6493 enum expand_modifier modifier, rtx *alt_rtl)
6495 int rn = -1;
6496 rtx ret, last = NULL;
6498 /* Handle ERROR_MARK before anybody tries to access its type. */
6499 if (TREE_CODE (exp) == ERROR_MARK
6500 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6502 ret = CONST0_RTX (tmode);
6503 return ret ? ret : const0_rtx;
6506 if (flag_non_call_exceptions)
6508 rn = lookup_stmt_eh_region (exp);
6509 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6510 if (rn >= 0)
6511 last = get_last_insn ();
6514 /* If this is an expression of some kind and it has an associated line
6515 number, then emit the line number before expanding the expression.
6517 We need to save and restore the file and line information so that
6518 errors discovered during expansion are emitted with the right
6519 information. It would be better if the diagnostic routines
6520 used the file/line information embedded in the tree nodes rather
6521 than globals. */
6522 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6524 location_t saved_location = input_location;
6525 input_location = EXPR_LOCATION (exp);
6526 emit_line_note (input_location);
6528 /* Record where the insns produced belong. */
6529 record_block_change (TREE_BLOCK (exp));
6531 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6533 input_location = saved_location;
6535 else
6537 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6540 /* If using non-call exceptions, mark all insns that may trap.
6541 expand_call() will mark CALL_INSNs before we get to this code,
6542 but it doesn't handle libcalls, and these may trap. */
6543 if (rn >= 0)
6545 rtx insn;
6546 for (insn = next_real_insn (last); insn;
6547 insn = next_real_insn (insn))
6549 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6550 /* If we want exceptions for non-call insns, any
6551 may_trap_p instruction may throw. */
6552 && GET_CODE (PATTERN (insn)) != CLOBBER
6553 && GET_CODE (PATTERN (insn)) != USE
6554 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6556 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6557 REG_NOTES (insn));
6562 return ret;
6565 static rtx
6566 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6567 enum expand_modifier modifier, rtx *alt_rtl)
6569 rtx op0, op1, temp;
6570 tree type = TREE_TYPE (exp);
6571 int unsignedp;
6572 enum machine_mode mode;
6573 enum tree_code code = TREE_CODE (exp);
6574 optab this_optab;
6575 rtx subtarget, original_target;
6576 int ignore;
6577 tree context, subexp0, subexp1;
6578 bool reduce_bit_field = false;
6579 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6580 ? reduce_to_bit_field_precision ((expr), \
6581 target, \
6582 type) \
6583 : (expr))
6585 mode = TYPE_MODE (type);
6586 unsignedp = TYPE_UNSIGNED (type);
6587 if (lang_hooks.reduce_bit_field_operations
6588 && TREE_CODE (type) == INTEGER_TYPE
6589 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6591 /* An operation in what may be a bit-field type needs the
6592 result to be reduced to the precision of the bit-field type,
6593 which is narrower than that of the type's mode. */
6594 reduce_bit_field = true;
6595 if (modifier == EXPAND_STACK_PARM)
6596 target = 0;
6599 /* Use subtarget as the target for operand 0 of a binary operation. */
6600 subtarget = get_subtarget (target);
6601 original_target = target;
6602 ignore = (target == const0_rtx
6603 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6604 || code == CONVERT_EXPR || code == COND_EXPR
6605 || code == VIEW_CONVERT_EXPR)
6606 && TREE_CODE (type) == VOID_TYPE));
6608 /* If we are going to ignore this result, we need only do something
6609 if there is a side-effect somewhere in the expression. If there
6610 is, short-circuit the most common cases here. Note that we must
6611 not call expand_expr with anything but const0_rtx in case this
6612 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6614 if (ignore)
6616 if (! TREE_SIDE_EFFECTS (exp))
6617 return const0_rtx;
6619 /* Ensure we reference a volatile object even if value is ignored, but
6620 don't do this if all we are doing is taking its address. */
6621 if (TREE_THIS_VOLATILE (exp)
6622 && TREE_CODE (exp) != FUNCTION_DECL
6623 && mode != VOIDmode && mode != BLKmode
6624 && modifier != EXPAND_CONST_ADDRESS)
6626 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6627 if (MEM_P (temp))
6628 temp = copy_to_reg (temp);
6629 return const0_rtx;
6632 if (TREE_CODE_CLASS (code) == tcc_unary
6633 || code == COMPONENT_REF || code == INDIRECT_REF)
6634 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6635 modifier);
6637 else if (TREE_CODE_CLASS (code) == tcc_binary
6638 || TREE_CODE_CLASS (code) == tcc_comparison
6639 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6641 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6642 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6643 return const0_rtx;
6645 else if (code == BIT_FIELD_REF)
6647 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6648 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6649 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6650 return const0_rtx;
6653 target = 0;
6657 switch (code)
6659 case LABEL_DECL:
6661 tree function = decl_function_context (exp);
6663 temp = label_rtx (exp);
6664 temp = gen_rtx_LABEL_REF (Pmode, temp);
6666 if (function != current_function_decl
6667 && function != 0)
6668 LABEL_REF_NONLOCAL_P (temp) = 1;
6670 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6671 return temp;
6674 case SSA_NAME:
6675 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6676 NULL);
6678 case PARM_DECL:
6679 case VAR_DECL:
6680 /* If a static var's type was incomplete when the decl was written,
6681 but the type is complete now, lay out the decl now. */
6682 if (DECL_SIZE (exp) == 0
6683 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6684 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6685 layout_decl (exp, 0);
6687 /* ... fall through ... */
6689 case FUNCTION_DECL:
6690 case RESULT_DECL:
6691 gcc_assert (DECL_RTL (exp));
6693 /* Ensure the variable is marked as used even if it doesn't go through
6694 a parser. If it hasn't been used yet, write out an external
6695 definition. */
6696 if (! TREE_USED (exp))
6698 assemble_external (exp);
6699 TREE_USED (exp) = 1;
6702 /* Show we haven't gotten RTL for this yet. */
6703 temp = 0;
6705 /* Variables inherited from containing functions should have
6706 been lowered by this point. */
6707 context = decl_function_context (exp);
6708 gcc_assert (!context
6709 || context == current_function_decl
6710 || TREE_STATIC (exp)
6711 /* ??? C++ creates functions that are not TREE_STATIC. */
6712 || TREE_CODE (exp) == FUNCTION_DECL);
6714 /* This is the case of an array whose size is to be determined
6715 from its initializer, while the initializer is still being parsed.
6716 See expand_decl. */
6718 if (MEM_P (DECL_RTL (exp))
6719 && REG_P (XEXP (DECL_RTL (exp), 0)))
6720 temp = validize_mem (DECL_RTL (exp));
6722 /* If DECL_RTL is memory, we are in the normal case: if either the
6723 address is not valid, or it is not a register and -fforce-addr
6724 is specified, get the address into a register. */
6726 else if (MEM_P (DECL_RTL (exp))
6727 && modifier != EXPAND_CONST_ADDRESS
6728 && modifier != EXPAND_SUM
6729 && modifier != EXPAND_INITIALIZER
6730 && (! memory_address_p (DECL_MODE (exp),
6731 XEXP (DECL_RTL (exp), 0))
6732 || (flag_force_addr
6733 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6735 if (alt_rtl)
6736 *alt_rtl = DECL_RTL (exp);
6737 temp = replace_equiv_address (DECL_RTL (exp),
6738 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6741 /* If we got something, return it. But first, set the alignment
6742 if the address is a register. */
6743 if (temp != 0)
6745 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6746 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6748 return temp;
6751 /* If the mode of DECL_RTL does not match that of the decl, it
6752 must be a promoted value. We return a SUBREG of the wanted mode,
6753 but mark it so that we know that it was already extended. */
6755 if (REG_P (DECL_RTL (exp))
6756 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6758 enum machine_mode pmode;
6760 /* Get the signedness used for this variable. Ensure we get the
6761 same mode we got when the variable was declared. */
6762 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6763 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6764 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6766 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6767 SUBREG_PROMOTED_VAR_P (temp) = 1;
6768 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6769 return temp;
6772 return DECL_RTL (exp);
6774 case INTEGER_CST:
6775 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6776 TREE_INT_CST_HIGH (exp), mode);
6778 /* ??? If overflow is set, fold will have done an incomplete job,
6779 which can result in (plus xx (const_int 0)), which can get
6780 simplified by validate_replace_rtx during virtual register
6781 instantiation, which can result in unrecognizable insns.
6782 Avoid this by forcing all overflows into registers. */
6783 if (TREE_CONSTANT_OVERFLOW (exp)
6784 && modifier != EXPAND_INITIALIZER)
6785 temp = force_reg (mode, temp);
6787 return temp;
6789 case VECTOR_CST:
6790 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6791 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6792 return const_vector_from_tree (exp);
6793 else
6794 return expand_expr (build_constructor_from_list
6795 (TREE_TYPE (exp),
6796 TREE_VECTOR_CST_ELTS (exp)),
6797 ignore ? const0_rtx : target, tmode, modifier);
6799 case CONST_DECL:
6800 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6802 case REAL_CST:
6803 /* If optimized, generate immediate CONST_DOUBLE
6804 which will be turned into memory by reload if necessary.
6806 We used to force a register so that loop.c could see it. But
6807 this does not allow gen_* patterns to perform optimizations with
6808 the constants. It also produces two insns in cases like "x = 1.0;".
6809 On most machines, floating-point constants are not permitted in
6810 many insns, so we'd end up copying it to a register in any case.
6812 Now, we do the copying in expand_binop, if appropriate. */
6813 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6814 TYPE_MODE (TREE_TYPE (exp)));
6816 case COMPLEX_CST:
6817 /* Handle evaluating a complex constant in a CONCAT target. */
6818 if (original_target && GET_CODE (original_target) == CONCAT)
6820 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6821 rtx rtarg, itarg;
6823 rtarg = XEXP (original_target, 0);
6824 itarg = XEXP (original_target, 1);
6826 /* Move the real and imaginary parts separately. */
6827 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6828 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6830 if (op0 != rtarg)
6831 emit_move_insn (rtarg, op0);
6832 if (op1 != itarg)
6833 emit_move_insn (itarg, op1);
6835 return original_target;
6838 /* ... fall through ... */
6840 case STRING_CST:
6841 temp = output_constant_def (exp, 1);
6843 /* temp contains a constant address.
6844 On RISC machines where a constant address isn't valid,
6845 make some insns to get that address into a register. */
6846 if (modifier != EXPAND_CONST_ADDRESS
6847 && modifier != EXPAND_INITIALIZER
6848 && modifier != EXPAND_SUM
6849 && (! memory_address_p (mode, XEXP (temp, 0))
6850 || flag_force_addr))
6851 return replace_equiv_address (temp,
6852 copy_rtx (XEXP (temp, 0)));
6853 return temp;
6855 case SAVE_EXPR:
6857 tree val = TREE_OPERAND (exp, 0);
6858 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6860 if (!SAVE_EXPR_RESOLVED_P (exp))
6862 /* We can indeed still hit this case, typically via builtin
6863 expanders calling save_expr immediately before expanding
6864 something. Assume this means that we only have to deal
6865 with non-BLKmode values. */
6866 gcc_assert (GET_MODE (ret) != BLKmode);
6868 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6869 DECL_ARTIFICIAL (val) = 1;
6870 DECL_IGNORED_P (val) = 1;
6871 TREE_OPERAND (exp, 0) = val;
6872 SAVE_EXPR_RESOLVED_P (exp) = 1;
6874 if (!CONSTANT_P (ret))
6875 ret = copy_to_reg (ret);
6876 SET_DECL_RTL (val, ret);
6879 return ret;
6882 case GOTO_EXPR:
6883 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6884 expand_goto (TREE_OPERAND (exp, 0));
6885 else
6886 expand_computed_goto (TREE_OPERAND (exp, 0));
6887 return const0_rtx;
6889 case CONSTRUCTOR:
6890 /* If we don't need the result, just ensure we evaluate any
6891 subexpressions. */
6892 if (ignore)
6894 unsigned HOST_WIDE_INT idx;
6895 tree value;
6897 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6898 expand_expr (value, const0_rtx, VOIDmode, 0);
6900 return const0_rtx;
6903 /* Try to avoid creating a temporary at all. This is possible
6904 if all of the initializer is zero.
6905 FIXME: try to handle all [0..255] initializers we can handle
6906 with memset. */
6907 else if (TREE_STATIC (exp)
6908 && !TREE_ADDRESSABLE (exp)
6909 && target != 0 && mode == BLKmode
6910 && all_zeros_p (exp))
6912 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6913 return target;
6916 /* All elts simple constants => refer to a constant in memory. But
6917 if this is a non-BLKmode mode, let it store a field at a time
6918 since that should make a CONST_INT or CONST_DOUBLE when we
6919 fold. Likewise, if we have a target we can use, it is best to
6920 store directly into the target unless the type is large enough
6921 that memcpy will be used. If we are making an initializer and
6922 all operands are constant, put it in memory as well.
6924 FIXME: Avoid trying to fill vector constructors piece-meal.
6925 Output them with output_constant_def below unless we're sure
6926 they're zeros. This should go away when vector initializers
6927 are treated like VECTOR_CST instead of arrays.
6928 */
6929 else if ((TREE_STATIC (exp)
6930 && ((mode == BLKmode
6931 && ! (target != 0 && safe_from_p (target, exp, 1)))
6932 || TREE_ADDRESSABLE (exp)
6933 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6934 && (! MOVE_BY_PIECES_P
6935 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6936 TYPE_ALIGN (type)))
6937 && ! mostly_zeros_p (exp))))
6938 || ((modifier == EXPAND_INITIALIZER
6939 || modifier == EXPAND_CONST_ADDRESS)
6940 && TREE_CONSTANT (exp)))
6942 rtx constructor = output_constant_def (exp, 1);
6944 if (modifier != EXPAND_CONST_ADDRESS
6945 && modifier != EXPAND_INITIALIZER
6946 && modifier != EXPAND_SUM)
6947 constructor = validize_mem (constructor);
6949 return constructor;
6951 else
6953 /* Handle calls that pass values in multiple non-contiguous
6954 locations. The Irix 6 ABI has examples of this. */
6955 if (target == 0 || ! safe_from_p (target, exp, 1)
6956 || GET_CODE (target) == PARALLEL
6957 || modifier == EXPAND_STACK_PARM)
6958 target
6959 = assign_temp (build_qualified_type (type,
6960 (TYPE_QUALS (type)
6961 | (TREE_READONLY (exp)
6962 * TYPE_QUAL_CONST))),
6963 0, TREE_ADDRESSABLE (exp), 1);
6965 store_constructor (exp, target, 0, int_expr_size (exp));
6966 return target;
6969 case MISALIGNED_INDIRECT_REF:
6970 case ALIGN_INDIRECT_REF:
6971 case INDIRECT_REF:
6973 tree exp1 = TREE_OPERAND (exp, 0);
6975 if (modifier != EXPAND_WRITE)
6977 tree t;
6979 t = fold_read_from_constant_string (exp);
6980 if (t)
6981 return expand_expr (t, target, tmode, modifier);
6984 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6985 op0 = memory_address (mode, op0);
6987 if (code == ALIGN_INDIRECT_REF)
6989 int align = TYPE_ALIGN_UNIT (type);
6990 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6991 op0 = memory_address (mode, op0);
6994 temp = gen_rtx_MEM (mode, op0);
6996 set_mem_attributes (temp, exp, 0);
6998 /* Resolve the misalignment now, so that we don't have to remember
6999 to resolve it later. Of course, this only works for reads. */
7000 /* ??? When we get around to supporting writes, we'll have to handle
7001 this in store_expr directly. The vectorizer isn't generating
7002 those yet, however. */
7003 if (code == MISALIGNED_INDIRECT_REF)
7005 int icode;
7006 rtx reg, insn;
7008 gcc_assert (modifier == EXPAND_NORMAL
7009 || modifier == EXPAND_STACK_PARM);
7011 /* The vectorizer should have already checked the mode. */
7012 icode = movmisalign_optab->handlers[mode].insn_code;
7013 gcc_assert (icode != CODE_FOR_nothing);
7015 /* We've already validated the memory, and we're creating a
7016 new pseudo destination. The predicates really can't fail. */
7017 reg = gen_reg_rtx (mode);
7019 /* Nor can the insn generator. */
7020 insn = GEN_FCN (icode) (reg, temp);
7021 emit_insn (insn);
7023 return reg;
7026 return temp;
7029 case TARGET_MEM_REF:
7031 struct mem_address addr;
7033 get_address_description (exp, &addr);
7034 op0 = addr_for_mem_ref (&addr, true);
7035 op0 = memory_address (mode, op0);
7036 temp = gen_rtx_MEM (mode, op0);
7037 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7039 return temp;
7041 case ARRAY_REF:
7044 tree array = TREE_OPERAND (exp, 0);
7045 tree index = TREE_OPERAND (exp, 1);
7047 /* Fold an expression like: "foo"[2].
7048 This is not done in fold so it won't happen inside &.
7049 Don't fold if this is for wide characters since it's too
7050 difficult to do correctly and this is a very rare case. */
7052 if (modifier != EXPAND_CONST_ADDRESS
7053 && modifier != EXPAND_INITIALIZER
7054 && modifier != EXPAND_MEMORY)
7056 tree t = fold_read_from_constant_string (exp);
7058 if (t)
7059 return expand_expr (t, target, tmode, modifier);
7062 /* If this is a constant index into a constant array,
7063 just get the value from the array. Handle both the cases when
7064 we have an explicit constructor and when our operand is a variable
7065 that was declared const. */
7067 if (modifier != EXPAND_CONST_ADDRESS
7068 && modifier != EXPAND_INITIALIZER
7069 && modifier != EXPAND_MEMORY
7070 && TREE_CODE (array) == CONSTRUCTOR
7071 && ! TREE_SIDE_EFFECTS (array)
7072 && TREE_CODE (index) == INTEGER_CST)
7074 unsigned HOST_WIDE_INT ix;
7075 tree field, value;
7077 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7078 field, value)
7079 if (tree_int_cst_equal (field, index))
7081 if (!TREE_SIDE_EFFECTS (value))
7082 return expand_expr (fold (value), target, tmode, modifier);
7083 break;
7087 else if (optimize >= 1
7088 && modifier != EXPAND_CONST_ADDRESS
7089 && modifier != EXPAND_INITIALIZER
7090 && modifier != EXPAND_MEMORY
7091 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7092 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7093 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7094 && targetm.binds_local_p (array))
7096 if (TREE_CODE (index) == INTEGER_CST)
7098 tree init = DECL_INITIAL (array);
7100 if (TREE_CODE (init) == CONSTRUCTOR)
7102 unsigned HOST_WIDE_INT ix;
7103 tree field, value;
7105 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7106 field, value)
7107 if (tree_int_cst_equal (field, index))
7109 if (!TREE_SIDE_EFFECTS (value))
7110 return expand_expr (fold (value), target, tmode,
7111 modifier);
7112 break;
7115 else if (TREE_CODE (init) == STRING_CST
7116 && 0 > compare_tree_int (index,
7117 TREE_STRING_LENGTH (init)))
7119 tree type = TREE_TYPE (TREE_TYPE (init));
7120 enum machine_mode mode = TYPE_MODE (type);
7122 if (GET_MODE_CLASS (mode) == MODE_INT
7123 && GET_MODE_SIZE (mode) == 1)
7124 return gen_int_mode (TREE_STRING_POINTER (init)
7125 [TREE_INT_CST_LOW (index)], mode);
7130 goto normal_inner_ref;
7132 case COMPONENT_REF:
7133 /* If the operand is a CONSTRUCTOR, we can just extract the
7134 appropriate field if it is present. */
7135 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7137 unsigned HOST_WIDE_INT idx;
7138 tree field, value;
7140 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7141 idx, field, value)
7142 if (field == TREE_OPERAND (exp, 1)
7143 /* We can normally use the value of the field in the
7144 CONSTRUCTOR. However, if this is a bitfield in
7145 an integral mode that we can fit in a HOST_WIDE_INT,
7146 we must mask only the number of bits in the bitfield,
7147 since this is done implicitly by the constructor. If
7148 the bitfield does not meet either of those conditions,
7149 we can't do this optimization. */
7150 && (! DECL_BIT_FIELD (field)
7151 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7152 && (GET_MODE_BITSIZE (DECL_MODE (field))
7153 <= HOST_BITS_PER_WIDE_INT))))
7155 if (DECL_BIT_FIELD (field)
7156 && modifier == EXPAND_STACK_PARM)
7157 target = 0;
7158 op0 = expand_expr (value, target, tmode, modifier);
7159 if (DECL_BIT_FIELD (field))
7161 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7162 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7164 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7166 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7167 op0 = expand_and (imode, op0, op1, target);
7169 else
7171 tree count
7172 = build_int_cst (NULL_TREE,
7173 GET_MODE_BITSIZE (imode) - bitsize);
7175 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7176 target, 0);
7177 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7178 target, 0);
7182 return op0;
7185 goto normal_inner_ref;
7187 case BIT_FIELD_REF:
7188 case ARRAY_RANGE_REF:
7189 normal_inner_ref:
7191 enum machine_mode mode1;
7192 HOST_WIDE_INT bitsize, bitpos;
7193 tree offset;
7194 int volatilep = 0;
7195 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7196 &mode1, &unsignedp, &volatilep, true);
7197 rtx orig_op0;
7199 /* If we got back the original object, something is wrong. Perhaps
7200 we are evaluating an expression too early. In any event, don't
7201 infinitely recurse. */
7202 gcc_assert (tem != exp);
7204 /* If TEM's type is a union of variable size, pass TARGET to the inner
7205 computation, since it will need a temporary and TARGET is known
7206 to suffice. This occurs in unchecked conversion in Ada. */
7208 orig_op0 = op0
7209 = expand_expr (tem,
7210 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7211 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7212 != INTEGER_CST)
7213 && modifier != EXPAND_STACK_PARM
7214 ? target : NULL_RTX),
7215 VOIDmode,
7216 (modifier == EXPAND_INITIALIZER
7217 || modifier == EXPAND_CONST_ADDRESS
7218 || modifier == EXPAND_STACK_PARM)
7219 ? modifier : EXPAND_NORMAL);
7221 /* If this is a constant, put it into a register if it is a legitimate
7222 constant, OFFSET is 0, and we won't try to extract outside the
7223 register (in case we were passed a partially uninitialized object
7224 or a view_conversion to a larger size). Force the constant to
7225 memory otherwise. */
7226 if (CONSTANT_P (op0))
7228 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7229 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7230 && offset == 0
7231 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7232 op0 = force_reg (mode, op0);
7233 else
7234 op0 = validize_mem (force_const_mem (mode, op0));
7237 /* Otherwise, if this object is not in memory and we either have an
7238 offset, a BLKmode result, or a reference outside the object, put it
7239 there. Such cases can occur in Ada if we have unchecked conversion
7240 of an expression from a scalar type to an array or record type or
7241 for an ARRAY_RANGE_REF whose type is BLKmode. */
7242 else if (!MEM_P (op0)
7243 && (offset != 0
7244 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7245 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7247 tree nt = build_qualified_type (TREE_TYPE (tem),
7248 (TYPE_QUALS (TREE_TYPE (tem))
7249 | TYPE_QUAL_CONST));
7250 rtx memloc = assign_temp (nt, 1, 1, 1);
7252 emit_move_insn (memloc, op0);
7253 op0 = memloc;
7256 if (offset != 0)
7258 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7259 EXPAND_SUM);
7261 gcc_assert (MEM_P (op0));
7263 #ifdef POINTERS_EXTEND_UNSIGNED
7264 if (GET_MODE (offset_rtx) != Pmode)
7265 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7266 #else
7267 if (GET_MODE (offset_rtx) != ptr_mode)
7268 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7269 #endif
7271 if (GET_MODE (op0) == BLKmode
7272 /* A constant address in OP0 can have VOIDmode, we must
7273 not try to call force_reg in that case. */
7274 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7275 && bitsize != 0
7276 && (bitpos % bitsize) == 0
7277 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7278 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7280 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7281 bitpos = 0;
7284 op0 = offset_address (op0, offset_rtx,
7285 highest_pow2_factor (offset));
7288 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7289 record its alignment as BIGGEST_ALIGNMENT. */
7290 if (MEM_P (op0) && bitpos == 0 && offset != 0
7291 && is_aligning_offset (offset, tem))
7292 set_mem_align (op0, BIGGEST_ALIGNMENT);
7294 /* Don't forget about volatility even if this is a bitfield. */
7295 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7297 if (op0 == orig_op0)
7298 op0 = copy_rtx (op0);
7300 MEM_VOLATILE_P (op0) = 1;
7303 /* The following code doesn't handle CONCAT.
7304 Assume only bitpos == 0 can be used for CONCAT, due to
7305 one-element arrays having the same mode as their element. */
7306 if (GET_CODE (op0) == CONCAT)
7308 gcc_assert (bitpos == 0
7309 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7310 return op0;
        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || REG_P (op0) || GET_CODE (op0) == SUBREG
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_INITIALIZER)
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (mode1 != BLKmode
                && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
                      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
                      || (MEM_P (op0)
                          && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
                              || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
                     && ((modifier == EXPAND_CONST_ADDRESS
                          || modifier == EXPAND_INITIALIZER)
                         ? STRICT_ALIGNMENT
                         : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
                    || (bitpos % BITS_PER_UNIT != 0)))
            /* If the type and the field are a constant size and the
               size of the type isn't the same size as the bitfield,
               we must use bitfield operations.  */
            || (bitsize >= 0
                && TYPE_SIZE (TREE_TYPE (exp))
                && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
                && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
                                          bitsize)))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode
                && ! (target != 0 && MEM_P (op0)
                      && MEM_P (target)
                      && bitpos % BITS_PER_UNIT == 0))
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              {
                if (target == 0)
                  target = assign_temp (type, 0, 1, 1);

                if (bitsize == 0)
                  return target;

                /* In this case, BITPOS must start at a byte boundary and
                   TARGET, if specified, must be a MEM.  */
                gcc_assert (MEM_P (op0)
                            && (!target || MEM_P (target))
                            && !(bitpos % BITS_PER_UNIT));

                emit_block_move (target,
                                 adjust_address (op0, VOIDmode,
                                                 bitpos / BITS_PER_UNIT),
                                 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                          / BITS_PER_UNIT),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

                return target;
              }
            op0 = validize_mem (op0);

            if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
              mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

            op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
                                     (modifier == EXPAND_STACK_PARM
                                      ? NULL_RTX : target),
                                     ext_mode, ext_mode);

            /* If the result is a record type and BITSIZE is narrower than
               the mode of OP0, an integral mode, and this is a big endian
               machine, we must put the field into the high-order bits.  */
            if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
                && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
                && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
              op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
                                  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
                                            - bitsize),
                                  op0, 1);
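            /* E.g., extracting an 8-bit field into an SImode register on
               a 32-bit big-endian machine: extract_bit_field leaves the
               value in bits 0..7, and the shift by 32 - 8 = 24 moves it
               to the high-order end, which is where the in-memory image
               of the record keeps it.  */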
            /* If the result type is BLKmode, store the data into a temporary
               of the appropriate type, but with the mode corresponding to the
               mode for the data we have (op0's mode).  It's tempting to make
               this a constant type, since we know it's only being stored once,
               but that can cause problems if we are taking the address of this
               COMPONENT_REF because the MEM of any reference via that address
               will have flags corresponding to the type, which will not
               necessarily be constant.  */
            if (mode == BLKmode)
              {
                rtx new
                  = assign_stack_temp_for_type
                    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                set_mem_attributes (op0, exp, 1);
              }

            return op0;
          }
        /* If the result is BLKmode, use that to access the object
           now as well.  */
        if (mode == BLKmode)
          mode1 = BLKmode;

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
        else
          op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

        if (op0 == orig_op0)
          op0 = copy_rtx (op0);

        set_mem_attributes (op0, exp, 0);
        if (REG_P (XEXP (op0, 0)))
          mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
            || modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_INITIALIZER)
          return op0;
        else if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

        convert_move (target, op0, unsignedp);
        return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == FUNCTION_DECL)
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        {
          if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
              == BUILT_IN_FRONTEND)
            return lang_hooks.expand_expr (exp, original_target,
                                           tmode, modifier,
                                           alt_rtl);
          else
            return expand_builtin (exp, target, subtarget, tmode, ignore);
        }

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
        return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

          /* If both input and output are BLKmode, this conversion isn't doing
             anything except possibly changing memory attributes.  */
          if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
            {
              rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
                                        modifier);

              result = copy_rtx (result);
              set_mem_attributes (result, exp, 0);
              return result;
            }

          if (target == 0)
            {
              if (TYPE_MODE (type) != BLKmode)
                target = gen_reg_rtx (TYPE_MODE (type));
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (MEM_P (target))
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        adjust_address (target, TYPE_MODE (valtype), 0),
                        modifier == EXPAND_STACK_PARM);

          else
            {
              gcc_assert (REG_P (target));

              /* Store this field into a union of the proper type.  */
              store_field (target,
                           MIN ((int_size_in_bytes (TREE_TYPE
                                                    (TREE_OPERAND (exp, 0)))
                                 * BITS_PER_UNIT),
                                (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
                           0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                           type, 0);
            }

          /* Return the entire union.  */
          return target;
        }
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
                             modifier);

          /* If the signedness of the conversion differs and OP0 is
             a promoted SUBREG, clear that indication since we now
             have to do the proper extension.  */
          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
              && GET_CODE (op0) == SUBREG)
            SUBREG_PROMOTED_VAR_P (op0) = 0;

          return REDUCE_BIT_FIELD (op0);
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
        ;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      else if (CONSTANT_P (op0))
        {
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
          enum machine_mode inner_mode = TYPE_MODE (inner_type);

          if (modifier == EXPAND_INITIALIZER)
            op0 = simplify_gen_subreg (mode, op0, inner_mode,
                                       subreg_lowpart_offset (mode,
                                                              inner_mode));
          else
            op0 = convert_modes (mode, inner_mode, op0,
                                 TYPE_UNSIGNED (inner_type));
        }

      else if (modifier == EXPAND_INITIALIZER)
        op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      else if (target == 0)
        op0 = convert_to_mode (mode, op0,
                               TYPE_UNSIGNED (TREE_TYPE
                                              (TREE_OPERAND (exp, 0))));
      else
        {
          convert_move (target, op0,
                        TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
          op0 = target;
        }

      return REDUCE_BIT_FIELD (op0);
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
        ;
      /* If neither mode is BLKmode, and both modes are the same size
         then we can use gen_lowpart.  */
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
               && GET_MODE_SIZE (TYPE_MODE (type))
                  == GET_MODE_SIZE (GET_MODE (op0)))
        {
          if (GET_CODE (op0) == SUBREG)
            op0 = force_reg (GET_MODE (op0), op0);
          op0 = gen_lowpart (TYPE_MODE (type), op0);
        }
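      /* This is the path a type pun such as a VIEW_CONVERT_EXPR from
         float to a 32-bit integer takes when SFmode and SImode have
         equal size: the bits are reinterpreted in place via gen_lowpart,
         with no round trip through memory.  */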
      /* If both modes are integral, then we can convert from one to the
         other.  */
      else if (SCALAR_INT_MODE_P (GET_MODE (op0))
               && SCALAR_INT_MODE_P (TYPE_MODE (type)))
        op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
                             TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      /* As a last resort, spill op0 to memory, and reload it in a
         different mode.  */
      else if (!MEM_P (op0))
        {
          /* If the operand is not a MEM, force it into memory.  Since we
             are going to be changing the mode of the MEM, don't call
             force_const_mem for constants because we don't allow pool
             constants to change mode.  */
          tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

          gcc_assert (!TREE_ADDRESSABLE (exp));

          if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
            target
              = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

          emit_move_insn (target, op0);
          op0 = target;
        }
      /* At this point, OP0 is in the correct mode.  If the output type is
         such that the operand is known to be aligned, indicate that it is.
         Otherwise, we need only be concerned about alignment for non-BLKmode
         results.  */
      if (MEM_P (op0))
        {
          op0 = copy_rtx (op0);

          if (TYPE_ALIGN_OK (type))
            set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
          else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
                   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              HOST_WIDE_INT temp_size
                = MAX (int_size_in_bytes (inner_type),
                       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
              rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
                                                    temp_size, 0, type);
              rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

              gcc_assert (!TREE_ADDRESSABLE (exp));

              if (GET_MODE (op0) == BLKmode)
                emit_block_move (new_with_op0_mode, op0,
                                 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
                                 (modifier == EXPAND_STACK_PARM
                                  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
              else
                emit_move_insn (new_with_op0_mode, op0);

              op0 = new;
            }

          op0 = adjust_address (op0, TYPE_MODE (type), 0);
        }

      return op0;
    case PLUS_EXPR:
      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
          && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }
      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
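              /* E.g., with a 64-bit HOST_WIDE_INT and a 32-bit target,
                 the low half 0x80000000 must become the sign-extended
                 CONST_INT -0x80000000 to be canonical RTL;
                 immed_double_const performs exactly that truncation and
                 extension.  */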
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return REDUCE_BIT_FIELD (op1);
            }
          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Return a PLUS if modifier says it's OK.  */
                  if (modifier == EXPAND_SUM
                      || modifier == EXPAND_INITIALIZER)
                    return simplify_gen_binary (PLUS, mode, op0, op1);
                  goto binop2;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return REDUCE_BIT_FIELD (op0);
            }
        }
      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           subtarget, &op0, &op1, 0);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                           NULL_RTX, &op0, &op1, modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
          else
            return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
        }

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          tree exp1 = TREE_OPERAND (exp, 1);

          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          if (!REG_P (op0))
            op0 = force_operand (op0, NULL_RTX);
          if (!REG_P (op0))
            op0 = copy_to_mode_reg (mode, op0);

          return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
                             gen_int_mode (tree_low_cst (exp1, 0),
                                           TYPE_MODE (TREE_TYPE (exp1)))));
        }
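      /* The (mult (reg) (const_int N)) form is deliberately not forced
         into a register: when the product feeds an address under
         EXPAND_SUM, targets with scaled-index addressing can fold the
         multiplication into the address itself, e.g. for P = &ARR[I]
         with 4-byte elements.  */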
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
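      /* E.g., multiplying two values extended from a half-width type,
         as in (int) h1 * (int) h2 with h1 and h2 of a narrower integer
         type, can use a widening multiply (smul/umul/usmul_widen_optab)
         on the narrow operands instead of extending both and doing a
         full-width multiply.  */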
      subexp0 = TREE_OPERAND (exp, 0);
      subexp1 = TREE_OPERAND (exp, 1);
      /* First, check if we have a multiplication of one signed and one
         unsigned operand.  */
      if (TREE_CODE (subexp0) == NOP_EXPR
          && TREE_CODE (subexp1) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
          && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
              != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
          this_optab = usmul_widen_optab;
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
                    expand_operands (TREE_OPERAND (subexp0, 0),
                                     TREE_OPERAND (subexp1, 0),
                                     NULL_RTX, &op0, &op1, 0);
                  else
                    expand_operands (TREE_OPERAND (subexp0, 0),
                                     TREE_OPERAND (subexp1, 0),
                                     NULL_RTX, &op1, &op0, 0);

                  goto binop3;
                }
            }
        }
      /* Check for a multiplication with matching signedness.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
               && TREE_CODE (type) == INTEGER_TYPE
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
               && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                    && int_fits_type_p (TREE_OPERAND (exp, 1),
                                        TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                    /* Don't use a widening multiply if a shift will do.  */
                    && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                         > HOST_BITS_PER_WIDE_INT)
                        || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
                   ||
                   (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
                    && (TYPE_PRECISION (TREE_TYPE
                                        (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                        == TYPE_PRECISION (TREE_TYPE
                                           (TREE_OPERAND
                                            (TREE_OPERAND (exp, 0), 0))))
                    /* If both operands are extended, they must either both
                       be zero-extended or both be sign-extended.  */
                    && (TYPE_UNSIGNED (TREE_TYPE
                                       (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                        == TYPE_UNSIGNED (TREE_TYPE
                                          (TREE_OPERAND
                                           (TREE_OPERAND (exp, 0), 0)))))))
        {
          tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
          enum machine_mode innermode = TYPE_MODE (op0type);
          bool zextend_p = TYPE_UNSIGNED (op0type);
          optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
          this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

          if (mode == GET_MODE_2XWIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1),
                                     NULL_RTX, &op0, &op1, 0);
                  else
                    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                     NULL_RTX, &op0, &op1, 0);
                  goto binop3;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem, hipart;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  hipart = gen_highpart (innermode, temp);
                  htem = expand_mult_highpart_adjust (innermode, hipart,
                                                      op0, op1, hipart,
                                                      zextend_p);
                  if (htem != hipart)
                    emit_move_insn (hipart, htem);
                  return REDUCE_BIT_FIELD (temp);
                }
            }
        }
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, we could optimize the case
         where some terms of the dividend have coefficients divisible
         by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      gcc_unreachable ();  /* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;
    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          optab_for_tree_code (NEGATE_EXPR, type),
                          op0, target, 0);
      gcc_assert (temp);
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
                  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || (MEM_P (target) && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (REG_P (target)
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
      this_optab = optab_for_tree_code (code, type);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (! REG_P (target))
        target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }

      /* We generate better code and avoid problems with op1 mentioning
         target by forcing op1 into a pseudo if it isn't a constant.  */
      if (! CONSTANT_P (op1))
        op1 = force_reg (mode, op1);
      {
        enum rtx_code comparison_code;
        rtx cmpop1 = op1;

        if (code == MAX_EXPR)
          comparison_code = unsignedp ? GEU : GE;
        else
          comparison_code = unsignedp ? LEU : LE;

        /* Canonicalize to comparisons against 0.  */
        if (op1 == const1_rtx)
          {
            /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
               or (a != 0 ? a : 1) for unsigned.
               For MIN we are safe converting (a <= 1 ? a : 1)
               into (a <= 0 ? a : 1).  */
            cmpop1 = const0_rtx;
            if (code == MAX_EXPR)
              comparison_code = unsignedp ? NE : GT;
          }
        if (op1 == constm1_rtx && !unsignedp)
          {
            /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
               and (a <= -1 ? a : -1) into (a < 0 ? a : -1).  */
            cmpop1 = const0_rtx;
            if (code == MIN_EXPR)
              comparison_code = LT;
          }
#ifdef HAVE_conditional_move
        /* Use a conditional move if possible.  */
        if (can_conditionally_move_p (mode))
          {
            rtx insn;

            /* ??? Same problem as in expmed.c: emit_conditional_move
               forces a stack adjustment via compare_from_rtx, and we
               lose the stack adjustment if the sequence we are about
               to create is discarded.  */
            do_pending_stack_adjust ();

            start_sequence ();

            /* Try to emit the conditional move.  */
            insn = emit_conditional_move (target, comparison_code,
                                          op0, cmpop1, mode,
                                          op0, op1, mode,
                                          unsignedp);

            /* If we could do the conditional move, emit the sequence,
               and return.  */
            if (insn)
              {
                rtx seq = get_insns ();
                end_sequence ();
                emit_insn (seq);
                return target;
              }

            /* Otherwise discard the sequence and fall back to code with
               branches.  */
            end_sequence ();
          }
#endif
        if (target != op0)
          emit_move_insn (target, op0);

        temp = gen_label_rtx ();

        /* If this mode is an integer too wide to compare properly,
           compare word by word.  Rely on cse to optimize constant cases.  */
        if (GET_MODE_CLASS (mode) == MODE_INT
            && ! can_compare_p (GE, mode, ccp_jump))
          {
            if (code == MAX_EXPR)
              do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
                                            NULL_RTX, temp);
            else
              do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
                                            NULL_RTX, temp);
          }
        else
          {
            do_compare_rtx_and_jump (target, cmpop1, comparison_code,
                                     unsignedp, mode, NULL_RTX, NULL_RTX, temp);
          }
      }
      emit_move_insn (target, op1);
      emit_label (temp);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
         the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;
      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && REG_P (original_target)
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }
      /* If no set-flag instruction, must generate a conditional store
         into a temporary variable.  Drop through and handle this
         like && and ||.  */

      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && REG_P (target)
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_move_insn (target, const0_rtx);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_move_insn (target, const1_rtx);

      emit_label (op1);
      return ignore ? const0_rtx : target;
    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
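      /* Since op0 is known to be 0 or 1, logical negation is just a
         flip of the low bit: 0 ^ 1 == 1 and 1 ^ 1 == 0.  */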
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
    case STATEMENT_LIST:
      {
        tree_stmt_iterator iter;

        gcc_assert (ignore);

        for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
          expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;
    case COND_EXPR:
      /* A COND_EXPR with its type being VOID_TYPE represents a
         conditional jump and is handled in
         expand_gimple_cond_expr.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));

      /* Note that COND_EXPRs whose type is a structure or union
         are required to be constructed to contain assignments of
         a temporary variable, so that we can evaluate them here
         for side effect only.  If type is void, we must do likewise.  */

      gcc_assert (!TREE_ADDRESSABLE (type)
                  && !ignore
                  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
                  && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);

      /* If we are not to produce a result, we have no target.  Otherwise,
         if a target was specified use it; it will not be used as an
         intermediate target unless it is safe.  If no target, use a
         temporary.  */

      if (modifier != EXPAND_STACK_PARM
          && original_target
          && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
          && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
          && (! can_conditionally_move_p (mode)
              || REG_P (original_target))
#endif
          && !MEM_P (original_target))
        temp = original_target;
      else
        temp = assign_temp (type, 0, 0, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      op0 = gen_label_rtx ();
      op1 = gen_label_rtx ();
      jumpifnot (TREE_OPERAND (exp, 0), op0);
      store_expr (TREE_OPERAND (exp, 1), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
      store_expr (TREE_OPERAND (exp, 2), temp,
                  modifier == EXPAND_STACK_PARM);

      emit_label (op1);
      OK_DEFER_POP;
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        gcc_assert (ignore);

        /* Check for |= or &= of a bitfield of size 1 into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
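        /* In effect, s.a |= s.b with one-bit fields becomes
           "if (s.b) s.a = 1;", and s.a &= s.b becomes
           "if (!s.b) s.a = 0;": a test and a constant store instead of
           a read-modify-write of the destination bit.  */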
        if (TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)));
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        expand_assignment (lhs, rhs);

        return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);

    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

      if (!target)
        target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

      /* Move the real (op0) and imaginary (op1) parts to their location.  */
      write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, false);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return read_complex_part (op0, true);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
         initialization constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
         have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
                               modifier, alt_rtl);
    case REALIGN_LOAD_EXPR:
      {
        tree oprnd0 = TREE_OPERAND (exp, 0);
        tree oprnd1 = TREE_OPERAND (exp, 1);
        tree oprnd2 = TREE_OPERAND (exp, 2);
        rtx op2;

        this_optab = optab_for_tree_code (code, type);
        expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
        op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
        temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
                                  target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
      {
        op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
        this_optab = optab_for_tree_code (code, type);
        temp = expand_unop (mode, this_optab, op0, target, unsignedp);
        gcc_assert (temp);
        return temp;
      }

    case VEC_LSHIFT_EXPR:
    case VEC_RSHIFT_EXPR:
      {
        target = expand_vec_shift_expr (exp, target);
        return target;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
                                     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
                       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
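/* For instance, for an unsigned 3-bit type held in SImode this ANDs
   with the mask 7; for a signed 3-bit type it shifts left by 29 and
   then arithmetic-shifts right by 29, propagating bit 2 through the
   upper bits so the value is correctly sign-extended.  */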
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
        mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
                                   GET_MODE (exp));
      else
        mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
                                   ((unsigned HOST_WIDE_INT) 1
                                    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
                                   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
                                  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */
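/* The pattern matched below is the one generated for over-aligning an
   object, essentially ((- (T) &exp) & (ALIGN - 1)) with ALIGN a power
   of 2 larger than BIGGEST_ALIGNMENT: adding that offset to the address
   of EXP rounds it up to the next ALIGN boundary.  */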
static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
                           BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
         || TREE_CODE (offset) == NOP_EXPR
         || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}

/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
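/* For example, given "hello" + 2, or &buf[2] where buf is a const array
   initialized from a string literal, this returns the STRING_CST with
   *PTR_OFFSET set to 2.  The builtin expanders use this to fold string
   accesses at compile time.  */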
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;
  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* The variable must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
         constant and inside the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
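  /* E.g., (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0 becomes
     ((x >> 3) & 1) ^ 1.  */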
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}


/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
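
/* For example, for a switch whose case labels run from 5 to 9 the
   caller passes INDEX = x - 5 and RANGE = 4; indices 0 through 4
   select a table slot and everything else goes to DEFAULT_LABEL.  */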

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
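
  /* Continuing the 5..9 example: INDEX is x - 5 and RANGE is 4.  If
     x < 5 the subtraction wraps, so viewed as unsigned INDEX is huge
     and exceeds RANGE; if x > 9 then INDEX exceeds RANGE directly.  A
     single GTU branch thus catches both out-of-range directions.  */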

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
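
  /* On a target whose dispatch-table entries are 4 bytes wide, the
     expression built above is, schematically,

         (plus (mult index (const_int 4)) (label_ref table_label))

     in Pmode: the address of slot INDEX is the table base plus the
     scaled index.  */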
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold_build2 (MINUS_EXPR, index_type,
                            convert (index_type, index_expr),
                            convert (index_type, minval));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should perhaps return 1 when V4DI is requested and DI is not
     supported but V2DI is, though that situation is very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
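
/* For instance, a V4SImode VECTOR_CST that supplies only the elements
   {1, 2} comes back as

       (const_vector:V4SI [(const_int 1) (const_int 2)
                           (const_int 0) (const_int 0)])

   because the trailing loop zero-fills the elements the tree omits.  */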

#include "gt-expr.h"