/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

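/* Illustrative note (editor's sketch, not from the GCC sources): on a
   target where the stack grows downward and STACK_PUSH_CODE is not
   overridden, the definition above resolves to PRE_DEC, so a push of a
   value X in mode M is emitted as roughly

     (set (mem:M (pre_dec:P (reg:P sp))) X)

   i.e. the stack pointer is adjusted before the store.  */
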
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx, bool);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static rtx clear_storage_via_libcall (rtx, rtx, bool);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

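/* Worked example (editor's sketch, the numbers are hypothetical): on a
   target with 4-byte words and a MOVE_RATIO of 3, an aligned 8-byte
   copy needs two SImode moves, so move_by_pieces_ninsns returns 2 < 3
   and MOVE_BY_PIECES_P is true; the copy is expanded inline rather
   than through a memcpy libcall.  */
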
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

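/* Illustrative note (editor's sketch): after init_expr_once, a query
   such as

     if (direct_load[(int) SFmode])
       ...

   tells the expander that some hard register can be loaded from memory
   in SFmode by a single recognized move insn, so a field of that mode
   may be accessed directly instead of through an integer-mode copy.  */
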
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}

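/* Usage sketch (editor's addition, not from the GCC sources): widening
   a signed SImode pseudo SRC into a fresh DImode pseudo would be

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   where the 0 (UNSIGNEDP) requests sign extension; a nonzero value
   would request zero extension instead.  */
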
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into MODE is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

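/* Usage sketch (editor's addition): convert_modes is the value-returning
   counterpart of convert_move; for example

     rtx narrow = convert_modes (QImode, SImode, x, 1);

   yields X reinterpreted in QImode, either by taking a lowpart in place
   or by emitting a conversion into a fresh pseudo.  */
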
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is the maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}

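/* Usage sketch (editor's addition): a typical caller copies a small
   constant-sized block and keeps the start address, e.g.

     if (can_move_by_pieces (len, align))
       move_by_pieces (dest_mem, src_mem, len, align, 0);

   passing ENDP as 1 or 2 instead returns an address past the copied
   bytes, which is how mempcpy- and stpcpy-style expansions use it.  */
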
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}

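/* Worked example (editor's sketch, target numbers hypothetical): with
   4-byte words, full alignment, and MAX_SIZE of 5, moving L = 11 bytes
   counts 11 / 4 = 2 SImode insns (3 bytes left), then 3 / 2 = 1 HImode
   insn (1 byte left), then 1 QImode insn: N_INSNS ends at 4 and L at 0,
   satisfying the gcc_assert above.  */
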
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

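/* Usage sketch (editor's addition): expanding a struct assignment might
   emit

     emit_block_move (target_mem, source_mem, GEN_INT (size),
		      BLOCK_OP_NORMAL);

   and the four strategies above are tried in order: inline piecewise
   moves, a target movmem pattern, a memcpy libcall, or a byte loop.  */
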
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}

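/* Illustrative note (editor's sketch): the RTL emitted above behaves
   like the byte loop

     for (i = 0; i < size; i++)
       x[i] = y[i];

   with the comparison placed at the bottom so the body is skipped
   entirely when SIZE is zero.  */
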
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}

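/* Illustrative note (editor's sketch): such a group might describe a
   value returned in two registers, e.g.

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset within
   the value; gen_group_rtx replaces each hard register with a fresh
   pseudo of the same mode, keeping the offsets.  */
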
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
1975 if (MEM_P (dest)
1976 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1977 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1978 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1979 && bytelen == GET_MODE_SIZE (mode))
1980 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
1981 else
1982 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
1983 mode, tmps[i]);
1986 /* Copy from the pseudo into the (probable) hard reg. */
1987 if (orig_dst != dst)
1988 emit_move_insn (orig_dst, dst);
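/* Editorial sketch (illustrative assumptions only): spilling a value
   returned in two 4-byte hard registers to a fresh stack slot through
   emit_group_store.  TYPE is assumed to be the aggregate's type.  */
#if 0
static rtx
example_group_store (tree type)
{
  rtvec v = rtvec_alloc (2);
  rtx src, mem;

  RTVEC_ELT (v, 0) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (SImode, 0), GEN_INT (0));
  RTVEC_ELT (v, 1) = gen_rtx_EXPR_LIST (VOIDmode,
					gen_rtx_REG (SImode, 1), GEN_INT (4));
  src = gen_rtx_PARALLEL (BLKmode, v);
  mem = assign_stack_temp (BLKmode, 8, 0);
  emit_group_store (mem, src, type, 8);
  return mem;
}
#endif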
1991 /* Generate code to copy a BLKmode object of TYPE out of a
1992 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1993 is null, a stack temporary is created. TGTBLK is returned.
1995 The purpose of this routine is to handle functions that return
1996 BLKmode structures in registers. Some machines (the PA for example)
1997 want to return all small structures in registers regardless of the
1998 structure's alignment. */
2000 rtx
2001 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2003 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2004 rtx src = NULL, dst = NULL;
2005 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2006 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2008 if (tgtblk == 0)
2010 tgtblk = assign_temp (build_qualified_type (type,
2011 (TYPE_QUALS (type)
2012 | TYPE_QUAL_CONST)),
2013 0, 1, 1);
2014 preserve_temp_slots (tgtblk);
2017 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2018 into a new pseudo which is a full word. */
2020 if (GET_MODE (srcreg) != BLKmode
2021 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2022 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2024 /* If the structure doesn't take up a whole number of words, see whether
2025 SRCREG is padded on the left or on the right. If it's on the left,
2026 set PADDING_CORRECTION to the number of bits to skip.
2028 In most ABIs, the structure will be returned at the least significant end of
2029 the register, which translates to right padding on little-endian
2030 targets and left padding on big-endian targets. The opposite
2031 holds if the structure is returned at the most significant
2032 end of the register. */
2033 if (bytes % UNITS_PER_WORD != 0
2034 && (targetm.calls.return_in_msb (type)
2035 ? !BYTES_BIG_ENDIAN
2036 : BYTES_BIG_ENDIAN))
2037 padding_correction
2038 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2040 /* Copy the structure BITSIZE bits at a time.
2042 We could probably emit more efficient code for machines which do not use
2043 strict alignment, but it doesn't seem worth the effort at the current
2044 time. */
2045 for (bitpos = 0, xbitpos = padding_correction;
2046 bitpos < bytes * BITS_PER_UNIT;
2047 bitpos += bitsize, xbitpos += bitsize)
2049 /* We need a new source operand each time xbitpos is on a
2050 word boundary and when xbitpos == padding_correction
2051 (the first time through). */
2052 if (xbitpos % BITS_PER_WORD == 0
2053 || xbitpos == padding_correction)
2054 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2055 GET_MODE (srcreg));
2057 /* We need a new destination operand each time bitpos is on
2058 a word boundary. */
2059 if (bitpos % BITS_PER_WORD == 0)
2060 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2062 /* Use xbitpos for the source extraction (right justified) and
2063 bitpos for the destination store (left justified). */
2064 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2065 extract_bit_field (src, bitsize,
2066 xbitpos % BITS_PER_WORD, 1,
2067 NULL_RTX, word_mode, word_mode));
2070 return tgtblk;
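/* Editorial sketch (hypothetical register number): retrieving a small
   BLKmode return value.  Passing 0 for TGTBLK makes the routine
   allocate the stack temporary itself.  */
#if 0
static rtx
example_blkmode_return (tree type)
{
  rtx srcreg = gen_rtx_REG (word_mode, 0);

  return copy_blkmode_from_reg (NULL_RTX, srcreg, type);
}
#endif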
2073 /* Add a USE expression for REG to the (possibly empty) list pointed
2074 to by CALL_FUSAGE. REG must denote a hard register. */
2076 void
2077 use_reg (rtx *call_fusage, rtx reg)
2079 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2081 *call_fusage
2082 = gen_rtx_EXPR_LIST (VOIDmode,
2083 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2086 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2087 starting at REGNO. All of these registers must be hard registers. */
2089 void
2090 use_regs (rtx *call_fusage, int regno, int nregs)
2092 int i;
2094 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2096 for (i = 0; i < nregs; i++)
2097 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2100 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2101 PARALLEL REGS. This is for calls that pass values in multiple
2102 non-contiguous locations. The Irix 6 ABI has examples of this. */
2104 void
2105 use_group_regs (rtx *call_fusage, rtx regs)
2107 int i;
2109 for (i = 0; i < XVECLEN (regs, 0); i++)
2111 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2113 /* A NULL entry means the parameter goes both on the stack and in
2114 registers. This can also be a MEM for targets that pass values
2115 partially on the stack and partially in registers. */
2116 if (reg != 0 && REG_P (reg))
2117 use_reg (call_fusage, reg);
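/* Editorial sketch (register numbers made up): recording that a call
   reads two consecutive hard registers.  The resulting list is what
   callers attach through CALL_INSN_FUNCTION_USAGE.  */
#if 0
static rtx
example_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  use_regs (&call_fusage, 4, 2);
  /* ... emit the call insn, then:
     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;  */
  return call_fusage;
}
#endif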
2122 /* Determine whether the LEN bytes generated by CONSTFUN can be
2123 stored to memory using several move instructions. CONSTFUNDATA is
2124 a pointer which will be passed as argument in every CONSTFUN call.
2125 ALIGN is maximum alignment we can assume. Return nonzero if a
2126 call to store_by_pieces should succeed. */
2128 int
2129 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2130 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2131 void *constfundata, unsigned int align)
2133 unsigned HOST_WIDE_INT l;
2134 unsigned int max_size;
2135 HOST_WIDE_INT offset = 0;
2136 enum machine_mode mode, tmode;
2137 enum insn_code icode;
2138 int reverse;
2139 rtx cst;
2141 if (len == 0)
2142 return 1;
2144 if (! STORE_BY_PIECES_P (len, align))
2145 return 0;
2147 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2148 if (align >= GET_MODE_ALIGNMENT (tmode))
2149 align = GET_MODE_ALIGNMENT (tmode);
2150 else
2152 enum machine_mode xmode;
2154 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2155 tmode != VOIDmode;
2156 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2157 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2158 || SLOW_UNALIGNED_ACCESS (tmode, align))
2159 break;
2161 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2164 /* We would first store what we can in the largest integer mode, then go to
2165 successively smaller modes. */
2167 for (reverse = 0;
2168 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2169 reverse++)
2171 l = len;
2172 mode = VOIDmode;
2173 max_size = STORE_MAX_PIECES + 1;
2174 while (max_size > 1)
2176 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2177 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2178 if (GET_MODE_SIZE (tmode) < max_size)
2179 mode = tmode;
2181 if (mode == VOIDmode)
2182 break;
2184 icode = mov_optab->handlers[(int) mode].insn_code;
2185 if (icode != CODE_FOR_nothing
2186 && align >= GET_MODE_ALIGNMENT (mode))
2188 unsigned int size = GET_MODE_SIZE (mode);
2190 while (l >= size)
2192 if (reverse)
2193 offset -= size;
2195 cst = (*constfun) (constfundata, offset, mode);
2196 if (!LEGITIMATE_CONSTANT_P (cst))
2197 return 0;
2199 if (!reverse)
2200 offset += size;
2202 l -= size;
2206 max_size = GET_MODE_SIZE (mode);
2209 /* The code above should have handled everything. */
2210 gcc_assert (!l);
2213 return 1;
2216 /* Generate several move instructions to store LEN bytes generated by
2217 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2218 pointer which will be passed as argument in every CONSTFUN call.
2219 ALIGN is maximum alignment we can assume.
2220 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2221 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2222 stpcpy. */
2224 rtx
2225 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2226 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2227 void *constfundata, unsigned int align, int endp)
2229 struct store_by_pieces data;
2231 if (len == 0)
2233 gcc_assert (endp != 2);
2234 return to;
2237 gcc_assert (STORE_BY_PIECES_P (len, align));
2238 data.constfun = constfun;
2239 data.constfundata = constfundata;
2240 data.len = len;
2241 data.to = to;
2242 store_by_pieces_1 (&data, align);
2243 if (endp)
2245 rtx to1;
2247 gcc_assert (!data.reverse);
2248 if (data.autinc_to)
2250 if (endp == 2)
2252 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2253 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2254 else
2255 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2256 -1));
2258 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2259 data.offset);
2261 else
2263 if (endp == 2)
2264 --data.offset;
2265 to1 = adjust_address (data.to, QImode, data.offset);
2267 return to1;
2269 else
2270 return data.to;
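/* Editorial sketch: a memset-style CONSTFUN and the usual guarded call
   sequence.  FILL_WITH_BYTE is hypothetical, and the sketch assumes
   every mode tried fits in a HOST_WIDE_INT, as the integer modes used
   by store_by_pieces do on common targets.  */
#if 0
static rtx
fill_with_byte (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		enum machine_mode mode)
{
  /* Replicate one byte across the whole of MODE.  */
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << 8) | *(unsigned char *) data;
  return gen_int_mode (val, mode);
}

/* Then, in an expander with MEM, LEN and ALIGN in scope:

     unsigned char c = 0x2a;
     if (can_store_by_pieces (len, fill_with_byte, &c, align))
       store_by_pieces (mem, len, fill_with_byte, &c, align, 0);  */
#endif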
2273 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2274 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2276 static void
2277 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2279 struct store_by_pieces data;
2281 if (len == 0)
2282 return;
2284 data.constfun = clear_by_pieces_1;
2285 data.constfundata = NULL;
2286 data.len = len;
2287 data.to = to;
2288 store_by_pieces_1 (&data, align);
2291 /* Callback routine for clear_by_pieces.
2292 Return const0_rtx unconditionally. */
2294 static rtx
2295 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2296 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2297 enum machine_mode mode ATTRIBUTE_UNUSED)
2299 return const0_rtx;
2302 /* Subroutine of clear_by_pieces and store_by_pieces.
2303 Generate several move instructions to store LEN bytes of block TO. (A MEM
2304 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2306 static void
2307 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2308 unsigned int align ATTRIBUTE_UNUSED)
2310 rtx to_addr = XEXP (data->to, 0);
2311 unsigned int max_size = STORE_MAX_PIECES + 1;
2312 enum machine_mode mode = VOIDmode, tmode;
2313 enum insn_code icode;
2315 data->offset = 0;
2316 data->to_addr = to_addr;
2317 data->autinc_to
2318 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2319 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2321 data->explicit_inc_to = 0;
2322 data->reverse
2323 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2324 if (data->reverse)
2325 data->offset = data->len;
2327 /* If storing requires more than two move insns,
2328 copy addresses to registers (to make displacements shorter)
2329 and use post-increment if available. */
2330 if (!data->autinc_to
2331 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2333 /* Determine the main mode we'll be using. */
2334 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2335 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2336 if (GET_MODE_SIZE (tmode) < max_size)
2337 mode = tmode;
2339 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2341 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2342 data->autinc_to = 1;
2343 data->explicit_inc_to = -1;
2346 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2347 && ! data->autinc_to)
2349 data->to_addr = copy_addr_to_reg (to_addr);
2350 data->autinc_to = 1;
2351 data->explicit_inc_to = 1;
2354 if (!data->autinc_to && CONSTANT_P (to_addr))
2355 data->to_addr = copy_addr_to_reg (to_addr);
2358 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2359 if (align >= GET_MODE_ALIGNMENT (tmode))
2360 align = GET_MODE_ALIGNMENT (tmode);
2361 else
2363 enum machine_mode xmode;
2365 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2366 tmode != VOIDmode;
2367 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2368 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2369 || SLOW_UNALIGNED_ACCESS (tmode, align))
2370 break;
2372 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2375 /* First store what we can in the largest integer mode, then go to
2376 successively smaller modes. */
2378 while (max_size > 1)
2380 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2381 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2382 if (GET_MODE_SIZE (tmode) < max_size)
2383 mode = tmode;
2385 if (mode == VOIDmode)
2386 break;
2388 icode = mov_optab->handlers[(int) mode].insn_code;
2389 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2390 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2392 max_size = GET_MODE_SIZE (mode);
2395 /* The code above should have handled everything. */
2396 gcc_assert (!data->len);
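/* Worked example (editorial): with STORE_MAX_PIECES == 4, an 11-byte
   store descends SImode, SImode, HImode, QImode: each iteration above
   picks the widest integer mode still narrower than MAX_SIZE, and
   store_by_pieces_2 consumes 4 + 4 + 2 + 1 bytes, leaving LEN == 0.  */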
2399 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2400 with move instructions for mode MODE. GENFUN is the gen_... function
2401 to make a move insn for that mode. DATA has all the other info. */
2403 static void
2404 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2405 struct store_by_pieces *data)
2407 unsigned int size = GET_MODE_SIZE (mode);
2408 rtx to1, cst;
2410 while (data->len >= size)
2412 if (data->reverse)
2413 data->offset -= size;
2415 if (data->autinc_to)
2416 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2417 data->offset);
2418 else
2419 to1 = adjust_address (data->to, mode, data->offset);
2421 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2422 emit_insn (gen_add2_insn (data->to_addr,
2423 GEN_INT (-(HOST_WIDE_INT) size)));
2425 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2426 emit_insn ((*genfun) (to1, cst));
2428 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2429 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2431 if (! data->reverse)
2432 data->offset += size;
2434 data->len -= size;
2438 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2439 its length in bytes. */
2441 rtx
2442 clear_storage (rtx object, rtx size, enum block_op_methods method)
2444 enum machine_mode mode = GET_MODE (object);
2445 unsigned int align;
2447 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2449 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2450 just move a zero. Otherwise, do this a piece at a time. */
2451 if (mode != BLKmode
2452 && GET_CODE (size) == CONST_INT
2453 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2455 rtx zero = CONST0_RTX (mode);
2456 if (zero != NULL)
2458 emit_move_insn (object, zero);
2459 return NULL;
2462 if (COMPLEX_MODE_P (mode))
2464 zero = CONST0_RTX (GET_MODE_INNER (mode));
2465 if (zero != NULL)
2467 write_complex_part (object, zero, 0);
2468 write_complex_part (object, zero, 1);
2469 return NULL;
2474 if (size == const0_rtx)
2475 return NULL;
2477 align = MEM_ALIGN (object);
2479 if (GET_CODE (size) == CONST_INT
2480 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2481 clear_by_pieces (object, INTVAL (size), align);
2482 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2484 else
2485 return clear_storage_via_libcall (object, size,
2486 method == BLOCK_OP_TAILCALL);
2488 return NULL;
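/* Editorial sketch: the common BLKmode case, zeroing a 32-byte stack
   temporary.  The size is hypothetical; alignment is taken from the
   MEM itself.  */
#if 0
static rtx
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 32, 0);

  clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);
  return mem;
}
#endif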
2491 /* A subroutine of clear_storage. Expand a call to memset.
2492 Return the return value of memset, 0 otherwise. */
2494 static rtx
2495 clear_storage_via_libcall (rtx object, rtx size, bool tailcall)
2497 tree call_expr, arg_list, fn, object_tree, size_tree;
2498 enum machine_mode size_mode;
2499 rtx retval;
2501 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2502 place those pseudos into a VAR_DECL and use them later. */
2504 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2506 size_mode = TYPE_MODE (sizetype);
2507 size = convert_to_mode (size_mode, size, 1);
2508 size = copy_to_mode_reg (size_mode, size);
2510 /* It is incorrect to use the libcall calling conventions to call
2511 memset in this context. This could be a user call to memset and
2512 the user may wish to examine the return value from memset. For
2513 targets where libcalls and normal calls have different conventions
2514 for returning pointers, we could end up generating incorrect code. */
2516 object_tree = make_tree (ptr_type_node, object);
2517 size_tree = make_tree (sizetype, size);
2519 fn = clear_storage_libcall_fn (true);
2520 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2521 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2522 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2524 /* Now we have to build up the CALL_EXPR itself. */
2525 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2526 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2527 call_expr, arg_list, NULL_TREE);
2528 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2530 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2532 return retval;
2535 /* A subroutine of clear_storage_via_libcall. Create the tree node
2536 for the function we use for block clears. The first time FOR_CALL
2537 is true, we call assemble_external. */
2539 static GTY(()) tree block_clear_fn;
2541 void
2542 init_block_clear_fn (const char *asmspec)
2544 if (!block_clear_fn)
2546 tree fn, args;
2548 fn = get_identifier ("memset");
2549 args = build_function_type_list (ptr_type_node, ptr_type_node,
2550 integer_type_node, sizetype,
2551 NULL_TREE);
2553 fn = build_decl (FUNCTION_DECL, fn, args);
2554 DECL_EXTERNAL (fn) = 1;
2555 TREE_PUBLIC (fn) = 1;
2556 DECL_ARTIFICIAL (fn) = 1;
2557 TREE_NOTHROW (fn) = 1;
2559 block_clear_fn = fn;
2562 if (asmspec)
2563 set_user_assembler_name (block_clear_fn, asmspec);
2566 static tree
2567 clear_storage_libcall_fn (int for_call)
2569 static bool emitted_extern;
2571 if (!block_clear_fn)
2572 init_block_clear_fn (NULL);
2574 if (for_call && !emitted_extern)
2576 emitted_extern = true;
2577 make_decl_rtl (block_clear_fn);
2578 assemble_external (block_clear_fn);
2581 return block_clear_fn;
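/* Editorial sketch: a target whose runtime spells memset differently
   could redirect the libcall during target-specific initialization;
   the assembler name here is a made-up example:

     init_block_clear_fn ("__gcc_memset");  */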
2584 /* Expand a setmem pattern; return true if successful. */
2586 bool
2587 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2589 /* Try the most limited insn first, because there's no point
2590 including more than one in the machine description unless
2591 the more limited one has some advantage. */
2593 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2594 enum machine_mode mode;
2596 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2597 mode = GET_MODE_WIDER_MODE (mode))
2599 enum insn_code code = setmem_optab[(int) mode];
2600 insn_operand_predicate_fn pred;
2602 if (code != CODE_FOR_nothing
2603 /* We don't need MODE to be narrower than
2604 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2605 the mode mask, as it is returned by the macro, it will
2606 definitely be less than the actual mode mask. */
2607 && ((GET_CODE (size) == CONST_INT
2608 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2609 <= (GET_MODE_MASK (mode) >> 1)))
2610 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2611 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2612 || (*pred) (object, BLKmode))
2613 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2614 || (*pred) (opalign, VOIDmode)))
2616 rtx opsize, opchar;
2617 enum machine_mode char_mode;
2618 rtx last = get_last_insn ();
2619 rtx pat;
2621 opsize = convert_to_mode (mode, size, 1);
2622 pred = insn_data[(int) code].operand[1].predicate;
2623 if (pred != 0 && ! (*pred) (opsize, mode))
2624 opsize = copy_to_mode_reg (mode, opsize);
2626 opchar = val;
2627 char_mode = insn_data[(int) code].operand[2].mode;
2628 if (char_mode != VOIDmode)
2630 opchar = convert_to_mode (char_mode, opchar, 1);
2631 pred = insn_data[(int) code].operand[2].predicate;
2632 if (pred != 0 && ! (*pred) (opchar, char_mode))
2633 opchar = copy_to_mode_reg (char_mode, opchar);
2636 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2637 if (pat)
2639 emit_insn (pat);
2640 return true;
2642 else
2643 delete_insns_since (last);
2647 return false;
2651 /* Write to one of the components of the complex value CPLX. Write VAL to
2652 the real part if IMAG_P is false, and the imaginary part if it's true. */
2654 static void
2655 write_complex_part (rtx cplx, rtx val, bool imag_p)
2657 enum machine_mode cmode;
2658 enum machine_mode imode;
2659 unsigned ibitsize;
2661 if (GET_CODE (cplx) == CONCAT)
2663 emit_move_insn (XEXP (cplx, imag_p), val);
2664 return;
2667 cmode = GET_MODE (cplx);
2668 imode = GET_MODE_INNER (cmode);
2669 ibitsize = GET_MODE_BITSIZE (imode);
2671 /* For MEMs simplify_gen_subreg may generate an invalid new address
2672 because, e.g., the original address is considered mode-dependent
2673 by the target, which restricts simplify_subreg from invoking
2674 adjust_address_nv. Instead of preparing fallback support for an
2675 invalid address, we call adjust_address_nv directly. */
2676 if (MEM_P (cplx))
2678 emit_move_insn (adjust_address_nv (cplx, imode,
2679 imag_p ? GET_MODE_SIZE (imode) : 0),
2680 val);
2681 return;
2684 /* If the sub-object is at least word sized, then we know that subregging
2685 will work. This special case is important, since store_bit_field
2686 wants to operate on integer modes, and there's rarely an OImode to
2687 correspond to TCmode. */
2688 if (ibitsize >= BITS_PER_WORD
2689 /* For hard regs we have exact predicates. Assume we can split
2690 the original object if it spans an even number of hard regs.
2691 This special case is important for SCmode on 64-bit platforms
2692 where the natural size of floating-point regs is 32-bit. */
2693 || (REG_P (cplx)
2694 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2695 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2697 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2698 imag_p ? GET_MODE_SIZE (imode) : 0);
2699 if (part)
2701 emit_move_insn (part, val);
2702 return;
2704 else
2705 /* simplify_gen_subreg may fail for sub-word MEMs. */
2706 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2709 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2712 /* Extract one of the components of the complex value CPLX. Extract the
2713 real part if IMAG_P is false, and the imaginary part if it's true. */
2715 static rtx
2716 read_complex_part (rtx cplx, bool imag_p)
2718 enum machine_mode cmode, imode;
2719 unsigned ibitsize;
2721 if (GET_CODE (cplx) == CONCAT)
2722 return XEXP (cplx, imag_p);
2724 cmode = GET_MODE (cplx);
2725 imode = GET_MODE_INNER (cmode);
2726 ibitsize = GET_MODE_BITSIZE (imode);
2728 /* Special case reads from complex constants that got spilled to memory. */
2729 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2731 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2732 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2734 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2735 if (CONSTANT_CLASS_P (part))
2736 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2740 /* For MEMs simplify_gen_subreg may generate an invalid new address
2741 because, e.g., the original address is considered mode-dependent
2742 by the target, which restricts simplify_subreg from invoking
2743 adjust_address_nv. Instead of preparing fallback support for an
2744 invalid address, we call adjust_address_nv directly. */
2745 if (MEM_P (cplx))
2746 return adjust_address_nv (cplx, imode,
2747 imag_p ? GET_MODE_SIZE (imode) : 0);
2749 /* If the sub-object is at least word sized, then we know that subregging
2750 will work. This special case is important, since extract_bit_field
2751 wants to operate on integer modes, and there's rarely an OImode to
2752 correspond to TCmode. */
2753 if (ibitsize >= BITS_PER_WORD
2754 /* For hard regs we have exact predicates. Assume we can split
2755 the original object if it spans an even number of hard regs.
2756 This special case is important for SCmode on 64-bit platforms
2757 where the natural size of floating-point regs is 32-bit. */
2758 || (REG_P (cplx)
2759 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2760 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2762 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2763 imag_p ? GET_MODE_SIZE (imode) : 0);
2764 if (ret)
2765 return ret;
2766 else
2767 /* simplify_gen_subreg may fail for sub-word MEMs. */
2768 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2771 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2772 true, NULL_RTX, imode, imode);
2775 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2776 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2777 represented in NEW_MODE. If FORCE is true, this will never happen, as
2778 we'll force-create a SUBREG if needed. */
2780 static rtx
2781 emit_move_change_mode (enum machine_mode new_mode,
2782 enum machine_mode old_mode, rtx x, bool force)
2784 rtx ret;
2786 if (reload_in_progress && MEM_P (x))
2788 /* We can't use gen_lowpart here because it may call change_address
2789 which is not appropriate if we were called when a reload was in
2790 progress. We don't have to worry about changing the address since
2791 the size in bytes is supposed to be the same. Copy the MEM to
2792 change the mode and move any substitutions from the old MEM to
2793 the new one. */
2795 ret = adjust_address_nv (x, new_mode, 0);
2796 copy_replacements (x, ret);
2798 else
2800 /* Note that we do want simplify_subreg's behavior of validating
2801 that the new mode is ok for a hard register. If we were to use
2802 simplify_gen_subreg, we would create the subreg, but would
2803 probably run into the target not being able to implement it. */
2804 /* Except, of course, when FORCE is true, when this is exactly what
2805 we want. Which is needed for CCmodes on some targets. */
2806 if (force)
2807 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2808 else
2809 ret = simplify_subreg (new_mode, x, old_mode, 0);
2812 return ret;
2815 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2816 an integer mode of the same size as MODE. Returns the instruction
2817 emitted, or NULL if such a move could not be generated. */
2819 static rtx
2820 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2822 enum machine_mode imode;
2823 enum insn_code code;
2825 /* There must exist a mode of the exact size we require. */
2826 imode = int_mode_for_mode (mode);
2827 if (imode == BLKmode)
2828 return NULL_RTX;
2830 /* The target must support moves in this mode. */
2831 code = mov_optab->handlers[imode].insn_code;
2832 if (code == CODE_FOR_nothing)
2833 return NULL_RTX;
2835 x = emit_move_change_mode (imode, mode, x, force);
2836 if (x == NULL_RTX)
2837 return NULL_RTX;
2838 y = emit_move_change_mode (imode, mode, y, force);
2839 if (y == NULL_RTX)
2840 return NULL_RTX;
2841 return emit_insn (GEN_FCN (code) (x, y));
2844 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2845 Return an equivalent MEM that does not use an auto-increment. */
2847 static rtx
2848 emit_move_resolve_push (enum machine_mode mode, rtx x)
2850 enum rtx_code code = GET_CODE (XEXP (x, 0));
2851 HOST_WIDE_INT adjust;
2852 rtx temp;
2854 adjust = GET_MODE_SIZE (mode);
2855 #ifdef PUSH_ROUNDING
2856 adjust = PUSH_ROUNDING (adjust);
2857 #endif
2858 if (code == PRE_DEC || code == POST_DEC)
2859 adjust = -adjust;
2860 else if (code == PRE_MODIFY || code == POST_MODIFY)
2862 rtx expr = XEXP (XEXP (x, 0), 1);
2863 HOST_WIDE_INT val;
2865 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2866 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2867 val = INTVAL (XEXP (expr, 1));
2868 if (GET_CODE (expr) == MINUS)
2869 val = -val;
2870 gcc_assert (adjust == val || adjust == -val);
2871 adjust = val;
2874 /* Do not use anti_adjust_stack, since we don't want to update
2875 stack_pointer_delta. */
2876 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2877 GEN_INT (adjust), stack_pointer_rtx,
2878 0, OPTAB_LIB_WIDEN);
2879 if (temp != stack_pointer_rtx)
2880 emit_move_insn (stack_pointer_rtx, temp);
2882 switch (code)
2884 case PRE_INC:
2885 case PRE_DEC:
2886 case PRE_MODIFY:
2887 temp = stack_pointer_rtx;
2888 break;
2889 case POST_INC:
2890 case POST_DEC:
2891 case POST_MODIFY:
2892 temp = plus_constant (stack_pointer_rtx, -adjust);
2893 break;
2894 default:
2895 gcc_unreachable ();
2898 return replace_equiv_address (x, temp);
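/* Worked example (editorial): for a DImode push through (pre_dec sp),
   where PUSH_ROUNDING leaves 8 bytes unchanged, ADJUST becomes -8; the
   code above emits sp = sp - 8 and returns a plain (mem:DI (reg sp)),
   since for the PRE_* codes the adjusted stack pointer is already the
   operand's address.  */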
2901 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2902 X is known to satisfy push_operand, and MODE is known to be complex.
2903 Returns the last instruction emitted. */
2905 static rtx
2906 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2908 enum machine_mode submode = GET_MODE_INNER (mode);
2909 bool imag_first;
2911 #ifdef PUSH_ROUNDING
2912 unsigned int submodesize = GET_MODE_SIZE (submode);
2914 /* In case we output to the stack, but the size is smaller than what
2915 the machine can push exactly, we need to use move instructions. */
2916 if (PUSH_ROUNDING (submodesize) != submodesize)
2918 x = emit_move_resolve_push (mode, x);
2919 return emit_move_insn (x, y);
2921 #endif
2923 /* Note that the real part always precedes the imag part in memory
2924 regardless of machine's endianness. */
2925 switch (GET_CODE (XEXP (x, 0)))
2927 case PRE_DEC:
2928 case POST_DEC:
2929 imag_first = true;
2930 break;
2931 case PRE_INC:
2932 case POST_INC:
2933 imag_first = false;
2934 break;
2935 default:
2936 gcc_unreachable ();
2939 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2940 read_complex_part (y, imag_first));
2941 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 read_complex_part (y, !imag_first));
2945 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2946 MODE is known to be complex. Returns the last instruction emitted. */
2948 static rtx
2949 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2951 bool try_int;
2953 /* Need to take special care for pushes, to maintain proper ordering
2954 of the data, and possibly extra padding. */
2955 if (push_operand (x, mode))
2956 return emit_move_complex_push (mode, x, y);
2958 /* See if we can coerce the target into moving both values at once. */
2960 /* Move floating point as parts. */
2961 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
2962 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
2963 try_int = false;
2964 /* Not possible if the values are inherently not adjacent. */
2965 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2966 try_int = false;
2967 /* Is possible if both are registers (or subregs of registers). */
2968 else if (register_operand (x, mode) && register_operand (y, mode))
2969 try_int = true;
2970 /* If one of the operands is a memory, and alignment constraints
2971 are friendly enough, we may be able to do combined memory operations.
2972 We do not attempt this if Y is a constant because that combination is
2973 usually better with the by-parts thing below. */
2974 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2975 && (!STRICT_ALIGNMENT
2976 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2977 try_int = true;
2978 else
2979 try_int = false;
2981 if (try_int)
2983 rtx ret;
2985 /* For memory to memory moves, optimal behavior can be had with the
2986 existing block move logic. */
2987 if (MEM_P (x) && MEM_P (y))
2989 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2990 BLOCK_OP_NO_LIBCALL);
2991 return get_last_insn ();
2994 ret = emit_move_via_integer (mode, x, y, true);
2995 if (ret)
2996 return ret;
2999 /* Show the output dies here. This is necessary for SUBREGs
3000 of pseudos since we cannot track their lifetimes correctly;
3001 hard regs shouldn't appear here except as return values. */
3002 if (!reload_completed && !reload_in_progress
3003 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3004 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3006 write_complex_part (x, read_complex_part (y, false), false);
3007 write_complex_part (x, read_complex_part (y, true), true);
3008 return get_last_insn ();
3011 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3012 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3014 static rtx
3015 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3017 rtx ret;
3019 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3020 if (mode != CCmode)
3022 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3023 if (code != CODE_FOR_nothing)
3025 x = emit_move_change_mode (CCmode, mode, x, true);
3026 y = emit_move_change_mode (CCmode, mode, y, true);
3027 return emit_insn (GEN_FCN (code) (x, y));
3031 /* Otherwise, find the MODE_INT mode of the same width. */
3032 ret = emit_move_via_integer (mode, x, y, false);
3033 gcc_assert (ret != NULL);
3034 return ret;
3037 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3038 MODE is any multi-word or full-word mode that lacks a move_insn
3039 pattern. Note that you will get better code if you define such
3040 patterns, even if they must turn into multiple assembler instructions. */
3042 static rtx
3043 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3045 rtx last_insn = 0;
3046 rtx seq, inner;
3047 bool need_clobber;
3048 int i;
3050 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3052 /* If X is a push on the stack, do the push now and replace
3053 X with a reference to the stack pointer. */
3054 if (push_operand (x, mode))
3055 x = emit_move_resolve_push (mode, x);
3057 /* If we are in reload, see if either operand is a MEM whose address
3058 is scheduled for replacement. */
3059 if (reload_in_progress && MEM_P (x)
3060 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3061 x = replace_equiv_address_nv (x, inner);
3062 if (reload_in_progress && MEM_P (y)
3063 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3064 y = replace_equiv_address_nv (y, inner);
3066 start_sequence ();
3068 need_clobber = false;
3069 for (i = 0;
3070 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3071 i++)
3073 rtx xpart = operand_subword (x, i, 1, mode);
3074 rtx ypart = operand_subword (y, i, 1, mode);
3076 /* If we can't get a part of Y, put Y into memory if it is a
3077 constant. Otherwise, force it into a register. Then we must
3078 be able to get a part of Y. */
3079 if (ypart == 0 && CONSTANT_P (y))
3081 y = force_const_mem (mode, y);
3082 ypart = operand_subword (y, i, 1, mode);
3084 else if (ypart == 0)
3085 ypart = operand_subword_force (y, i, mode);
3087 gcc_assert (xpart && ypart);
3089 need_clobber |= (GET_CODE (xpart) == SUBREG);
3091 last_insn = emit_move_insn (xpart, ypart);
3094 seq = get_insns ();
3095 end_sequence ();
3097 /* Show the output dies here. This is necessary for SUBREGs
3098 of pseudos since we cannot track their lifetimes correctly;
3099 hard regs shouldn't appear here except as return values.
3100 We never want to emit such a clobber after reload. */
3101 if (x != y
3102 && ! (reload_in_progress || reload_completed)
3103 && need_clobber != 0)
3104 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3106 emit_insn (seq);
3108 return last_insn;
3111 /* Low level part of emit_move_insn.
3112 Called just like emit_move_insn, but assumes X and Y
3113 are basically valid. */
3115 rtx
3116 emit_move_insn_1 (rtx x, rtx y)
3118 enum machine_mode mode = GET_MODE (x);
3119 enum insn_code code;
3121 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3123 code = mov_optab->handlers[mode].insn_code;
3124 if (code != CODE_FOR_nothing)
3125 return emit_insn (GEN_FCN (code) (x, y));
3127 /* Expand complex moves by moving real part and imag part. */
3128 if (COMPLEX_MODE_P (mode))
3129 return emit_move_complex (mode, x, y);
3131 if (GET_MODE_CLASS (mode) == MODE_CC)
3132 return emit_move_ccmode (mode, x, y);
3134 /* Try using a move pattern for the corresponding integer mode. This is
3135 only safe when simplify_subreg can convert MODE constants into integer
3136 constants. At present, it can only do this reliably if the value
3137 fits within a HOST_WIDE_INT. */
3138 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3140 rtx ret = emit_move_via_integer (mode, x, y, false);
3141 if (ret)
3142 return ret;
3145 return emit_move_multi_word (mode, x, y);
3148 /* Generate code to copy Y into X.
3149 Both Y and X must have the same mode, except that
3150 Y can be a constant with VOIDmode.
3151 This mode cannot be BLKmode; use emit_block_move for that.
3153 Return the last instruction emitted. */
3155 rtx
3156 emit_move_insn (rtx x, rtx y)
3158 enum machine_mode mode = GET_MODE (x);
3159 rtx y_cst = NULL_RTX;
3160 rtx last_insn, set;
3162 gcc_assert (mode != BLKmode
3163 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3165 if (CONSTANT_P (y))
3167 if (optimize
3168 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3169 && (last_insn = compress_float_constant (x, y)))
3170 return last_insn;
3172 y_cst = y;
3174 if (!LEGITIMATE_CONSTANT_P (y))
3176 y = force_const_mem (mode, y);
3178 /* If the target's cannot_force_const_mem prevented the spill,
3179 assume that the target's move expanders will also take care
3180 of the non-legitimate constant. */
3181 if (!y)
3182 y = y_cst;
3186 /* If X or Y are memory references, verify that their addresses are valid
3187 for the machine. */
3188 if (MEM_P (x)
3189 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3190 && ! push_operand (x, GET_MODE (x)))
3191 || (flag_force_addr
3192 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3193 x = validize_mem (x);
3195 if (MEM_P (y)
3196 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3197 || (flag_force_addr
3198 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3199 y = validize_mem (y);
3201 gcc_assert (mode != BLKmode);
3203 last_insn = emit_move_insn_1 (x, y);
3205 if (y_cst && REG_P (x)
3206 && (set = single_set (last_insn)) != NULL_RTX
3207 && SET_DEST (set) == x
3208 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3209 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3211 return last_insn;
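/* Editorial sketch: the typical constant-to-pseudo move.  Were 42 not
   LEGITIMATE_CONSTANT_P on the target, it would be forced into the
   constant pool above, with a REG_EQUAL note still recording it.  */
#if 0
static rtx
example_const_move (void)
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, gen_int_mode (42, SImode));
  return reg;
}
#endif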
3214 /* If Y is representable exactly in a narrower mode, and the target can
3215 perform the extension directly from constant or memory, then emit the
3216 move as an extension. */
3218 static rtx
3219 compress_float_constant (rtx x, rtx y)
3221 enum machine_mode dstmode = GET_MODE (x);
3222 enum machine_mode orig_srcmode = GET_MODE (y);
3223 enum machine_mode srcmode;
3224 REAL_VALUE_TYPE r;
3225 int oldcost, newcost;
3227 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3229 if (LEGITIMATE_CONSTANT_P (y))
3230 oldcost = rtx_cost (y, SET);
3231 else
3232 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3234 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3235 srcmode != orig_srcmode;
3236 srcmode = GET_MODE_WIDER_MODE (srcmode))
3238 enum insn_code ic;
3239 rtx trunc_y, last_insn;
3241 /* Skip if the target can't extend this way. */
3242 ic = can_extend_p (dstmode, srcmode, 0);
3243 if (ic == CODE_FOR_nothing)
3244 continue;
3246 /* Skip if the narrowed value isn't exact. */
3247 if (! exact_real_truncate (srcmode, &r))
3248 continue;
3250 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3252 if (LEGITIMATE_CONSTANT_P (trunc_y))
3254 /* Skip if the target needs extra instructions to perform
3255 the extension. */
3256 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3257 continue;
3258 /* This is valid, but may not be cheaper than the original. */
3259 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3260 if (oldcost < newcost)
3261 continue;
3263 else if (float_extend_from_mem[dstmode][srcmode])
3265 trunc_y = force_const_mem (srcmode, trunc_y);
3266 /* This is valid, but may not be cheaper than the original. */
3267 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3268 if (oldcost < newcost)
3269 continue;
3270 trunc_y = validize_mem (trunc_y);
3272 else
3273 continue;
3275 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3276 last_insn = get_last_insn ();
3278 if (REG_P (x))
3279 set_unique_reg_note (last_insn, REG_EQUAL, y);
3281 return last_insn;
3284 return NULL_RTX;
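/* Worked example (editorial): moving the DFmode constant 1.0 on a
   target with an extendsfdf2 pattern.  1.0 truncates to SFmode
   exactly, so when the SFmode constant (or its constant-pool slot)
   plus the extension is cheaper by rtx_cost than the DFmode load, the
   move becomes a float_extend and a REG_EQUAL note preserves 1.0.  */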
3287 /* Pushing data onto the stack. */
3289 /* Push a block of length SIZE (perhaps variable)
3290 and return an rtx to address the beginning of the block.
3291 The value may be virtual_outgoing_args_rtx.
3293 EXTRA is the number of bytes of padding to push in addition to SIZE.
3294 BELOW nonzero means this padding comes at low addresses;
3295 otherwise, the padding comes at high addresses. */
3297 rtx
3298 push_block (rtx size, int extra, int below)
3300 rtx temp;
3302 size = convert_modes (Pmode, ptr_mode, size, 1);
3303 if (CONSTANT_P (size))
3304 anti_adjust_stack (plus_constant (size, extra));
3305 else if (REG_P (size) && extra == 0)
3306 anti_adjust_stack (size);
3307 else
3309 temp = copy_to_mode_reg (Pmode, size);
3310 if (extra != 0)
3311 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3312 temp, 0, OPTAB_LIB_WIDEN);
3313 anti_adjust_stack (temp);
3316 #ifndef STACK_GROWS_DOWNWARD
3317 if (0)
3318 #else
3319 if (1)
3320 #endif
3322 temp = virtual_outgoing_args_rtx;
3323 if (extra != 0 && below)
3324 temp = plus_constant (temp, extra);
3326 else
3328 if (GET_CODE (size) == CONST_INT)
3329 temp = plus_constant (virtual_outgoing_args_rtx,
3330 -INTVAL (size) - (below ? 0 : extra));
3331 else if (extra != 0 && !below)
3332 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3333 negate_rtx (Pmode, plus_constant (size, extra)));
3334 else
3335 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3336 negate_rtx (Pmode, size));
3339 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3342 #ifdef PUSH_ROUNDING
3344 /* Emit single push insn. */
3346 static void
3347 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3349 rtx dest_addr;
3350 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3351 rtx dest;
3352 enum insn_code icode;
3353 insn_operand_predicate_fn pred;
3355 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3356 /* If there is a push pattern, use it. Otherwise fall back to the old
3357 way: hand a MEM representing the push operation to the move expander. */
3358 icode = push_optab->handlers[(int) mode].insn_code;
3359 if (icode != CODE_FOR_nothing)
3361 if (((pred = insn_data[(int) icode].operand[0].predicate)
3362 && !((*pred) (x, mode))))
3363 x = force_reg (mode, x);
3364 emit_insn (GEN_FCN (icode) (x));
3365 return;
3367 if (GET_MODE_SIZE (mode) == rounded_size)
3368 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3369 /* If we are to pad downward, adjust the stack pointer first and
3370 then store X into the stack location using an offset. This is
3371 because emit_move_insn does not know how to pad; it does not have
3372 access to type. */
3373 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3375 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3376 HOST_WIDE_INT offset;
3378 emit_move_insn (stack_pointer_rtx,
3379 expand_binop (Pmode,
3380 #ifdef STACK_GROWS_DOWNWARD
3381 sub_optab,
3382 #else
3383 add_optab,
3384 #endif
3385 stack_pointer_rtx,
3386 GEN_INT (rounded_size),
3387 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3389 offset = (HOST_WIDE_INT) padding_size;
3390 #ifdef STACK_GROWS_DOWNWARD
3391 if (STACK_PUSH_CODE == POST_DEC)
3392 /* We have already decremented the stack pointer, so get the
3393 previous value. */
3394 offset += (HOST_WIDE_INT) rounded_size;
3395 #else
3396 if (STACK_PUSH_CODE == POST_INC)
3397 /* We have already incremented the stack pointer, so get the
3398 previous value. */
3399 offset -= (HOST_WIDE_INT) rounded_size;
3400 #endif
3401 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3403 else
3405 #ifdef STACK_GROWS_DOWNWARD
3406 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3407 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3408 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3409 #else
3410 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3411 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3412 GEN_INT (rounded_size));
3413 #endif
3414 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3417 dest = gen_rtx_MEM (mode, dest_addr);
3419 if (type != 0)
3421 set_mem_attributes (dest, type, 1);
3423 if (flag_optimize_sibling_calls)
3424 /* Function incoming arguments may overlap with sibling call
3425 outgoing arguments and we cannot allow reordering of reads
3426 from function arguments with stores to outgoing arguments
3427 of sibling calls. */
3428 set_mem_alias_set (dest, 0);
3430 emit_move_insn (dest, x);
3432 #endif
3434 /* Generate code to push X onto the stack, assuming it has mode MODE and
3435 type TYPE.
3436 MODE is redundant except when X is a CONST_INT (since they don't
3437 carry mode info).
3438 SIZE is an rtx for the size of data to be copied (in bytes),
3439 needed only if X is BLKmode.
3441 ALIGN (in bits) is maximum alignment we can assume.
3443 If PARTIAL and REG are both nonzero, then copy that many of the first
3444 bytes of X into registers starting with REG, and push the rest of X.
3445 The amount of space pushed is decreased by PARTIAL bytes.
3446 REG must be a hard register in this case.
3447 If REG is zero but PARTIAL is not, take all other actions for an
3448 argument partially in registers, but do not actually load any
3449 registers.
3451 EXTRA is the amount in bytes of extra space to leave next to this arg.
3452 This is ignored if an argument block has already been allocated.
3454 On a machine that lacks real push insns, ARGS_ADDR is the address of
3455 the bottom of the argument block for this call. We use indexing off there
3456 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3457 argument block has not been preallocated.
3459 ARGS_SO_FAR is the size of args previously pushed for this call.
3461 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3462 for arguments passed in registers. If nonzero, it will be the number
3463 of bytes required. */
3465 void
3466 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3467 unsigned int align, int partial, rtx reg, int extra,
3468 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3469 rtx alignment_pad)
3471 rtx xinner;
3472 enum direction stack_direction
3473 #ifdef STACK_GROWS_DOWNWARD
3474 = downward;
3475 #else
3476 = upward;
3477 #endif
3479 /* Decide where to pad the argument: `downward' for below,
3480 `upward' for above, or `none' for don't pad it.
3481 Default is below for small data on big-endian machines; else above. */
3482 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3484 /* Invert direction if stack is post-decrement.
3485 FIXME: why? */
3486 if (STACK_PUSH_CODE == POST_DEC)
3487 if (where_pad != none)
3488 where_pad = (where_pad == downward ? upward : downward);
3490 xinner = x;
3492 if (mode == BLKmode)
3494 /* Copy a block into the stack, entirely or partially. */
3496 rtx temp;
3497 int used;
3498 int offset;
3499 int skip;
3501 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3502 used = partial - offset;
3504 gcc_assert (size);
3506 /* USED is now the # of bytes we need not copy to the stack
3507 because registers will take care of them. */
3509 if (partial != 0)
3510 xinner = adjust_address (xinner, BLKmode, used);
3512 /* If the partial register-part of the arg counts in its stack size,
3513 skip the part of stack space corresponding to the registers.
3514 Otherwise, start copying to the beginning of the stack space,
3515 by setting SKIP to 0. */
3516 skip = (reg_parm_stack_space == 0) ? 0 : used;
3518 #ifdef PUSH_ROUNDING
3519 /* Do it with several push insns if that doesn't take lots of insns
3520 and if there is no difficulty with push insns that skip bytes
3521 on the stack for alignment purposes. */
3522 if (args_addr == 0
3523 && PUSH_ARGS
3524 && GET_CODE (size) == CONST_INT
3525 && skip == 0
3526 && MEM_ALIGN (xinner) >= align
3527 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3528 /* Here we avoid the case of a structure whose weak alignment
3529 forces many pushes of a small amount of data,
3530 and such small pushes do rounding that causes trouble. */
3531 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3532 || align >= BIGGEST_ALIGNMENT
3533 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3534 == (align / BITS_PER_UNIT)))
3535 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3537 /* Push padding now if padding above and stack grows down,
3538 or if padding below and stack grows up.
3539 But if space already allocated, this has already been done. */
3540 if (extra && args_addr == 0
3541 && where_pad != none && where_pad != stack_direction)
3542 anti_adjust_stack (GEN_INT (extra));
3544 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3546 else
3547 #endif /* PUSH_ROUNDING */
3549 rtx target;
3551 /* Otherwise make space on the stack and copy the data
3552 to the address of that space. */
3554 /* Deduct words put into registers from the size we must copy. */
3555 if (partial != 0)
3557 if (GET_CODE (size) == CONST_INT)
3558 size = GEN_INT (INTVAL (size) - used);
3559 else
3560 size = expand_binop (GET_MODE (size), sub_optab, size,
3561 GEN_INT (used), NULL_RTX, 0,
3562 OPTAB_LIB_WIDEN);
3565 /* Get the address of the stack space.
3566 In this case, we do not deal with EXTRA separately.
3567 A single stack adjust will do. */
3568 if (! args_addr)
3570 temp = push_block (size, extra, where_pad == downward);
3571 extra = 0;
3573 else if (GET_CODE (args_so_far) == CONST_INT)
3574 temp = memory_address (BLKmode,
3575 plus_constant (args_addr,
3576 skip + INTVAL (args_so_far)));
3577 else
3578 temp = memory_address (BLKmode,
3579 plus_constant (gen_rtx_PLUS (Pmode,
3580 args_addr,
3581 args_so_far),
3582 skip));
3584 if (!ACCUMULATE_OUTGOING_ARGS)
3586 /* If the source is referenced relative to the stack pointer,
3587 copy it to another register to stabilize it. We do not need
3588 to do this if we know that we won't be changing sp. */
3590 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3591 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3592 temp = copy_to_reg (temp);
3595 target = gen_rtx_MEM (BLKmode, temp);
3597 /* We do *not* set_mem_attributes here, because incoming arguments
3598 may overlap with sibling call outgoing arguments and we cannot
3599 allow reordering of reads from function arguments with stores
3600 to outgoing arguments of sibling calls. We do, however, want
3601 to record the alignment of the stack slot. */
3602 /* ALIGN may well be better aligned than TYPE, e.g. due to
3603 PARM_BOUNDARY. Assume the caller isn't lying. */
3604 set_mem_align (target, align);
3606 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3609 else if (partial > 0)
3611 /* Scalar partly in registers. */
3613 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3614 int i;
3615 int not_stack;
3616 /* # bytes of start of argument
3617 that we must make space for but need not store. */
3618 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3619 int args_offset = INTVAL (args_so_far);
3620 int skip;
3622 /* Push padding now if padding above and stack grows down,
3623 or if padding below and stack grows up.
3624 But if space already allocated, this has already been done. */
3625 if (extra && args_addr == 0
3626 && where_pad != none && where_pad != stack_direction)
3627 anti_adjust_stack (GEN_INT (extra));
3629 /* If we make space by pushing it, we might as well push
3630 the real data. Otherwise, we can leave OFFSET nonzero
3631 and leave the space uninitialized. */
3632 if (args_addr == 0)
3633 offset = 0;
3635 /* Now NOT_STACK gets the number of words that we don't need to
3636 allocate on the stack. Convert OFFSET to words too. */
3637 not_stack = (partial - offset) / UNITS_PER_WORD;
3638 offset /= UNITS_PER_WORD;
3640 /* If the partial register-part of the arg counts in its stack size,
3641 skip the part of stack space corresponding to the registers.
3642 Otherwise, start copying to the beginning of the stack space,
3643 by setting SKIP to 0. */
3644 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3646 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3647 x = validize_mem (force_const_mem (mode, x));
3649 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3650 SUBREGs of such registers are not allowed. */
3651 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3652 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3653 x = copy_to_reg (x);
3655 /* Loop over all the words allocated on the stack for this arg. */
3656 /* We can do it by words, because any scalar bigger than a word
3657 has a size a multiple of a word. */
3658 #ifndef PUSH_ARGS_REVERSED
3659 for (i = not_stack; i < size; i++)
3660 #else
3661 for (i = size - 1; i >= not_stack; i--)
3662 #endif
3663 if (i >= not_stack + offset)
3664 emit_push_insn (operand_subword_force (x, i, mode),
3665 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3666 0, args_addr,
3667 GEN_INT (args_offset + ((i - not_stack + skip)
3668 * UNITS_PER_WORD)),
3669 reg_parm_stack_space, alignment_pad);
3671 else
3673 rtx addr;
3674 rtx dest;
3676 /* Push padding now if padding above and stack grows down,
3677 or if padding below and stack grows up.
3678 But if space already allocated, this has already been done. */
3679 if (extra && args_addr == 0
3680 && where_pad != none && where_pad != stack_direction)
3681 anti_adjust_stack (GEN_INT (extra));
3683 #ifdef PUSH_ROUNDING
3684 if (args_addr == 0 && PUSH_ARGS)
3685 emit_single_push_insn (mode, x, type);
3686 else
3687 #endif
3689 if (GET_CODE (args_so_far) == CONST_INT)
3690 addr
3691 = memory_address (mode,
3692 plus_constant (args_addr,
3693 INTVAL (args_so_far)));
3694 else
3695 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3696 args_so_far));
3697 dest = gen_rtx_MEM (mode, addr);
3699 /* We do *not* set_mem_attributes here, because incoming arguments
3700 may overlap with sibling call outgoing arguments and we cannot
3701 allow reordering of reads from function arguments with stores
3702 to outgoing arguments of sibling calls. We do, however, want
3703 to record the alignment of the stack slot. */
3704 /* ALIGN may well be better aligned than TYPE, e.g. due to
3705 PARM_BOUNDARY. Assume the caller isn't lying. */
3706 set_mem_align (dest, align);
3708 emit_move_insn (dest, x);
3712 /* If part should go in registers, copy that part
3713 into the appropriate registers. Do this now, at the end,
3714 since mem-to-mem copies above may do function calls. */
3715 if (partial > 0 && reg != 0)
3717 /* Handle calls that pass values in multiple non-contiguous locations.
3718 The Irix 6 ABI has examples of this. */
3719 if (GET_CODE (reg) == PARALLEL)
3720 emit_group_load (reg, x, type, -1);
3721 else
3723 gcc_assert (partial % UNITS_PER_WORD == 0);
3724 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3728 if (extra && args_addr == 0 && where_pad == stack_direction)
3729 anti_adjust_stack (GEN_INT (extra));
3731 if (alignment_pad && args_addr == 0)
3732 anti_adjust_stack (alignment_pad);
3735 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3736 operations. */
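/* E.g. (an illustration, not part of the original comment): when not
   optimizing, a pseudo-register X is returned so that intermediate
   results of a multi-insn expansion can be computed directly into it;
   under optimization, or for hard registers, 0 is returned and the
   expander picks fresh pseudos instead.  */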
3738 static rtx
3739 get_subtarget (rtx x)
3741 return (optimize
3742 || x == 0
3743 /* Only registers can be subtargets. */
3744 || !REG_P (x)
3745 /* Don't use hard regs to avoid extending their life. */
3746 || REGNO (x) < FIRST_PSEUDO_REGISTER
3747 ? 0 : x);
3750 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3751 FIELD is a bitfield. Returns true if the optimization was successful,
3752 and there's nothing else to do. */
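/* Illustrative example, assuming a 32-bit little-endian target (not
   from the original comments).  Given

     struct s { unsigned lo : 8; unsigned hi : 24; } x;

   "x.hi += n" qualifies as the topmost-bitfield case below: N is
   shifted left by 8 and added to the containing word, since any carry
   out of the top bits is simply discarded.  Likewise "x.f ^= 1" for a
   1-bit field F becomes a single xor of the containing word with
   1 << bitpos instead of an extract/modify/insert sequence.  */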
3754 static bool
3755 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3756 unsigned HOST_WIDE_INT bitpos,
3757 enum machine_mode mode1, rtx str_rtx,
3758 tree to, tree src)
3760 enum machine_mode str_mode = GET_MODE (str_rtx);
3761 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3762 tree op0, op1;
3763 rtx value, result;
3764 optab binop;
3766 if (mode1 != VOIDmode
3767 || bitsize >= BITS_PER_WORD
3768 || str_bitsize > BITS_PER_WORD
3769 || TREE_SIDE_EFFECTS (to)
3770 || TREE_THIS_VOLATILE (to))
3771 return false;
3773 STRIP_NOPS (src);
3774 if (!BINARY_CLASS_P (src)
3775 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3776 return false;
3778 op0 = TREE_OPERAND (src, 0);
3779 op1 = TREE_OPERAND (src, 1);
3780 STRIP_NOPS (op0);
3782 if (!operand_equal_p (to, op0, 0))
3783 return false;
3785 if (MEM_P (str_rtx))
3787 unsigned HOST_WIDE_INT offset1;
3789 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3790 str_mode = word_mode;
3791 str_mode = get_best_mode (bitsize, bitpos,
3792 MEM_ALIGN (str_rtx), str_mode, 0);
3793 if (str_mode == VOIDmode)
3794 return false;
3795 str_bitsize = GET_MODE_BITSIZE (str_mode);
3797 offset1 = bitpos;
3798 bitpos %= str_bitsize;
3799 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3800 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3802 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3803 return false;
3805 /* If the bit field covers the whole REG/MEM, store_field
3806 will likely generate better code. */
3807 if (bitsize >= str_bitsize)
3808 return false;
3810 /* We can't handle fields split across multiple entities. */
3811 if (bitpos + bitsize > str_bitsize)
3812 return false;
3814 if (BYTES_BIG_ENDIAN)
3815 bitpos = str_bitsize - bitpos - bitsize;
3817 switch (TREE_CODE (src))
3819 case PLUS_EXPR:
3820 case MINUS_EXPR:
3821 /* For now, just optimize the case of the topmost bitfield
3822 where we don't need to do any masking and also
3823 1-bit bitfields where xor can be used.
3824 We might win by one instruction for the other bitfields
3825 too if insv/extv instructions aren't used, so that
3826 can be added later. */
3827 if (bitpos + bitsize != str_bitsize
3828 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3829 break;
3831 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3832 value = convert_modes (str_mode,
3833 TYPE_MODE (TREE_TYPE (op1)), value,
3834 TYPE_UNSIGNED (TREE_TYPE (op1)));
3836 /* We may be accessing data outside the field, which means
3837 we can alias adjacent data. */
3838 if (MEM_P (str_rtx))
3840 str_rtx = shallow_copy_rtx (str_rtx);
3841 set_mem_alias_set (str_rtx, 0);
3842 set_mem_expr (str_rtx, 0);
3845 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3846 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3848 value = expand_and (str_mode, value, const1_rtx, NULL);
3849 binop = xor_optab;
3851 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3852 build_int_cst (NULL_TREE, bitpos),
3853 NULL_RTX, 1);
3854 result = expand_binop (str_mode, binop, str_rtx,
3855 value, str_rtx, 1, OPTAB_WIDEN);
3856 if (result != str_rtx)
3857 emit_move_insn (str_rtx, result);
3858 return true;
3860 case BIT_IOR_EXPR:
3861 case BIT_XOR_EXPR:
3862 if (TREE_CODE (op1) != INTEGER_CST)
3863 break;
3864 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3865 value = convert_modes (GET_MODE (str_rtx),
3866 TYPE_MODE (TREE_TYPE (op1)), value,
3867 TYPE_UNSIGNED (TREE_TYPE (op1)));
3869 /* We may be accessing data outside the field, which means
3870 we can alias adjacent data. */
3871 if (MEM_P (str_rtx))
3873 str_rtx = shallow_copy_rtx (str_rtx);
3874 set_mem_alias_set (str_rtx, 0);
3875 set_mem_expr (str_rtx, 0);
3878 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3879 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3881 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
3882 - 1);
3883 value = expand_and (GET_MODE (str_rtx), value, mask,
3884 NULL_RTX);
3886 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3887 build_int_cst (NULL_TREE, bitpos),
3888 NULL_RTX, 1);
3889 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3890 value, str_rtx, 1, OPTAB_WIDEN);
3891 if (result != str_rtx)
3892 emit_move_insn (str_rtx, result);
3893 return true;
3895 default:
3896 break;
3899 return false;
3903 /* Expand an assignment that stores the value of FROM into TO. */
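/* A road map, added for orientation (a summary of the code below, not
   part of the original comments): (1) an erroneous LHS is discarded
   after evaluating FROM; (2) component and array references go through
   get_inner_reference and then optimize_bitfield_assignment_op or
   store_field; (3) a non-aggregate CALL_EXPR on the RHS is evaluated
   before the LHS is expanded; (4) a RESULT_DECL target is never stored
   into directly; (5) a struct-return block that may overlap FROM is
   copied with memmove.  Everything else falls through to store_expr.  */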
3905 void
3906 expand_assignment (tree to, tree from)
3908 rtx to_rtx = 0;
3909 rtx result;
3911 /* Don't crash if the lhs of the assignment was erroneous. */
3913 if (TREE_CODE (to) == ERROR_MARK)
3915 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3916 return;
3919 /* Assignment of a structure component needs special treatment
3920 if the structure component's rtx is not simply a MEM.
3921 Assignment of an array element at a constant index, and assignment of
3922 an array element in an unaligned packed structure field, has the same
3923 problem. */
3924 if (handled_component_p (to)
3925 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3927 enum machine_mode mode1;
3928 HOST_WIDE_INT bitsize, bitpos;
3929 tree offset;
3930 int unsignedp;
3931 int volatilep = 0;
3932 tree tem;
3934 push_temp_slots ();
3935 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3936 &unsignedp, &volatilep, true);
3938 /* If we are going to use store_bit_field and extract_bit_field,
3939 make sure to_rtx will be safe for multiple use. */
3941 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3943 if (offset != 0)
3945 rtx offset_rtx;
3947 if (!MEM_P (to_rtx))
3949 /* We can get constant negative offsets into arrays with broken
3950 user code. Translate this to a trap instead of ICEing. */
3951 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
3952 expand_builtin_trap ();
3953 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
3956 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3957 #ifdef POINTERS_EXTEND_UNSIGNED
3958 if (GET_MODE (offset_rtx) != Pmode)
3959 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3960 #else
3961 if (GET_MODE (offset_rtx) != ptr_mode)
3962 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3963 #endif
3965 /* A constant address in TO_RTX can have VOIDmode, we must not try
3966 to call force_reg for that case. Avoid that case. */
3967 if (MEM_P (to_rtx)
3968 && GET_MODE (to_rtx) == BLKmode
3969 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3970 && bitsize > 0
3971 && (bitpos % bitsize) == 0
3972 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3973 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3975 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3976 bitpos = 0;
3979 to_rtx = offset_address (to_rtx, offset_rtx,
3980 highest_pow2_factor_for_target (to,
3981 offset));
3984 /* Handle expand_expr of a complex value returning a CONCAT. */
3985 if (GET_CODE (to_rtx) == CONCAT)
3987 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3989 gcc_assert (bitpos == 0);
3990 result = store_expr (from, to_rtx, false);
3992 else
3994 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3995 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3998 else
4000 if (MEM_P (to_rtx))
4002 /* If the field is at offset zero, we could have been given the
4003 DECL_RTX of the parent struct. Don't munge it. */
4004 to_rtx = shallow_copy_rtx (to_rtx);
4006 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4008 /* Deal with volatile and readonly fields. The former is only
4009 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4010 if (volatilep)
4011 MEM_VOLATILE_P (to_rtx) = 1;
4012 if (component_uses_parent_alias_set (to))
4013 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4016 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4017 to_rtx, to, from))
4018 result = NULL;
4019 else
4020 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4021 TREE_TYPE (tem), get_alias_set (to));
4024 if (result)
4025 preserve_temp_slots (result);
4026 free_temp_slots ();
4027 pop_temp_slots ();
4028 return;
4031 /* If the rhs is a function call and its value is not an aggregate,
4032 call the function before we start to compute the lhs.
4033 This is needed for correct code for cases such as
4034 val = setjmp (buf) on machines where reference to val
4035 requires loading up part of an address in a separate insn.
4037 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4038 since it might be a promoted variable where the zero- or sign-extension
4039 needs to be done. Handling this in the normal way is safe because no
4040 computation is done before the call. */
4041 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4042 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4043 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4044 && REG_P (DECL_RTL (to))))
4046 rtx value;
4048 push_temp_slots ();
4049 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4050 if (to_rtx == 0)
4051 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4053 /* Handle calls that return values in multiple non-contiguous locations.
4054 The Irix 6 ABI has examples of this. */
4055 if (GET_CODE (to_rtx) == PARALLEL)
4056 emit_group_load (to_rtx, value, TREE_TYPE (from),
4057 int_size_in_bytes (TREE_TYPE (from)));
4058 else if (GET_MODE (to_rtx) == BLKmode)
4059 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4060 else
4062 if (POINTER_TYPE_P (TREE_TYPE (to)))
4063 value = convert_memory_address (GET_MODE (to_rtx), value);
4064 emit_move_insn (to_rtx, value);
4066 preserve_temp_slots (to_rtx);
4067 free_temp_slots ();
4068 pop_temp_slots ();
4069 return;
4072 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4073 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4075 if (to_rtx == 0)
4076 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4078 /* Don't move directly into a return register. */
4079 if (TREE_CODE (to) == RESULT_DECL
4080 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4082 rtx temp;
4084 push_temp_slots ();
4085 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4087 if (GET_CODE (to_rtx) == PARALLEL)
4088 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4089 int_size_in_bytes (TREE_TYPE (from)));
4090 else
4091 emit_move_insn (to_rtx, temp);
4093 preserve_temp_slots (to_rtx);
4094 free_temp_slots ();
4095 pop_temp_slots ();
4096 return;
4099 /* In case we are returning the contents of an object which overlaps
4100 the place the value is being stored, use a safe function when copying
4101 a value through a pointer into a structure value return block. */
4102 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4103 && current_function_returns_struct
4104 && !current_function_returns_pcc_struct)
4106 rtx from_rtx, size;
4108 push_temp_slots ();
4109 size = expr_size (from);
4110 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4112 emit_library_call (memmove_libfunc, LCT_NORMAL,
4113 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4114 XEXP (from_rtx, 0), Pmode,
4115 convert_to_mode (TYPE_MODE (sizetype),
4116 size, TYPE_UNSIGNED (sizetype)),
4117 TYPE_MODE (sizetype));
4119 preserve_temp_slots (to_rtx);
4120 free_temp_slots ();
4121 pop_temp_slots ();
4122 return;
4125 /* Compute FROM and store the value in the rtx we got. */
4127 push_temp_slots ();
4128 result = store_expr (from, to_rtx, 0);
4129 preserve_temp_slots (result);
4130 free_temp_slots ();
4131 pop_temp_slots ();
4132 return;
4135 /* Generate code for computing expression EXP,
4136 and storing the value into TARGET.
4138 If the mode is BLKmode then we may return TARGET itself.
4139 It turns out that in BLKmode it doesn't cause a problem,
4140 because C has no operators that could combine two different
4141 assignments into the same BLKmode object with different values
4142 with no sequence point. Will other languages need this to
4143 be more thorough?
4145 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4146 stack, and block moves may need to be treated specially. */
4148 rtx
4149 store_expr (tree exp, rtx target, int call_param_p)
4151 rtx temp;
4152 rtx alt_rtl = NULL_RTX;
4153 int dont_return_target = 0;
4155 if (VOID_TYPE_P (TREE_TYPE (exp)))
4157 /* C++ can generate ?: expressions with a throw expression in one
4158 branch and an rvalue in the other. Here, we resolve attempts to
4159 store the throw expression's nonexistent result. */
4160 gcc_assert (!call_param_p);
4161 expand_expr (exp, const0_rtx, VOIDmode, 0);
4162 return NULL_RTX;
4164 if (TREE_CODE (exp) == COMPOUND_EXPR)
4166 /* Perform first part of compound expression, then assign from second
4167 part. */
4168 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4169 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4170 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4172 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4174 /* For conditional expression, get safe form of the target. Then
4175 test the condition, doing the appropriate assignment on either
4176 side. This avoids the creation of unnecessary temporaries.
4177 For non-BLKmode, it is more efficient not to do this. */
4179 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4181 do_pending_stack_adjust ();
4182 NO_DEFER_POP;
4183 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4184 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4185 emit_jump_insn (gen_jump (lab2));
4186 emit_barrier ();
4187 emit_label (lab1);
4188 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4189 emit_label (lab2);
4190 OK_DEFER_POP;
4192 return NULL_RTX;
4194 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4195 /* If this is a scalar in a register that is stored in a wider mode
4196 than the declared mode, compute the result into its declared mode
4197 and then convert to the wider mode. Our value is the computed
4198 expression. */
4200 rtx inner_target = 0;
4202 /* We can do the conversion inside EXP, which will often result
4203 in some optimizations. Do the conversion in two steps: first
4204 change the signedness, if needed, then the extend. But don't
4205 do this if the type of EXP is a subtype of something else
4206 since then the conversion might involve more than just
4207 converting modes. */
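/* Illustrative example, assuming a target that promotes QImode
   variables to SImode registers (not from the original comments):
   storing a "signed char" expression into a promoted "unsigned char"
   variable first converts the expression to the unsigned variant of
   its type and then widens it to SImode, both steps folded into EXP
   itself so later folding can simplify them.  */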
4208 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4209 && TREE_TYPE (TREE_TYPE (exp)) == 0
4210 && (!lang_hooks.reduce_bit_field_operations
4211 || (GET_MODE_PRECISION (GET_MODE (target))
4212 == TYPE_PRECISION (TREE_TYPE (exp)))))
4214 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4215 != SUBREG_PROMOTED_UNSIGNED_P (target))
4216 exp = convert
4217 (lang_hooks.types.signed_or_unsigned_type
4218 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4220 exp = convert (lang_hooks.types.type_for_mode
4221 (GET_MODE (SUBREG_REG (target)),
4222 SUBREG_PROMOTED_UNSIGNED_P (target)),
4223 exp);
4225 inner_target = SUBREG_REG (target);
4228 temp = expand_expr (exp, inner_target, VOIDmode,
4229 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4231 /* If TEMP is a VOIDmode constant, use convert_modes to make
4232 sure that we properly convert it. */
4233 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4235 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4236 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4237 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4238 GET_MODE (target), temp,
4239 SUBREG_PROMOTED_UNSIGNED_P (target));
4242 convert_move (SUBREG_REG (target), temp,
4243 SUBREG_PROMOTED_UNSIGNED_P (target));
4245 return NULL_RTX;
4247 else
4249 temp = expand_expr_real (exp, target, GET_MODE (target),
4250 (call_param_p
4251 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4252 &alt_rtl);
4253 /* Return TARGET if it's a specified hardware register.
4254 If TARGET is a volatile mem ref, either return TARGET
4255 or return a reg copied *from* TARGET; ANSI requires this.
4257 Otherwise, if TEMP is not TARGET, return TEMP
4258 if it is constant (for efficiency),
4259 or if we really want the correct value. */
4260 if (!(target && REG_P (target)
4261 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4262 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4263 && ! rtx_equal_p (temp, target)
4264 && CONSTANT_P (temp))
4265 dont_return_target = 1;
4268 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4269 the same as that of TARGET, adjust the constant. This is needed, for
4270 example, in case it is a CONST_DOUBLE and we want only a word-sized
4271 value. */
4272 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4273 && TREE_CODE (exp) != ERROR_MARK
4274 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4275 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4276 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4278 /* If value was not generated in the target, store it there.
4279 Convert the value to TARGET's type first if necessary and emit the
4280 pending incrementations that have been queued when expanding EXP.
4281 Note that we cannot emit the whole queue blindly because this will
4282 effectively disable the POST_INC optimization later.
4284 If TEMP and TARGET compare equal according to rtx_equal_p, but
4285 one or both of them are volatile memory refs, we have to distinguish
4286 two cases:
4287 - expand_expr has used TARGET. In this case, we must not generate
4288 another copy. This can be detected by TARGET being equal according
4289 to == .
4290 - expand_expr has not used TARGET - that means that the source just
4291 happens to have the same RTX form. Since temp will have been created
4292 by expand_expr, it will compare unequal according to == .
4293 We must generate a copy in this case, to reach the correct number
4294 of volatile memory references. */
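/* E.g. (illustrative): if TARGET and TEMP are two distinct volatile
   MEMs referencing the same address, rtx_equal_p considers them equal,
   yet the store below must still be emitted so the source-level number
   of volatile accesses is preserved; only when expand_expr returned
   TARGET itself (pointer equality) is the copy omitted.  */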
4296 if ((! rtx_equal_p (temp, target)
4297 || (temp != target && (side_effects_p (temp)
4298 || side_effects_p (target))))
4299 && TREE_CODE (exp) != ERROR_MARK
4300 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4301 but TARGET is not valid memory reference, TEMP will differ
4302 from TARGET although it is really the same location. */
4303 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4304 /* If there's nothing to copy, don't bother. Don't call
4305 expr_size unless necessary, because the expr_size hook of some
4306 front ends (C++) must not be given objects that are not
4307 supposed to be bit-copied or bit-initialized. */
4308 && expr_size (exp) != const0_rtx)
4310 if (GET_MODE (temp) != GET_MODE (target)
4311 && GET_MODE (temp) != VOIDmode)
4313 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4314 if (dont_return_target)
4316 /* In this case, we will return TEMP,
4317 so make sure it has the proper mode.
4318 But don't forget to store the value into TARGET. */
4319 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4320 emit_move_insn (target, temp);
4322 else
4323 convert_move (target, temp, unsignedp);
4326 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4328 /* Handle copying a string constant into an array. The string
4329 constant may be shorter than the array. So copy just the string's
4330 actual length, and clear the rest. First get the size of the data
4331 type of the string, which is actually the size of the target. */
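/* Worked example (illustrative): for "char buf[8]" initialized from
   the string "hi", the constant occupies 3 bytes including the NUL,
   so the else branch below copies min (8, 3) bytes with
   emit_block_move and clear_storage zeroes the remaining 5.  */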
4332 rtx size = expr_size (exp);
4334 if (GET_CODE (size) == CONST_INT
4335 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4336 emit_block_move (target, temp, size,
4337 (call_param_p
4338 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4339 else
4341 /* Compute the size of the data to copy from the string. */
4342 tree copy_size
4343 = size_binop (MIN_EXPR,
4344 make_tree (sizetype, size),
4345 size_int (TREE_STRING_LENGTH (exp)));
4346 rtx copy_size_rtx
4347 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4348 (call_param_p
4349 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4350 rtx label = 0;
4352 /* Copy that much. */
4353 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4354 TYPE_UNSIGNED (sizetype));
4355 emit_block_move (target, temp, copy_size_rtx,
4356 (call_param_p
4357 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4359 /* Figure out how much is left in TARGET that we have to clear.
4360 Do all calculations in ptr_mode. */
4361 if (GET_CODE (copy_size_rtx) == CONST_INT)
4363 size = plus_constant (size, -INTVAL (copy_size_rtx));
4364 target = adjust_address (target, BLKmode,
4365 INTVAL (copy_size_rtx));
4367 else
4369 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4370 copy_size_rtx, NULL_RTX, 0,
4371 OPTAB_LIB_WIDEN);
4373 #ifdef POINTERS_EXTEND_UNSIGNED
4374 if (GET_MODE (copy_size_rtx) != Pmode)
4375 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4376 TYPE_UNSIGNED (sizetype));
4377 #endif
4379 target = offset_address (target, copy_size_rtx,
4380 highest_pow2_factor (copy_size));
4381 label = gen_label_rtx ();
4382 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4383 GET_MODE (size), 0, label);
4386 if (size != const0_rtx)
4387 clear_storage (target, size, BLOCK_OP_NORMAL);
4389 if (label)
4390 emit_label (label);
4393 /* Handle calls that return values in multiple non-contiguous locations.
4394 The Irix 6 ABI has examples of this. */
4395 else if (GET_CODE (target) == PARALLEL)
4396 emit_group_load (target, temp, TREE_TYPE (exp),
4397 int_size_in_bytes (TREE_TYPE (exp)));
4398 else if (GET_MODE (temp) == BLKmode)
4399 emit_block_move (target, temp, expr_size (exp),
4400 (call_param_p
4401 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4402 else
4404 temp = force_operand (temp, target);
4405 if (temp != target)
4406 emit_move_insn (target, temp);
4410 return NULL_RTX;
4413 /* Examine CTOR to discover:
4414 * how many scalar fields are set to nonzero values,
4415 and place it in *P_NZ_ELTS;
4416 * how many scalar fields are set to non-constant values,
4417 and place it in *P_NC_ELTS;
4418 * how many scalar fields in total are in CTOR,
4419 and place it in *P_ELT_COUNT; and
4420 * if a type is a union, and the initializer from the constructor
4421 is not the largest element in the union, then set *P_MUST_CLEAR. */
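/* Worked example (illustrative): for

     struct { int a, b, c; } x = { 1, 0, 2 };

   the constructor yields *P_ELT_COUNT == 3, *P_NZ_ELTS == 2 (the zero
   initializer of B is not counted) and *P_NC_ELTS == 0, since every
   value is an INTEGER_CST.  */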
4423 static void
4424 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4425 HOST_WIDE_INT *p_nc_elts,
4426 HOST_WIDE_INT *p_elt_count,
4427 bool *p_must_clear)
4429 unsigned HOST_WIDE_INT idx;
4430 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4431 tree value, purpose;
4433 nz_elts = 0;
4434 nc_elts = 0;
4435 elt_count = 0;
4437 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4439 HOST_WIDE_INT mult;
4441 mult = 1;
4442 if (TREE_CODE (purpose) == RANGE_EXPR)
4444 tree lo_index = TREE_OPERAND (purpose, 0);
4445 tree hi_index = TREE_OPERAND (purpose, 1);
4447 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4448 mult = (tree_low_cst (hi_index, 1)
4449 - tree_low_cst (lo_index, 1) + 1);
4452 switch (TREE_CODE (value))
4454 case CONSTRUCTOR:
4456 HOST_WIDE_INT nz = 0, nc = 0, ic = 0;
4457 categorize_ctor_elements_1 (value, &nz, &nc, &ic, p_must_clear);
4458 nz_elts += mult * nz;
4459 nc_elts += mult * nc;
4460 elt_count += mult * ic;
4462 break;
4464 case INTEGER_CST:
4465 case REAL_CST:
4466 if (!initializer_zerop (value))
4467 nz_elts += mult;
4468 elt_count += mult;
4469 break;
4471 case STRING_CST:
4472 nz_elts += mult * TREE_STRING_LENGTH (value);
4473 elt_count += mult * TREE_STRING_LENGTH (value);
4474 break;
4476 case COMPLEX_CST:
4477 if (!initializer_zerop (TREE_REALPART (value)))
4478 nz_elts += mult;
4479 if (!initializer_zerop (TREE_IMAGPART (value)))
4480 nz_elts += mult;
4481 elt_count += mult;
4482 break;
4484 case VECTOR_CST:
4486 tree v;
4487 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4489 if (!initializer_zerop (TREE_VALUE (v)))
4490 nz_elts += mult;
4491 elt_count += mult;
4494 break;
4496 default:
4497 nz_elts += mult;
4498 elt_count += mult;
4499 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4500 nc_elts += mult;
4501 break;
4505 if (!*p_must_clear
4506 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4507 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4509 tree init_sub_type;
4510 bool clear_this = true;
4512 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4514 /* We don't expect more than one element of the union to be
4515 initialized. Not sure what we should do otherwise... */
4516 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4517 == 1);
4519 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4520 CONSTRUCTOR_ELTS (ctor),
4521 0)->value);
4523 /* ??? We could look at each element of the union, and find the
4524 largest element. Which would avoid comparing the size of the
4525 initialized element against any tail padding in the union.
4526 Doesn't seem worth the effort... */
4527 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4528 TYPE_SIZE (init_sub_type)) == 1)
4530 /* And now we have to find out if the element itself is fully
4531 constructed. E.g. for union { struct { int a, b; } s; } u
4532 = { .s = { .a = 1 } }. */
4533 if (elt_count == count_type_elements (init_sub_type, false))
4534 clear_this = false;
4538 *p_must_clear = clear_this;
4541 *p_nz_elts += nz_elts;
4542 *p_nc_elts += nc_elts;
4543 *p_elt_count += elt_count;
4546 void
4547 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4548 HOST_WIDE_INT *p_nc_elts,
4549 HOST_WIDE_INT *p_elt_count,
4550 bool *p_must_clear)
4552 *p_nz_elts = 0;
4553 *p_nc_elts = 0;
4554 *p_elt_count = 0;
4555 *p_must_clear = false;
4556 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count,
4557 p_must_clear);
4560 /* Count the number of scalars in TYPE.  Return -1 if the count
4561 overflows or if TYPE is variable-sized.  If ALLOW_FLEXARR is true,
4562 don't count a flexible array member at the end of the structure. */
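/* Worked example (illustrative): for

     struct { int a[4]; _Complex double z; }

   the result is 4 * 1 + 2 == 6 scalars.  A union is guessed at as its
   size in words, and any variable-sized member makes the result -1.  */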
4564 HOST_WIDE_INT
4565 count_type_elements (tree type, bool allow_flexarr)
4567 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4568 switch (TREE_CODE (type))
4570 case ARRAY_TYPE:
4572 tree telts = array_type_nelts (type);
4573 if (telts && host_integerp (telts, 1))
4575 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4576 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4577 if (n == 0)
4578 return 0;
4579 else if (max / n > m)
4580 return n * m;
4582 return -1;
4585 case RECORD_TYPE:
4587 HOST_WIDE_INT n = 0, t;
4588 tree f;
4590 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4591 if (TREE_CODE (f) == FIELD_DECL)
4593 t = count_type_elements (TREE_TYPE (f), false);
4594 if (t < 0)
4596 /* Check for structures with flexible array member. */
4597 tree tf = TREE_TYPE (f);
4598 if (allow_flexarr
4599 && TREE_CHAIN (f) == NULL
4600 && TREE_CODE (tf) == ARRAY_TYPE
4601 && TYPE_DOMAIN (tf)
4602 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4603 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4604 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4605 && int_size_in_bytes (type) >= 0)
4606 break;
4608 return -1;
4610 n += t;
4613 return n;
4616 case UNION_TYPE:
4617 case QUAL_UNION_TYPE:
4619 /* Ho hum. How in the world do we guess here? Clearly it isn't
4620 right to count the fields. Guess based on the number of words. */
4621 HOST_WIDE_INT n = int_size_in_bytes (type);
4622 if (n < 0)
4623 return -1;
4624 return n / UNITS_PER_WORD;
4627 case COMPLEX_TYPE:
4628 return 2;
4630 case VECTOR_TYPE:
4631 return TYPE_VECTOR_SUBPARTS (type);
4633 case INTEGER_TYPE:
4634 case REAL_TYPE:
4635 case ENUMERAL_TYPE:
4636 case BOOLEAN_TYPE:
4637 case CHAR_TYPE:
4638 case POINTER_TYPE:
4639 case OFFSET_TYPE:
4640 case REFERENCE_TYPE:
4641 return 1;
4643 case VOID_TYPE:
4644 case METHOD_TYPE:
4645 case FUNCTION_TYPE:
4646 case LANG_TYPE:
4647 default:
4648 gcc_unreachable ();
4652 /* Return 1 if EXP contains mostly (more than 3/4) zeros. */
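/* E.g. (illustrative): "int a[100]" initialized as { [0] = 5, [1] = 6 }
   has 2 nonzero elements out of 100 and so is mostly zeros; callers
   such as store_constructor then prefer to clear the whole object
   first and store only the nonzero elements.  */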
4654 static int
4655 mostly_zeros_p (tree exp)
4657 if (TREE_CODE (exp) == CONSTRUCTOR)
4660 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4661 bool must_clear;
4663 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4664 if (must_clear)
4665 return 1;
4667 elts = count_type_elements (TREE_TYPE (exp), false);
4669 return nz_elts < elts / 4;
4672 return initializer_zerop (exp);
4675 /* Return 1 if EXP contains all zeros. */
4677 static int
4678 all_zeros_p (tree exp)
4680 if (TREE_CODE (exp) == CONSTRUCTOR)
4683 HOST_WIDE_INT nz_elts, nc_elts, count;
4684 bool must_clear;
4686 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count, &must_clear);
4687 return nz_elts == 0;
4690 return initializer_zerop (exp);
4693 /* Helper function for store_constructor.
4694 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4695 TYPE is the type of the CONSTRUCTOR, not the element type.
4696 CLEARED is as for store_constructor.
4697 ALIAS_SET is the alias set to use for any stores.
4699 This provides a recursive shortcut back to store_constructor when it isn't
4700 necessary to go through store_field. This is so that we can pass through
4701 the cleared field to let store_constructor know that we may not have to
4702 clear a substructure if the outer structure has already been cleared. */
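/* E.g. (illustrative): when store_constructor has just cleared the
   whole LHS because an aggregate initializer was mostly zeros, a
   nested CONSTRUCTOR that starts and ends on byte boundaries is handed
   back to store_constructor with CLEARED set, so its zero elements are
   skipped instead of being stored again.  */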
4704 static void
4705 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4706 HOST_WIDE_INT bitpos, enum machine_mode mode,
4707 tree exp, tree type, int cleared, int alias_set)
4709 if (TREE_CODE (exp) == CONSTRUCTOR
4710 /* We can only call store_constructor recursively if the size and
4711 bit position are on a byte boundary. */
4712 && bitpos % BITS_PER_UNIT == 0
4713 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4714 /* If we have a nonzero bitpos for a register target, then we just
4715 let store_field do the bitfield handling. This is unlikely to
4716 generate unnecessary clear instructions anyways. */
4717 && (bitpos == 0 || MEM_P (target)))
4719 if (MEM_P (target))
4720 target
4721 = adjust_address (target,
4722 GET_MODE (target) == BLKmode
4723 || 0 != (bitpos
4724 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4725 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4728 /* Update the alias set, if required. */
4729 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4730 && MEM_ALIAS_SET (target) != 0)
4732 target = copy_rtx (target);
4733 set_mem_alias_set (target, alias_set);
4736 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4738 else
4739 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4742 /* Store the value of constructor EXP into the rtx TARGET.
4743 TARGET is either a REG or a MEM; we know it cannot conflict, since
4744 safe_from_p has been called.
4745 CLEARED is true if TARGET is known to have been zero'd.
4746 SIZE is the number of bytes of TARGET we are allowed to modify: this
4747 may not be the same as the size of EXP if we are assigning to a field
4748 which has been packed to exclude padding bits. */
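/* Illustrative example (not from the original comments): for

     int a[100] = { [3] = 7 };

   the ARRAY_TYPE case below sees one initialized element out of a
   hundred, so it clears the whole array with clear_storage and then
   stores only a[3]; with no missing elements and mostly nonzero values
   it would instead store every element individually.  */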
4750 static void
4751 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4753 tree type = TREE_TYPE (exp);
4754 #ifdef WORD_REGISTER_OPERATIONS
4755 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4756 #endif
4758 switch (TREE_CODE (type))
4760 case RECORD_TYPE:
4761 case UNION_TYPE:
4762 case QUAL_UNION_TYPE:
4764 unsigned HOST_WIDE_INT idx;
4765 tree field, value;
4767 /* If size is zero or the target is already cleared, do nothing. */
4768 if (size == 0 || cleared)
4769 cleared = 1;
4770 /* We either clear the aggregate or indicate the value is dead. */
4771 else if ((TREE_CODE (type) == UNION_TYPE
4772 || TREE_CODE (type) == QUAL_UNION_TYPE)
4773 && ! CONSTRUCTOR_ELTS (exp))
4774 /* If the constructor is empty, clear the union. */
4776 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4777 cleared = 1;
4780 /* If we are building a static constructor into a register,
4781 set the initial value as zero so we can fold the value into
4782 a constant. But if more than one register is involved,
4783 this probably loses. */
4784 else if (REG_P (target) && TREE_STATIC (exp)
4785 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4787 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4788 cleared = 1;
4791 /* If the constructor has fewer fields than the structure or
4792 if we are initializing the structure to mostly zeros, clear
4793 the whole structure first. Don't do this if TARGET is a
4794 register whose mode size isn't equal to SIZE since
4795 clear_storage can't handle this case. */
4796 else if (size > 0
4797 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4798 != fields_length (type))
4799 || mostly_zeros_p (exp))
4800 && (!REG_P (target)
4801 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4802 == size)))
4804 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4805 cleared = 1;
4808 if (! cleared)
4809 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4811 /* Store each element of the constructor into the
4812 corresponding field of TARGET. */
4813 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4815 enum machine_mode mode;
4816 HOST_WIDE_INT bitsize;
4817 HOST_WIDE_INT bitpos = 0;
4818 tree offset;
4819 rtx to_rtx = target;
4821 /* Just ignore missing fields. We cleared the whole
4822 structure, above, if any fields are missing. */
4823 if (field == 0)
4824 continue;
4826 if (cleared && initializer_zerop (value))
4827 continue;
4829 if (host_integerp (DECL_SIZE (field), 1))
4830 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4831 else
4832 bitsize = -1;
4834 mode = DECL_MODE (field);
4835 if (DECL_BIT_FIELD (field))
4836 mode = VOIDmode;
4838 offset = DECL_FIELD_OFFSET (field);
4839 if (host_integerp (offset, 0)
4840 && host_integerp (bit_position (field), 0))
4842 bitpos = int_bit_position (field);
4843 offset = 0;
4845 else
4846 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4848 if (offset)
4850 rtx offset_rtx;
4852 offset
4853 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4854 make_tree (TREE_TYPE (exp),
4855 target));
4857 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4858 gcc_assert (MEM_P (to_rtx));
4860 #ifdef POINTERS_EXTEND_UNSIGNED
4861 if (GET_MODE (offset_rtx) != Pmode)
4862 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4863 #else
4864 if (GET_MODE (offset_rtx) != ptr_mode)
4865 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4866 #endif
4868 to_rtx = offset_address (to_rtx, offset_rtx,
4869 highest_pow2_factor (offset));
4872 #ifdef WORD_REGISTER_OPERATIONS
4873 /* If this initializes a field that is smaller than a
4874 word, at the start of a word, try to widen it to a full
4875 word. This special case allows us to output C++ member
4876 function initializations in a form that the optimizers
4877 can understand. */
4878 if (REG_P (target)
4879 && bitsize < BITS_PER_WORD
4880 && bitpos % BITS_PER_WORD == 0
4881 && GET_MODE_CLASS (mode) == MODE_INT
4882 && TREE_CODE (value) == INTEGER_CST
4883 && exp_size >= 0
4884 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4886 tree type = TREE_TYPE (value);
4888 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4890 type = lang_hooks.types.type_for_size
4891 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4892 value = convert (type, value);
4895 if (BYTES_BIG_ENDIAN)
4896 value
4897 = fold_build2 (LSHIFT_EXPR, type, value,
4898 build_int_cst (NULL_TREE,
4899 BITS_PER_WORD - bitsize));
4900 bitsize = BITS_PER_WORD;
4901 mode = word_mode;
4903 #endif
4905 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4906 && DECL_NONADDRESSABLE_P (field))
4908 to_rtx = copy_rtx (to_rtx);
4909 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4912 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4913 value, type, cleared,
4914 get_alias_set (TREE_TYPE (field)));
4916 break;
4918 case ARRAY_TYPE:
4920 tree value, index;
4921 unsigned HOST_WIDE_INT i;
4922 int need_to_clear;
4923 tree domain;
4924 tree elttype = TREE_TYPE (type);
4925 int const_bounds_p;
4926 HOST_WIDE_INT minelt = 0;
4927 HOST_WIDE_INT maxelt = 0;
4929 domain = TYPE_DOMAIN (type);
4930 const_bounds_p = (TYPE_MIN_VALUE (domain)
4931 && TYPE_MAX_VALUE (domain)
4932 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4933 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4935 /* If we have constant bounds for the range of the type, get them. */
4936 if (const_bounds_p)
4938 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4939 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4942 /* If the constructor has fewer elements than the array, clear
4943 the whole array first. Similarly if this is a static
4944 constructor of a non-BLKmode object. */
4945 if (cleared)
4946 need_to_clear = 0;
4947 else if (REG_P (target) && TREE_STATIC (exp))
4948 need_to_clear = 1;
4949 else
4951 unsigned HOST_WIDE_INT idx;
4952 tree index, value;
4953 HOST_WIDE_INT count = 0, zero_count = 0;
4954 need_to_clear = ! const_bounds_p;
4956 /* This loop is a more accurate version of the loop in
4957 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4958 is also needed to check for missing elements. */
4959 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
4961 HOST_WIDE_INT this_node_count;
4963 if (need_to_clear)
4964 break;
4966 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4968 tree lo_index = TREE_OPERAND (index, 0);
4969 tree hi_index = TREE_OPERAND (index, 1);
4971 if (! host_integerp (lo_index, 1)
4972 || ! host_integerp (hi_index, 1))
4974 need_to_clear = 1;
4975 break;
4978 this_node_count = (tree_low_cst (hi_index, 1)
4979 - tree_low_cst (lo_index, 1) + 1);
4981 else
4982 this_node_count = 1;
4984 count += this_node_count;
4985 if (mostly_zeros_p (value))
4986 zero_count += this_node_count;
4989 /* Clear the entire array first if there are any missing
4990 elements, or if the incidence of zero elements is >=
4991 75%. */
4992 if (! need_to_clear
4993 && (count < maxelt - minelt + 1
4994 || 4 * zero_count >= 3 * count))
4995 need_to_clear = 1;
4998 if (need_to_clear && size > 0)
5000 if (REG_P (target))
5001 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5002 else
5003 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5004 cleared = 1;
5007 if (!cleared && REG_P (target))
5008 /* Inform later passes that the old value is dead. */
5009 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5011 /* Store each element of the constructor into the
5012 corresponding element of TARGET, determined by counting the
5013 elements. */
5014 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5016 enum machine_mode mode;
5017 HOST_WIDE_INT bitsize;
5018 HOST_WIDE_INT bitpos;
5019 int unsignedp;
5020 rtx xtarget = target;
5022 if (cleared && initializer_zerop (value))
5023 continue;
5025 unsignedp = TYPE_UNSIGNED (elttype);
5026 mode = TYPE_MODE (elttype);
5027 if (mode == BLKmode)
5028 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5029 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5030 : -1);
5031 else
5032 bitsize = GET_MODE_BITSIZE (mode);
5034 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5036 tree lo_index = TREE_OPERAND (index, 0);
5037 tree hi_index = TREE_OPERAND (index, 1);
5038 rtx index_r, pos_rtx;
5039 HOST_WIDE_INT lo, hi, count;
5040 tree position;
5042 /* If the range is constant and "small", unroll the loop. */
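/* "Small" here means (an observation about the condition below, not
   part of the original comment): a non-MEM target is always unrolled,
   while a MEM target is unrolled only for at most two elements or when
   the unrolled stores cover at most 40 bytes.  */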
5043 if (const_bounds_p
5044 && host_integerp (lo_index, 0)
5045 && host_integerp (hi_index, 0)
5046 && (lo = tree_low_cst (lo_index, 0),
5047 hi = tree_low_cst (hi_index, 0),
5048 count = hi - lo + 1,
5049 (!MEM_P (target)
5050 || count <= 2
5051 || (host_integerp (TYPE_SIZE (elttype), 1)
5052 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5053 <= 40 * 8)))))
5055 lo -= minelt; hi -= minelt;
5056 for (; lo <= hi; lo++)
5058 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5060 if (MEM_P (target)
5061 && !MEM_KEEP_ALIAS_SET_P (target)
5062 && TREE_CODE (type) == ARRAY_TYPE
5063 && TYPE_NONALIASED_COMPONENT (type))
5065 target = copy_rtx (target);
5066 MEM_KEEP_ALIAS_SET_P (target) = 1;
5069 store_constructor_field
5070 (target, bitsize, bitpos, mode, value, type, cleared,
5071 get_alias_set (elttype));
5074 else
5076 rtx loop_start = gen_label_rtx ();
5077 rtx loop_end = gen_label_rtx ();
5078 tree exit_cond;
5080 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5081 unsignedp = TYPE_UNSIGNED (domain);
5083 index = build_decl (VAR_DECL, NULL_TREE, domain);
5085 index_r
5086 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5087 &unsignedp, 0));
5088 SET_DECL_RTL (index, index_r);
5089 store_expr (lo_index, index_r, 0);
5091 /* Build the head of the loop. */
5092 do_pending_stack_adjust ();
5093 emit_label (loop_start);
5095 /* Assign value to element index. */
5096 position
5097 = convert (ssizetype,
5098 fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5099 index, TYPE_MIN_VALUE (domain)));
5100 position = size_binop (MULT_EXPR, position,
5101 convert (ssizetype,
5102 TYPE_SIZE_UNIT (elttype)));
5104 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5105 xtarget = offset_address (target, pos_rtx,
5106 highest_pow2_factor (position));
5107 xtarget = adjust_address (xtarget, mode, 0);
5108 if (TREE_CODE (value) == CONSTRUCTOR)
5109 store_constructor (value, xtarget, cleared,
5110 bitsize / BITS_PER_UNIT);
5111 else
5112 store_expr (value, xtarget, 0);
5114 /* Generate a conditional jump to exit the loop. */
5115 exit_cond = build2 (LT_EXPR, integer_type_node,
5116 index, hi_index);
5117 jumpif (exit_cond, loop_end);
5119 /* Update the loop counter, and jump to the head of
5120 the loop. */
5121 expand_assignment (index,
5122 build2 (PLUS_EXPR, TREE_TYPE (index),
5123 index, integer_one_node));
5125 emit_jump (loop_start);
5127 /* Build the end of the loop. */
5128 emit_label (loop_end);
5131 else if ((index != 0 && ! host_integerp (index, 0))
5132 || ! host_integerp (TYPE_SIZE (elttype), 1))
5134 tree position;
5136 if (index == 0)
5137 index = ssize_int (1);
5139 if (minelt)
5140 index = fold_convert (ssizetype,
5141 fold_build2 (MINUS_EXPR,
5142 TREE_TYPE (index),
5143 index,
5144 TYPE_MIN_VALUE (domain)));
5146 position = size_binop (MULT_EXPR, index,
5147 convert (ssizetype,
5148 TYPE_SIZE_UNIT (elttype)));
5149 xtarget = offset_address (target,
5150 expand_expr (position, 0, VOIDmode, 0),
5151 highest_pow2_factor (position));
5152 xtarget = adjust_address (xtarget, mode, 0);
5153 store_expr (value, xtarget, 0);
5155 else
5157 if (index != 0)
5158 bitpos = ((tree_low_cst (index, 0) - minelt)
5159 * tree_low_cst (TYPE_SIZE (elttype), 1));
5160 else
5161 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5163 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5164 && TREE_CODE (type) == ARRAY_TYPE
5165 && TYPE_NONALIASED_COMPONENT (type))
5167 target = copy_rtx (target);
5168 MEM_KEEP_ALIAS_SET_P (target) = 1;
5170 store_constructor_field (target, bitsize, bitpos, mode, value,
5171 type, cleared, get_alias_set (elttype));
5174 break;
5177 case VECTOR_TYPE:
5179 unsigned HOST_WIDE_INT idx;
5180 constructor_elt *ce;
5181 int i;
5182 int need_to_clear;
5183 int icode = 0;
5184 tree elttype = TREE_TYPE (type);
5185 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5186 enum machine_mode eltmode = TYPE_MODE (elttype);
5187 HOST_WIDE_INT bitsize;
5188 HOST_WIDE_INT bitpos;
5189 rtvec vector = NULL;
5190 unsigned n_elts;
5192 gcc_assert (eltmode != BLKmode);
5194 n_elts = TYPE_VECTOR_SUBPARTS (type);
5195 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5197 enum machine_mode mode = GET_MODE (target);
5199 icode = (int) vec_init_optab->handlers[mode].insn_code;
5200 if (icode != CODE_FOR_nothing)
5202 unsigned int i;
5204 vector = rtvec_alloc (n_elts);
5205 for (i = 0; i < n_elts; i++)
5206 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5210 /* If the constructor has fewer elements than the vector,
5211 clear the whole vector first. Similarly if this is a static
5212 constructor of a non-BLKmode object. */
5213 if (cleared)
5214 need_to_clear = 0;
5215 else if (REG_P (target) && TREE_STATIC (exp))
5216 need_to_clear = 1;
5217 else
5219 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5220 tree value;
5222 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5224 int n_elts_here = tree_low_cst
5225 (int_const_binop (TRUNC_DIV_EXPR,
5226 TYPE_SIZE (TREE_TYPE (value)),
5227 TYPE_SIZE (elttype), 0), 1);
5229 count += n_elts_here;
5230 if (mostly_zeros_p (value))
5231 zero_count += n_elts_here;
5234 /* Clear the entire vector first if there are any missing elements,
5235 or if the incidence of zero elements is >= 75%. */
5236 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5239 if (need_to_clear && size > 0 && !vector)
5241 if (REG_P (target))
5242 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5243 else
5244 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5245 cleared = 1;
5248 /* Inform later passes that the old value is dead. */
5249 if (!cleared && REG_P (target))
5250 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5252 /* Store each element of the constructor into the corresponding
5253 element of TARGET, determined by counting the elements. */
5254 for (idx = 0, i = 0;
5255 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5256 idx++, i += bitsize / elt_size)
5258 HOST_WIDE_INT eltpos;
5259 tree value = ce->value;
5261 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5262 if (cleared && initializer_zerop (value))
5263 continue;
5265 if (ce->index)
5266 eltpos = tree_low_cst (ce->index, 1);
5267 else
5268 eltpos = i;
5270 if (vector)
5272 /* Vector CONSTRUCTORs should only be built from smaller
5273 vectors in the case of BLKmode vectors. */
5274 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5275 RTVEC_ELT (vector, eltpos)
5276 = expand_expr (value, NULL_RTX, VOIDmode, 0);
5278 else
5280 enum machine_mode value_mode =
5281 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5282 ? TYPE_MODE (TREE_TYPE (value))
5283 : eltmode;
5284 bitpos = eltpos * elt_size;
5285 store_constructor_field (target, bitsize, bitpos,
5286 value_mode, value, type,
5287 cleared, get_alias_set (elttype));
5291 if (vector)
5292 emit_insn (GEN_FCN (icode)
5293 (target,
5294 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5295 break;
5298 default:
5299 gcc_unreachable ();
5303 /* Store the value of EXP (an expression tree)
5304 into a subfield of TARGET which has mode MODE and occupies
5305 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5306 If MODE is VOIDmode, it means that we are storing into a bit-field.
5308 Always return const0_rtx unless we have something particular to
5309 return.
5311 TYPE is the type of the underlying object,
5313 ALIAS_SET is the alias set for the destination. This value will
5314 (in general) be different from that for TARGET, since TARGET is a
5315 reference to the containing structure. */
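/* Illustrative example (not from the original comments): for

     struct s { int i : 3; } *p;
     ...
     p->i = v;

   MODE is VOIDmode (a bit-field), so the first branch below expands V
   and hands it to store_bit_field; a word-aligned "int" member would
   instead take the final else branch and become an ordinary store
   through adjust_address.  */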
5317 static rtx
5318 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5319 enum machine_mode mode, tree exp, tree type, int alias_set)
5321 HOST_WIDE_INT width_mask = 0;
5323 if (TREE_CODE (exp) == ERROR_MARK)
5324 return const0_rtx;
5326 /* If we have nothing to store, do nothing unless the expression has
5327 side-effects. */
5328 if (bitsize == 0)
5329 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5330 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5331 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5333 /* If we are storing into an unaligned field of an aligned union that is
5334 in a register, we may have the mode of TARGET being an integer mode but
5335 MODE == BLKmode. In that case, get an aligned object whose size and
5336 alignment are the same as TARGET and store TARGET into it (we can avoid
5337 the store if the field being stored is the entire width of TARGET). Then
5338 call ourselves recursively to store the field into a BLKmode version of
5339 that object. Finally, load from the object into TARGET. This is not
5340 very efficient in general, but should only be slightly more expensive
5341 than the otherwise-required unaligned accesses. Perhaps this can be
5342 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5343 twice, once with emit_move_insn and once via store_field. */
5345 if (mode == BLKmode
5346 && (REG_P (target) || GET_CODE (target) == SUBREG))
5348 rtx object = assign_temp (type, 0, 1, 1);
5349 rtx blk_object = adjust_address (object, BLKmode, 0);
5351 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5352 emit_move_insn (object, target);
5354 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5356 emit_move_insn (target, object);
5358 /* We want to return the BLKmode version of the data. */
5359 return blk_object;
5362 if (GET_CODE (target) == CONCAT)
5364 /* We're storing into a struct containing a single __complex. */
5366 gcc_assert (!bitpos);
5367 return store_expr (exp, target, 0);
5370 /* If the structure is in a register or if the component
5371 is a bit field, we cannot use addressing to access it.
5372 Use bit-field techniques or SUBREG to store in it. */
5374 if (mode == VOIDmode
5375 || (mode != BLKmode && ! direct_store[(int) mode]
5376 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5377 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5378 || REG_P (target)
5379 || GET_CODE (target) == SUBREG
5380 /* If the field isn't aligned enough to store as an ordinary memref,
5381 store it as a bit field. */
5382 || (mode != BLKmode
5383 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5384 || bitpos % GET_MODE_ALIGNMENT (mode))
5385 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5386 || (bitpos % BITS_PER_UNIT != 0)))
5387 /* If the RHS and field are a constant size and the size of the
5388 RHS isn't the same size as the bitfield, we must use bitfield
5389 operations. */
5390 || (bitsize >= 0
5391 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5392 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5394 rtx temp;
5396 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5397 implies a mask operation. If the precision is the same size as
5398 the field we're storing into, that mask is redundant. This is
5399 particularly common with bit field assignments generated by the
5400 C front end. */
5401 if (TREE_CODE (exp) == NOP_EXPR)
5403 tree type = TREE_TYPE (exp);
5404 if (INTEGRAL_TYPE_P (type)
5405 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5406 && bitsize == TYPE_PRECISION (type))
5408 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5409 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5410 exp = TREE_OPERAND (exp, 0);
5414 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5416 /* If BITSIZE is narrower than the size of the type of EXP
5417 we will be narrowing TEMP. Normally, what's wanted are the
5418 low-order bits. However, if EXP's type is a record and this is
5419 a big-endian machine, we want the upper BITSIZE bits. */
5420 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5421 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5422 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5423 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5424 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5425 - bitsize),
5426 NULL_RTX, 1);
5428 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5429 MODE. */
5430 if (mode != VOIDmode && mode != BLKmode
5431 && mode != TYPE_MODE (TREE_TYPE (exp)))
5432 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5434 /* If the modes of TARGET and TEMP are both BLKmode, both
5435 must be in memory and BITPOS must be aligned on a byte
5436 boundary. If so, we simply do a block copy. */
5437 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5439 gcc_assert (MEM_P (target) && MEM_P (temp)
5440 && !(bitpos % BITS_PER_UNIT));
5442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5443 emit_block_move (target, temp,
5444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5445 / BITS_PER_UNIT),
5446 BLOCK_OP_NORMAL);
5448 return const0_rtx;
5451 /* Store the value in the bitfield. */
5452 store_bit_field (target, bitsize, bitpos, mode, temp);
5454 return const0_rtx;
5456 else
5458 /* Now build a reference to just the desired component. */
5459 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5461 if (to_rtx == target)
5462 to_rtx = copy_rtx (to_rtx);
5464 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5465 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5466 set_mem_alias_set (to_rtx, alias_set);
5468 return store_expr (exp, to_rtx, 0);
5472 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5473 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5474 codes and find the ultimate containing object, which we return.
5476 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5477 bit position, and *PUNSIGNEDP to the signedness of the field.
5478 If the position of the field is variable, we store a tree
5479 giving the variable offset (in units) in *POFFSET.
5480 This offset is in addition to the bit position.
5481 If the position is not variable, we store 0 in *POFFSET.
5483 If any of the extraction expressions is volatile,
5484 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5486 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5487 is a mode that can be used to access the field. In that case, *PBITSIZE
5488 is redundant.
5490 If the field describes a variable-sized object, *PMODE is set to
5491 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5492 this case, but the address of the object can be found.
5494 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5495 look through nodes that serve as markers of a greater alignment than
5496 the one that can be deduced from the expression. These nodes make it
5497 possible for front-ends to prevent temporaries from being created by
5498 the middle-end on alignment considerations. For that purpose, the
5499 normal operating mode at high-level is to always pass FALSE so that
5500 the ultimate containing object is really returned; moreover, the
5501 associated predicate handled_component_p will always return TRUE
5502 on these nodes, thus indicating that they are essentially handled
5503 by get_inner_reference. TRUE should only be passed when the caller
5504 is scanning the expression in order to build another representation
5505 and specifically knows how to handle these nodes; as such, this is
5506 the normal operating mode in the RTL expanders. */
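/* Worked example (illustrative): for the C reference "x.a[i].b",
   where A starts 8 bytes into X, elements of A are 16 bytes, and B
   sits 4 bytes into an element, the walk below strips the
   COMPONENT_REFs and the ARRAY_REF and returns the decl X, with the
   constant parts accumulating to a bit position of 96 and *POFFSET
   holding the variable part, (sizetype) i * 16 units.  */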
5508 tree
5509 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5510 HOST_WIDE_INT *pbitpos, tree *poffset,
5511 enum machine_mode *pmode, int *punsignedp,
5512 int *pvolatilep, bool keep_aligning)
5514 tree size_tree = 0;
5515 enum machine_mode mode = VOIDmode;
5516 tree offset = size_zero_node;
5517 tree bit_offset = bitsize_zero_node;
5518 tree tem;
5520 /* First get the mode, signedness, and size. We do this from just the
5521 outermost expression. */
5522 if (TREE_CODE (exp) == COMPONENT_REF)
5524 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5525 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5526 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5528 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5530 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5532 size_tree = TREE_OPERAND (exp, 1);
5533 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5535 else
5537 mode = TYPE_MODE (TREE_TYPE (exp));
5538 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5540 if (mode == BLKmode)
5541 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5542 else
5543 *pbitsize = GET_MODE_BITSIZE (mode);
5546 if (size_tree != 0)
5548 if (! host_integerp (size_tree, 1))
5549 mode = BLKmode, *pbitsize = -1;
5550 else
5551 *pbitsize = tree_low_cst (size_tree, 1);
5554 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5555 and find the ultimate containing object. */
5556 while (1)
5558 switch (TREE_CODE (exp))
5560 case BIT_FIELD_REF:
5561 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5562 TREE_OPERAND (exp, 2));
5563 break;
5565 case COMPONENT_REF:
5567 tree field = TREE_OPERAND (exp, 1);
5568 tree this_offset = component_ref_field_offset (exp);
5570 /* If this field hasn't been filled in yet, don't go past it.
5571 This should only happen when folding expressions made during
5572 type construction. */
5573 if (this_offset == 0)
5574 break;
5576 offset = size_binop (PLUS_EXPR, offset, this_offset);
5577 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5578 DECL_FIELD_BIT_OFFSET (field));
5580 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5582 break;
5584 case ARRAY_REF:
5585 case ARRAY_RANGE_REF:
5587 tree index = TREE_OPERAND (exp, 1);
5588 tree low_bound = array_ref_low_bound (exp);
5589 tree unit_size = array_ref_element_size (exp);
5591 /* We assume all arrays have sizes that are a multiple of a byte.
5592 First subtract the lower bound, if any, in the type of the
5593 index, then convert to sizetype and multiply by the size of
5594 the array element. */
5595 if (! integer_zerop (low_bound))
5596 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5597 index, low_bound);
5599 offset = size_binop (PLUS_EXPR, offset,
5600 size_binop (MULT_EXPR,
5601 convert (sizetype, index),
5602 unit_size));
5604 break;
5606 case REALPART_EXPR:
5607 break;
5609 case IMAGPART_EXPR:
5610 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5611 bitsize_int (*pbitsize));
5612 break;
5614 case VIEW_CONVERT_EXPR:
5615 if (keep_aligning && STRICT_ALIGNMENT
5616 && (TYPE_ALIGN (TREE_TYPE (exp))
5617 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5618 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5619 < BIGGEST_ALIGNMENT)
5620 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5621 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5622 goto done;
5623 break;
5625 default:
5626 goto done;
5629 /* If any reference in the chain is volatile, the effect is volatile. */
5630 if (TREE_THIS_VOLATILE (exp))
5631 *pvolatilep = 1;
5633 exp = TREE_OPERAND (exp, 0);
5635 done:
5637 /* If OFFSET is constant, see if we can return the whole thing as a
5638 constant bit position. Otherwise, split it up. */
5639 if (host_integerp (offset, 0)
5640 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5641 bitsize_unit_node))
5642 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5643 && host_integerp (tem, 0))
5644 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5645 else
5646 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5648 *pmode = mode;
5649 return exp;
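
/* Illustrative sketch only (not part of the original file): the typical
   calling pattern for get_inner_reference.  Returns the base object of
   REF and, when the position is constant and byte-aligned, stores its
   constant byte offset in *PBYTEPOS (else -1).  Hypothetical name.  */
static tree
example_reference_base (tree ref, HOST_WIDE_INT *pbytepos)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
                                   &mode, &unsignedp, &volatilep, false);

  /* A null *POFFSET means the position is entirely constant.  */
  if (offset == 0 && bitpos % BITS_PER_UNIT == 0)
    *pbytepos = bitpos / BITS_PER_UNIT;
  else
    *pbytepos = -1;
  return base;
}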
5652 /* Return a tree of sizetype representing the size, in bytes, of the element
5653 of EXP, an ARRAY_REF. */
5655 tree
5656 array_ref_element_size (tree exp)
5658 tree aligned_size = TREE_OPERAND (exp, 3);
5659 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5661 /* If a size was specified in the ARRAY_REF, it's the size measured
5662 in alignment units of the element type. So multiply by that value. */
5663 if (aligned_size)
5665 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5666 sizetype from another type of the same width and signedness. */
5667 if (TREE_TYPE (aligned_size) != sizetype)
5668 aligned_size = fold_convert (sizetype, aligned_size);
5669 return size_binop (MULT_EXPR, aligned_size,
5670 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5673 /* Otherwise, take the size from that of the element type. Substitute
5674 any PLACEHOLDER_EXPR that we have. */
5675 else
5676 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5679 /* Return a tree representing the lower bound of the array mentioned in
5680 EXP, an ARRAY_REF. */
5682 tree
5683 array_ref_low_bound (tree exp)
5685 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5687 /* If a lower bound is specified in EXP, use it. */
5688 if (TREE_OPERAND (exp, 2))
5689 return TREE_OPERAND (exp, 2);
5691 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5692 substituting for a PLACEHOLDER_EXPR as needed. */
5693 if (domain_type && TYPE_MIN_VALUE (domain_type))
5694 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5696 /* Otherwise, return a zero of the appropriate type. */
5697 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5700 /* Return a tree representing the upper bound of the array mentioned in
5701 EXP, an ARRAY_REF. */
5703 tree
5704 array_ref_up_bound (tree exp)
5706 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5708 /* If there is a domain type and it has an upper bound, use it, substituting
5709 for a PLACEHOLDER_EXPR as needed. */
5710 if (domain_type && TYPE_MAX_VALUE (domain_type))
5711 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5713 /* Otherwise fail. */
5714 return NULL_TREE;
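
/* Illustrative sketch only (not part of the original file): combining the
   two bound accessors above to compute the constant element count of the
   array designated by EXP, an ARRAY_REF.  Returns false when a bound is
   missing or is not a compile-time integer.  Hypothetical name.  */
static bool
example_array_ref_count (tree exp, HOST_WIDE_INT *pcount)
{
  tree lo = array_ref_low_bound (exp);
  tree hi = array_ref_up_bound (exp);

  if (hi == NULL_TREE
      || ! host_integerp (lo, 0)
      || ! host_integerp (hi, 0))
    return false;

  *pcount = tree_low_cst (hi, 0) - tree_low_cst (lo, 0) + 1;
  return true;
}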
5717 /* Return a tree representing the offset, in bytes, of the field referenced
5718 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5720 tree
5721 component_ref_field_offset (tree exp)
5723 tree aligned_offset = TREE_OPERAND (exp, 2);
5724 tree field = TREE_OPERAND (exp, 1);
5726 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5727 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5728 value. */
5729 if (aligned_offset)
5731 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5732 sizetype from another type of the same width and signedness. */
5733 if (TREE_TYPE (aligned_offset) != sizetype)
5734 aligned_offset = fold_convert (sizetype, aligned_offset);
5735 return size_binop (MULT_EXPR, aligned_offset,
5736 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5739 /* Otherwise, take the offset from that of the field. Substitute
5740 any PLACEHOLDER_EXPR that we have. */
5741 else
5742 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5745 /* Return 1 if T is an expression that get_inner_reference handles. */
5747 int
5748 handled_component_p (tree t)
5750 switch (TREE_CODE (t))
5752 case BIT_FIELD_REF:
5753 case COMPONENT_REF:
5754 case ARRAY_REF:
5755 case ARRAY_RANGE_REF:
5756 case VIEW_CONVERT_EXPR:
5757 case REALPART_EXPR:
5758 case IMAGPART_EXPR:
5759 return 1;
5761 default:
5762 return 0;
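
/* Illustrative sketch only (not part of the original file): the usual
   consumer of handled_component_p, stripping every handled wrapper to
   reach the ultimate base object -- a simplified form of the walk that
   get_inner_reference performs above.  Hypothetical name.  */
static tree
example_strip_components (tree t)
{
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  return t;
}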
5766 /* Given an rtx VALUE that may contain additions and multiplications, return
5767 an equivalent value that just refers to a register, memory, or constant.
5768 This is done by generating instructions to perform the arithmetic and
5769 returning a pseudo-register containing the value.
5771 The returned value may be a REG, SUBREG, MEM or constant. */
5773 rtx
5774 force_operand (rtx value, rtx target)
5776 rtx op1, op2;
5777 /* Use subtarget as the target for operand 0 of a binary operation. */
5778 rtx subtarget = get_subtarget (target);
5779 enum rtx_code code = GET_CODE (value);
5781 /* Check for subreg applied to an expression produced by loop optimizer. */
5782 if (code == SUBREG
5783 && !REG_P (SUBREG_REG (value))
5784 && !MEM_P (SUBREG_REG (value)))
5786 value = simplify_gen_subreg (GET_MODE (value),
5787 force_reg (GET_MODE (SUBREG_REG (value)),
5788 force_operand (SUBREG_REG (value),
5789 NULL_RTX)),
5790 GET_MODE (SUBREG_REG (value)),
5791 SUBREG_BYTE (value));
5792 code = GET_CODE (value);
5795 /* Check for a PIC address load. */
5796 if ((code == PLUS || code == MINUS)
5797 && XEXP (value, 0) == pic_offset_table_rtx
5798 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5799 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5800 || GET_CODE (XEXP (value, 1)) == CONST))
5802 if (!subtarget)
5803 subtarget = gen_reg_rtx (GET_MODE (value));
5804 emit_move_insn (subtarget, value);
5805 return subtarget;
5808 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5810 if (!target)
5811 target = gen_reg_rtx (GET_MODE (value));
5812 convert_move (target, force_operand (XEXP (value, 0), NULL),
5813 code == ZERO_EXTEND);
5814 return target;
5817 if (ARITHMETIC_P (value))
5819 op2 = XEXP (value, 1);
5820 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5821 subtarget = 0;
5822 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5824 code = PLUS;
5825 op2 = negate_rtx (GET_MODE (value), op2);
5828 /* Check for an addition with OP2 a constant integer and our first
5829 operand a PLUS of a virtual register and something else. In that
5830 case, we want to emit the sum of the virtual register and the
5831 constant first and then add the other value. This allows virtual
5832 register instantiation to simply modify the constant rather than
5833 creating another one around this addition. */
5834 if (code == PLUS && GET_CODE (op2) == CONST_INT
5835 && GET_CODE (XEXP (value, 0)) == PLUS
5836 && REG_P (XEXP (XEXP (value, 0), 0))
5837 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5838 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5840 rtx temp = expand_simple_binop (GET_MODE (value), code,
5841 XEXP (XEXP (value, 0), 0), op2,
5842 subtarget, 0, OPTAB_LIB_WIDEN);
5843 return expand_simple_binop (GET_MODE (value), code, temp,
5844 force_operand (XEXP (XEXP (value,
5845 0), 1), 0),
5846 target, 0, OPTAB_LIB_WIDEN);
5849 op1 = force_operand (XEXP (value, 0), subtarget);
5850 op2 = force_operand (op2, NULL_RTX);
5851 switch (code)
5853 case MULT:
5854 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5855 case DIV:
5856 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5857 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5858 target, 1, OPTAB_LIB_WIDEN);
5859 else
5860 return expand_divmod (0,
5861 FLOAT_MODE_P (GET_MODE (value))
5862 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5863 GET_MODE (value), op1, op2, target, 0);
5864 break;
5865 case MOD:
5866 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5867 target, 0);
5868 break;
5869 case UDIV:
5870 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5871 target, 1);
5872 break;
5873 case UMOD:
5874 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5875 target, 1);
5876 break;
5877 case ASHIFTRT:
5878 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5879 target, 0, OPTAB_LIB_WIDEN);
5880 break;
5881 default:
5882 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5883 target, 1, OPTAB_LIB_WIDEN);
5886 if (UNARY_P (value))
5888 int unsignedp = 0;
5890 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5891 switch (code)
5893 case ZERO_EXTEND: case UNSIGNED_FIX: case UNSIGNED_FLOAT:
5894 unsignedp = 1;
5895 /* fall through. */
5896 case TRUNCATE:
5897 case SIGN_EXTEND: case FIX: case FLOAT:
5898 return convert_to_mode (GET_MODE (value), op1, unsignedp);
5899 default:
5900 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5904 #ifdef INSN_SCHEDULING
5905 /* On machines that have insn scheduling, we want all memory references to be
5906 explicit, so we need to deal with such paradoxical SUBREGs. */
5907 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5908 && (GET_MODE_SIZE (GET_MODE (value))
5909 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5910 value
5911 = simplify_gen_subreg (GET_MODE (value),
5912 force_reg (GET_MODE (SUBREG_REG (value)),
5913 force_operand (SUBREG_REG (value),
5914 NULL_RTX)),
5915 GET_MODE (SUBREG_REG (value)),
5916 SUBREG_BYTE (value));
5917 #endif
5919 return value;
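
/* Illustrative sketch only (not part of the original file): a common use
   of force_operand -- flatten address arithmetic such as
   (plus REG CONST_INT) into a register or simple operand that a memory
   reference can use.  Hypothetical name.  */
static rtx
example_force_sum (rtx reg, HOST_WIDE_INT delta)
{
  rtx sum = gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (delta));
  return force_operand (sum, NULL_RTX);
}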
5922 /* Subroutine of expand_expr: return nonzero iff there is no way that
5923 EXP can reference X, which is being modified. TOP_P is nonzero if this
5924 call is going to be used to determine whether we need a temporary
5925 for EXP, as opposed to a recursive call to this function.
5927 It is always safe for this routine to return zero since it merely
5928 searches for optimization opportunities. */
5930 static int
5931 safe_from_p (rtx x, tree exp, int top_p)
5933 rtx exp_rtl = 0;
5934 int i, nops;
5936 if (x == 0
5937 /* If EXP has varying size, we MUST use a target since we currently
5938 have no way of allocating temporaries of variable size
5939 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5940 So we assume here that something at a higher level has prevented a
5941 clash. This is somewhat bogus, but the best we can do. Only
5942 do this when X is BLKmode and when we are at the top level. */
5943 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5944 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5945 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5946 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5947 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5948 != INTEGER_CST)
5949 && GET_MODE (x) == BLKmode)
5950 /* If X is in the outgoing argument area, it is always safe. */
5951 || (MEM_P (x)
5952 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5953 || (GET_CODE (XEXP (x, 0)) == PLUS
5954 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5955 return 1;
5957 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5958 find the underlying pseudo. */
5959 if (GET_CODE (x) == SUBREG)
5961 x = SUBREG_REG (x);
5962 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5963 return 0;
5966 /* Now look at our tree code and possibly recurse. */
5967 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5969 case tcc_declaration:
5970 exp_rtl = DECL_RTL_IF_SET (exp);
5971 break;
5973 case tcc_constant:
5974 return 1;
5976 case tcc_exceptional:
5977 if (TREE_CODE (exp) == TREE_LIST)
5979 while (1)
5981 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5982 return 0;
5983 exp = TREE_CHAIN (exp);
5984 if (!exp)
5985 return 1;
5986 if (TREE_CODE (exp) != TREE_LIST)
5987 return safe_from_p (x, exp, 0);
5990 else if (TREE_CODE (exp) == ERROR_MARK)
5991 return 1; /* An already-visited SAVE_EXPR? */
5992 else
5993 return 0;
5995 case tcc_statement:
5996 /* The only case we look at here is the DECL_INITIAL inside a
5997 DECL_EXPR. */
5998 return (TREE_CODE (exp) != DECL_EXPR
5999 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6000 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6001 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6003 case tcc_binary:
6004 case tcc_comparison:
6005 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6006 return 0;
6007 /* Fall through. */
6009 case tcc_unary:
6010 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6012 case tcc_expression:
6013 case tcc_reference:
6014 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6015 the expression. If it is set, we conflict iff we are that rtx or
6016 both are in memory. Otherwise, we check all operands of the
6017 expression recursively. */
6019 switch (TREE_CODE (exp))
6021 case ADDR_EXPR:
6022 /* If the operand is static or we are static, we can't conflict.
6023 Likewise if we don't conflict with the operand at all. */
6024 if (staticp (TREE_OPERAND (exp, 0))
6025 || TREE_STATIC (exp)
6026 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6027 return 1;
6029 /* Otherwise, the only way this can conflict is if we are taking
6030 the address of a DECL whose address is part of X, which is
6031 very rare. */
6032 exp = TREE_OPERAND (exp, 0);
6033 if (DECL_P (exp))
6035 if (!DECL_RTL_SET_P (exp)
6036 || !MEM_P (DECL_RTL (exp)))
6037 return 0;
6038 else
6039 exp_rtl = XEXP (DECL_RTL (exp), 0);
6041 break;
6043 case MISALIGNED_INDIRECT_REF:
6044 case ALIGN_INDIRECT_REF:
6045 case INDIRECT_REF:
6046 if (MEM_P (x)
6047 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6048 get_alias_set (exp)))
6049 return 0;
6050 break;
6052 case CALL_EXPR:
6053 /* Assume that the call will clobber all hard registers and
6054 all of memory. */
6055 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6056 || MEM_P (x))
6057 return 0;
6058 break;
6060 case WITH_CLEANUP_EXPR:
6061 case CLEANUP_POINT_EXPR:
6062 /* Lowered by gimplify.c. */
6063 gcc_unreachable ();
6065 case SAVE_EXPR:
6066 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6068 default:
6069 break;
6072 /* If we have an rtx, we do not need to scan our operands. */
6073 if (exp_rtl)
6074 break;
6076 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6077 for (i = 0; i < nops; i++)
6078 if (TREE_OPERAND (exp, i) != 0
6079 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6080 return 0;
6082 /* If this is a language-specific tree code, it may require
6083 special handling. */
6084 if ((unsigned int) TREE_CODE (exp)
6085 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6086 && !lang_hooks.safe_from_p (x, exp))
6087 return 0;
6088 break;
6090 case tcc_type:
6091 /* Should never get a type here. */
6092 gcc_unreachable ();
6095 /* If we have an rtl, find any enclosed object. Then see if we conflict
6096 with it. */
6097 if (exp_rtl)
6099 if (GET_CODE (exp_rtl) == SUBREG)
6101 exp_rtl = SUBREG_REG (exp_rtl);
6102 if (REG_P (exp_rtl)
6103 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6104 return 0;
6107 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6108 are memory and they conflict. */
6109 return ! (rtx_equal_p (x, exp_rtl)
6110 || (MEM_P (x) && MEM_P (exp_rtl)
6111 && true_dependence (exp_rtl, VOIDmode, x,
6112 rtx_addr_varies_p)));
6115 /* If we reach here, it is safe. */
6116 return 1;
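
/* Illustrative sketch only (not part of the original file): the guard
   that callers build on safe_from_p (the same pattern opens
   expand_operands below) -- fall back to a fresh pseudo when evaluating
   EXP might clobber TARGET.  Hypothetical name.  */
static rtx
example_safe_target (rtx target, tree exp, enum machine_mode mode)
{
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (mode);
  return target;
}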
6120 /* Return the highest power of two that EXP is known to be a multiple of.
6121 This is used in updating alignment of MEMs in array references. */
6123 unsigned HOST_WIDE_INT
6124 highest_pow2_factor (tree exp)
6126 unsigned HOST_WIDE_INT c0, c1;
6128 switch (TREE_CODE (exp))
6130 case INTEGER_CST:
6131 /* We can find the lowest bit that's a one. If the low
6132 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6133 We need to handle this case since we can find it in a COND_EXPR,
6134 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6135 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6136 later ICE. */
6137 if (TREE_CONSTANT_OVERFLOW (exp))
6138 return BIGGEST_ALIGNMENT;
6139 else
6141 /* Note: tree_low_cst is intentionally not used here;
6142 we don't care about the upper bits. */
6143 c0 = TREE_INT_CST_LOW (exp);
6144 c0 &= -c0;
6145 return c0 ? c0 : BIGGEST_ALIGNMENT;
6147 break;
6149 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6152 return MIN (c0, c1);
6154 case MULT_EXPR:
6155 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6156 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6157 return c0 * c1;
6159 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6160 case CEIL_DIV_EXPR:
6161 if (integer_pow2p (TREE_OPERAND (exp, 1))
6162 && host_integerp (TREE_OPERAND (exp, 1), 1))
6164 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6165 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6166 return MAX (1, c0 / c1);
6168 break;
6170 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6171 case SAVE_EXPR:
6172 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6174 case COMPOUND_EXPR:
6175 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6177 case COND_EXPR:
6178 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6179 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6180 return MIN (c0, c1);
6182 default:
6183 break;
6186 return 1;
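
/* Worked example for the INTEGER_CST case above (not part of the
   original file): in two's complement, C & -C isolates the lowest set
   bit, which is the highest power of two dividing C.  For C == 24
   (binary 11000) it yields 8, so anything known to be a multiple of 24
   is at least 8-byte aligned.  The INTEGER_CST case maps a zero result
   to BIGGEST_ALIGNMENT.  */
static unsigned HOST_WIDE_INT
example_lowest_set_bit (unsigned HOST_WIDE_INT c)
{
  return c & -c;  /* 24 -> 8, 40 -> 8, 64 -> 64, 0 -> 0.  */
}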
6189 /* Similar, except that the alignment requirements of TARGET are
6190 taken into account. Assume it is at least as aligned as its
6191 type, unless it is a COMPONENT_REF in which case the layout of
6192 the structure gives the alignment. */
6194 static unsigned HOST_WIDE_INT
6195 highest_pow2_factor_for_target (tree target, tree exp)
6197 unsigned HOST_WIDE_INT target_align, factor;
6199 factor = highest_pow2_factor (exp);
6200 if (TREE_CODE (target) == COMPONENT_REF)
6201 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6202 else
6203 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6204 return MAX (factor, target_align);
6207 /* Expands variable VAR. */
6209 void
6210 expand_var (tree var)
6212 if (DECL_EXTERNAL (var))
6213 return;
6215 if (TREE_STATIC (var))
6216 /* If this is an inlined copy of a static local variable,
6217 look up the original decl. */
6218 var = DECL_ORIGIN (var);
6220 if (TREE_STATIC (var)
6221 ? !TREE_ASM_WRITTEN (var)
6222 : !DECL_RTL_SET_P (var))
6224 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6225 /* Should be ignored. */;
6226 else if (lang_hooks.expand_decl (var))
6227 /* OK. */;
6228 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6229 expand_decl (var);
6230 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6231 rest_of_decl_compilation (var, 0, 0);
6232 else
6233 /* No expansion needed. */
6234 gcc_assert (TREE_CODE (var) == TYPE_DECL
6235 || TREE_CODE (var) == CONST_DECL
6236 || TREE_CODE (var) == FUNCTION_DECL
6237 || TREE_CODE (var) == LABEL_DECL);
6241 /* Subroutine of expand_expr. Expand the two operands of a binary
6242 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6243 The value may be stored in TARGET if TARGET is nonzero. The
6244 MODIFIER argument is as documented by expand_expr. */
6246 static void
6247 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6248 enum expand_modifier modifier)
6250 if (! safe_from_p (target, exp1, 1))
6251 target = 0;
6252 if (operand_equal_p (exp0, exp1, 0))
6254 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6255 *op1 = copy_rtx (*op0);
6257 else
6259 /* If we need to preserve evaluation order, copy exp0 into its own
6260 temporary variable so that it can't be clobbered by exp1. */
6261 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6262 exp0 = save_expr (exp0);
6263 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6264 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
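
/* Illustrative sketch only (not part of the original file): how a binary
   operator is typically expanded on top of expand_operands, modeled on
   the way the binary-operator cases of expand_expr_real_1 use it.
   Hypothetical name.  */
static rtx
example_expand_plus (tree exp, rtx target)
{
  rtx op0, op1;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                   target, &op0, &op1, EXPAND_NORMAL);
  return expand_simple_binop (mode, PLUS, op0, op1, target,
                              TYPE_UNSIGNED (TREE_TYPE (exp)),
                              OPTAB_LIB_WIDEN);
}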
6269 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6270 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6272 static rtx
6273 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6274 enum expand_modifier modifier)
6276 rtx result, subtarget;
6277 tree inner, offset;
6278 HOST_WIDE_INT bitsize, bitpos;
6279 int volatilep, unsignedp;
6280 enum machine_mode mode1;
6282 /* If we are taking the address of a constant and are at the top level,
6283 we have to use output_constant_def since we can't call force_const_mem
6284 at top level. */
6285 /* ??? This should be considered a front-end bug. We should not be
6286 generating ADDR_EXPR of something that isn't an LVALUE. The only
6287 exception here is STRING_CST. */
6288 if (TREE_CODE (exp) == CONSTRUCTOR
6289 || CONSTANT_CLASS_P (exp))
6290 return XEXP (output_constant_def (exp, 0), 0);
6292 /* Everything must be something allowed by is_gimple_addressable. */
6293 switch (TREE_CODE (exp))
6295 case INDIRECT_REF:
6296 /* This case will happen via recursion for &a->b. */
6297 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6299 case CONST_DECL:
6300 /* Recurse and make the output_constant_def clause above handle this. */
6301 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6302 tmode, modifier);
6304 case REALPART_EXPR:
6305 /* The real part of the complex number is always first, therefore
6306 the address is the same as the address of the parent object. */
6307 offset = 0;
6308 bitpos = 0;
6309 inner = TREE_OPERAND (exp, 0);
6310 break;
6312 case IMAGPART_EXPR:
6313 /* The imaginary part of the complex number is always second.
6314 The expression is therefore always offset by the size of the
6315 scalar type. */
6316 offset = 0;
6317 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6318 inner = TREE_OPERAND (exp, 0);
6319 break;
6321 default:
6322 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6323 expand_expr, as that can have various side effects; LABEL_DECLs,
6324 for example, may not have their DECL_RTL set yet. Assume language
6325 specific tree nodes can be expanded in some interesting way. */
6326 if (DECL_P (exp)
6327 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6329 result = expand_expr (exp, target, tmode,
6330 modifier == EXPAND_INITIALIZER
6331 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6333 /* If the DECL isn't in memory, then the DECL wasn't properly
6334 marked TREE_ADDRESSABLE, which will be either a front-end
6335 or a tree optimizer bug. */
6336 gcc_assert (MEM_P (result));
6337 result = XEXP (result, 0);
6339 /* ??? Is this needed anymore? */
6340 if (DECL_P (exp) && ! TREE_USED (exp))
6342 assemble_external (exp);
6343 TREE_USED (exp) = 1;
6346 if (modifier != EXPAND_INITIALIZER
6347 && modifier != EXPAND_CONST_ADDRESS)
6348 result = force_operand (result, target);
6349 return result;
6352 /* Pass FALSE as the last argument to get_inner_reference although
6353 we are expanding to RTL. The rationale is that we know how to
6354 handle "aligning nodes" here: we can just bypass them because
6355 they won't change the final object whose address will be returned
6356 (they actually exist only for that purpose). */
6357 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6358 &mode1, &unsignedp, &volatilep, false);
6359 break;
6362 /* We must have made progress. */
6363 gcc_assert (inner != exp);
6365 subtarget = offset || bitpos ? NULL_RTX : target;
6366 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6368 if (offset)
6370 rtx tmp;
6372 if (modifier != EXPAND_NORMAL)
6373 result = force_operand (result, NULL);
6374 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6376 result = convert_memory_address (tmode, result);
6377 tmp = convert_memory_address (tmode, tmp);
6379 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6380 result = gen_rtx_PLUS (tmode, result, tmp);
6381 else
6383 subtarget = bitpos ? NULL_RTX : target;
6384 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6385 1, OPTAB_LIB_WIDEN);
6389 if (bitpos)
6391 /* Someone beforehand should have rejected taking the address
6392 of such an object. */
6393 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6395 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6396 if (modifier < EXPAND_SUM)
6397 result = force_operand (result, target);
6400 return result;
6403 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6404 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6406 static rtx
6407 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6408 enum expand_modifier modifier)
6410 enum machine_mode rmode;
6411 rtx result;
6413 /* Target mode of VOIDmode says "whatever's natural". */
6414 if (tmode == VOIDmode)
6415 tmode = TYPE_MODE (TREE_TYPE (exp));
6417 /* We can get called with some Weird Things if the user does silliness
6418 like "(short) &a". In that case, convert_memory_address won't do
6419 the right thing, so ignore the given target mode. */
6420 if (tmode != Pmode && tmode != ptr_mode)
6421 tmode = Pmode;
6423 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6424 tmode, modifier);
6426 /* Despite expand_expr's claims about ignoring TMODE when not
6427 strictly convenient, stuff breaks if we don't honor it. Note
6428 that combined with the above, we only do this for pointer modes. */
6429 rmode = GET_MODE (result);
6430 if (rmode == VOIDmode)
6431 rmode = tmode;
6432 if (rmode != tmode)
6433 result = convert_memory_address (tmode, result);
6435 return result;
6439 /* expand_expr: generate code for computing expression EXP.
6440 An rtx for the computed value is returned. The value is never null.
6441 In the case of a void EXP, const0_rtx is returned.
6443 The value may be stored in TARGET if TARGET is nonzero.
6444 TARGET is just a suggestion; callers must assume that
6445 the rtx returned may not be the same as TARGET.
6447 If TARGET is CONST0_RTX, it means that the value will be ignored.
6449 If TMODE is not VOIDmode, it suggests generating the
6450 result in mode TMODE. But this is done only when convenient.
6451 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6452 TMODE is just a suggestion; callers must assume that
6453 the rtx returned may not have mode TMODE.
6455 Note that TARGET may have neither TMODE nor MODE. In that case, it
6456 probably will not be used.
6458 If MODIFIER is EXPAND_SUM then when EXP is an addition
6459 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6460 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6461 products as above, or REG or MEM, or constant.
6462 Ordinarily in such cases we would output mul or add instructions
6463 and then return a pseudo reg containing the sum.
6465 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6466 it also marks a label as absolutely required (it can't be dead).
6467 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6468 This is used for outputting expressions used in initializers.
6470 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6471 with a constant address even if that address is not normally legitimate.
6472 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6474 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6475 a call parameter. Such targets require special care as we haven't yet
6476 marked TARGET so that it's safe from being trashed by libcalls. We
6477 don't want to use TARGET for anything but the final result;
6478 intermediate values must go elsewhere. Additionally, calls to
6479 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6481 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6482 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6483 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6484 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6485 recursively. */
6487 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6488 enum expand_modifier, rtx *);
6490 rtx
6491 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6492 enum expand_modifier modifier, rtx *alt_rtl)
6494 int rn = -1;
6495 rtx ret, last = NULL;
6497 /* Handle ERROR_MARK before anybody tries to access its type. */
6498 if (TREE_CODE (exp) == ERROR_MARK
6499 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6501 ret = CONST0_RTX (tmode);
6502 return ret ? ret : const0_rtx;
6505 if (flag_non_call_exceptions)
6507 rn = lookup_stmt_eh_region (exp);
6508 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6509 if (rn >= 0)
6510 last = get_last_insn ();
6513 /* If this is an expression of some kind and it has an associated line
6514 number, then emit the line number before expanding the expression.
6516 We need to save and restore the file and line information so that
6517 errors discovered during expansion are emitted with the right
6518 information. It would be better if the diagnostic routines
6519 used the file/line information embedded in the tree nodes rather
6520 than globals. */
6521 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6523 location_t saved_location = input_location;
6524 input_location = EXPR_LOCATION (exp);
6525 emit_line_note (input_location);
6527 /* Record where the insns produced belong. */
6528 record_block_change (TREE_BLOCK (exp));
6530 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6532 input_location = saved_location;
6534 else
6536 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6539 /* If using non-call exceptions, mark all insns that may trap.
6540 expand_call() will mark CALL_INSNs before we get to this code,
6541 but it doesn't handle libcalls, and these may trap. */
6542 if (rn >= 0)
6544 rtx insn;
6545 for (insn = next_real_insn (last); insn;
6546 insn = next_real_insn (insn))
6548 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6549 /* If we want exceptions for non-call insns, any
6550 may_trap_p instruction may throw. */
6551 && GET_CODE (PATTERN (insn)) != CLOBBER
6552 && GET_CODE (PATTERN (insn)) != USE
6553 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6555 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6556 REG_NOTES (insn));
6561 return ret;
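
/* Illustrative sketch only (not part of the original file): the most
   common call pattern for the interface documented above -- evaluate an
   expression with no preferred target or mode; expand_expr itself is the
   thin wrapper around expand_expr_real declared in expr.h.
   Hypothetical name.  */
static rtx
example_expand (tree exp)
{
  return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
}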
6564 static rtx
6565 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6566 enum expand_modifier modifier, rtx *alt_rtl)
6568 rtx op0, op1, temp;
6569 tree type = TREE_TYPE (exp);
6570 int unsignedp;
6571 enum machine_mode mode;
6572 enum tree_code code = TREE_CODE (exp);
6573 optab this_optab;
6574 rtx subtarget, original_target;
6575 int ignore;
6576 tree context, subexp0, subexp1;
6577 bool reduce_bit_field = false;
6578 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6579 ? reduce_to_bit_field_precision ((expr), \
6580 target, \
6581 type) \
6582 : (expr))
6584 mode = TYPE_MODE (type);
6585 unsignedp = TYPE_UNSIGNED (type);
6586 if (lang_hooks.reduce_bit_field_operations
6587 && TREE_CODE (type) == INTEGER_TYPE
6588 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6590 /* An operation in what may be a bit-field type needs the
6591 result to be reduced to the precision of the bit-field type,
6592 which is narrower than that of the type's mode. */
6593 reduce_bit_field = true;
6594 if (modifier == EXPAND_STACK_PARM)
6595 target = 0;
6598 /* Use subtarget as the target for operand 0 of a binary operation. */
6599 subtarget = get_subtarget (target);
6600 original_target = target;
6601 ignore = (target == const0_rtx
6602 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6603 || code == CONVERT_EXPR || code == COND_EXPR
6604 || code == VIEW_CONVERT_EXPR)
6605 && TREE_CODE (type) == VOID_TYPE));
6607 /* If we are going to ignore this result, we need only do something
6608 if there is a side-effect somewhere in the expression. If there
6609 is, short-circuit the most common cases here. Note that we must
6610 not call expand_expr with anything but const0_rtx in case this
6611 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6613 if (ignore)
6615 if (! TREE_SIDE_EFFECTS (exp))
6616 return const0_rtx;
6618 /* Ensure we reference a volatile object even if value is ignored, but
6619 don't do this if all we are doing is taking its address. */
6620 if (TREE_THIS_VOLATILE (exp)
6621 && TREE_CODE (exp) != FUNCTION_DECL
6622 && mode != VOIDmode && mode != BLKmode
6623 && modifier != EXPAND_CONST_ADDRESS)
6625 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6626 if (MEM_P (temp))
6627 temp = copy_to_reg (temp);
6628 return const0_rtx;
6631 if (TREE_CODE_CLASS (code) == tcc_unary
6632 || code == COMPONENT_REF || code == INDIRECT_REF)
6633 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634 modifier);
6636 else if (TREE_CODE_CLASS (code) == tcc_binary
6637 || TREE_CODE_CLASS (code) == tcc_comparison
6638 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6640 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6641 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6642 return const0_rtx;
6644 else if (code == BIT_FIELD_REF)
6646 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6647 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6648 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6649 return const0_rtx;
6652 target = 0;
6656 switch (code)
6658 case LABEL_DECL:
6660 tree function = decl_function_context (exp);
6662 temp = label_rtx (exp);
6663 temp = gen_rtx_LABEL_REF (Pmode, temp);
6665 if (function != current_function_decl
6666 && function != 0)
6667 LABEL_REF_NONLOCAL_P (temp) = 1;
6669 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6670 return temp;
6673 case SSA_NAME:
6674 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6675 NULL);
6677 case PARM_DECL:
6678 case VAR_DECL:
6679 /* If a static var's type was incomplete when the decl was written,
6680 but the type is complete now, lay out the decl now. */
6681 if (DECL_SIZE (exp) == 0
6682 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6683 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6684 layout_decl (exp, 0);
6686 /* ... fall through ... */
6688 case FUNCTION_DECL:
6689 case RESULT_DECL:
6690 gcc_assert (DECL_RTL (exp));
6692 /* Ensure the variable is marked as used even if it doesn't go through
6693 a parser. If it hasn't been used yet, write out an external
6694 definition. */
6695 if (! TREE_USED (exp))
6697 assemble_external (exp);
6698 TREE_USED (exp) = 1;
6701 /* Show we haven't gotten RTL for this yet. */
6702 temp = 0;
6704 /* Variables inherited from containing functions should have
6705 been lowered by this point. */
6706 context = decl_function_context (exp);
6707 gcc_assert (!context
6708 || context == current_function_decl
6709 || TREE_STATIC (exp)
6710 /* ??? C++ creates functions that are not TREE_STATIC. */
6711 || TREE_CODE (exp) == FUNCTION_DECL);
6713 /* This is the case of an array whose size is to be determined
6714 from its initializer, while the initializer is still being parsed.
6715 See expand_decl. */
6717 if (MEM_P (DECL_RTL (exp))
6718 && REG_P (XEXP (DECL_RTL (exp), 0)))
6719 temp = validize_mem (DECL_RTL (exp));
6721 /* If DECL_RTL is memory, we are in the normal case: if either
6722 the address is not valid, or it is not a register and -fforce-addr
6723 is specified, get the address into a register. */
6725 else if (MEM_P (DECL_RTL (exp))
6726 && modifier != EXPAND_CONST_ADDRESS
6727 && modifier != EXPAND_SUM
6728 && modifier != EXPAND_INITIALIZER
6729 && (! memory_address_p (DECL_MODE (exp),
6730 XEXP (DECL_RTL (exp), 0))
6731 || (flag_force_addr
6732 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6734 if (alt_rtl)
6735 *alt_rtl = DECL_RTL (exp);
6736 temp = replace_equiv_address (DECL_RTL (exp),
6737 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6740 /* If we got something, return it. But first, set the alignment
6741 if the address is a register. */
6742 if (temp != 0)
6744 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6745 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6747 return temp;
6750 /* If the mode of DECL_RTL does not match that of the decl, it
6751 must be a promoted value. We return a SUBREG of the wanted mode,
6752 but mark it so that we know that it was already extended. */
6754 if (REG_P (DECL_RTL (exp))
6755 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6757 enum machine_mode pmode;
6759 /* Get the signedness used for this variable. Ensure we get the
6760 same mode we got when the variable was declared. */
6761 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6762 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6763 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6765 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6766 SUBREG_PROMOTED_VAR_P (temp) = 1;
6767 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6768 return temp;
6771 return DECL_RTL (exp);
6773 case INTEGER_CST:
6774 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6775 TREE_INT_CST_HIGH (exp), mode);
6777 /* ??? If overflow is set, fold will have done an incomplete job,
6778 which can result in (plus xx (const_int 0)), which can get
6779 simplified by validate_replace_rtx during virtual register
6780 instantiation, which can result in unrecognizable insns.
6781 Avoid this by forcing all overflows into registers. */
6782 if (TREE_CONSTANT_OVERFLOW (exp)
6783 && modifier != EXPAND_INITIALIZER)
6784 temp = force_reg (mode, temp);
6786 return temp;
6788 case VECTOR_CST:
6789 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6790 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6791 return const_vector_from_tree (exp);
6792 else
6793 return expand_expr (build_constructor_from_list
6794 (TREE_TYPE (exp),
6795 TREE_VECTOR_CST_ELTS (exp)),
6796 ignore ? const0_rtx : target, tmode, modifier);
6798 case CONST_DECL:
6799 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6801 case REAL_CST:
6802 /* If optimized, generate immediate CONST_DOUBLE
6803 which will be turned into memory by reload if necessary.
6805 We used to force a register so that loop.c could see it. But
6806 this does not allow gen_* patterns to perform optimizations with
6807 the constants. It also produces two insns in cases like "x = 1.0;".
6808 On most machines, floating-point constants are not permitted in
6809 many insns, so we'd end up copying it to a register in any case.
6811 Now, we do the copying in expand_binop, if appropriate. */
6812 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6813 TYPE_MODE (TREE_TYPE (exp)));
6815 case COMPLEX_CST:
6816 /* Handle evaluating a complex constant in a CONCAT target. */
6817 if (original_target && GET_CODE (original_target) == CONCAT)
6819 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6820 rtx rtarg, itarg;
6822 rtarg = XEXP (original_target, 0);
6823 itarg = XEXP (original_target, 1);
6825 /* Move the real and imaginary parts separately. */
6826 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6827 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6829 if (op0 != rtarg)
6830 emit_move_insn (rtarg, op0);
6831 if (op1 != itarg)
6832 emit_move_insn (itarg, op1);
6834 return original_target;
6837 /* ... fall through ... */
6839 case STRING_CST:
6840 temp = output_constant_def (exp, 1);
6842 /* temp contains a constant address.
6843 On RISC machines where a constant address isn't valid,
6844 make some insns to get that address into a register. */
6845 if (modifier != EXPAND_CONST_ADDRESS
6846 && modifier != EXPAND_INITIALIZER
6847 && modifier != EXPAND_SUM
6848 && (! memory_address_p (mode, XEXP (temp, 0))
6849 || flag_force_addr))
6850 return replace_equiv_address (temp,
6851 copy_rtx (XEXP (temp, 0)));
6852 return temp;
6854 case SAVE_EXPR:
6856 tree val = TREE_OPERAND (exp, 0);
6857 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6859 if (!SAVE_EXPR_RESOLVED_P (exp))
6861 /* We can indeed still hit this case, typically via builtin
6862 expanders calling save_expr immediately before expanding
6863 something. Assume this means that we only have to deal
6864 with non-BLKmode values. */
6865 gcc_assert (GET_MODE (ret) != BLKmode);
6867 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6868 DECL_ARTIFICIAL (val) = 1;
6869 DECL_IGNORED_P (val) = 1;
6870 TREE_OPERAND (exp, 0) = val;
6871 SAVE_EXPR_RESOLVED_P (exp) = 1;
6873 if (!CONSTANT_P (ret))
6874 ret = copy_to_reg (ret);
6875 SET_DECL_RTL (val, ret);
6878 return ret;
6881 case GOTO_EXPR:
6882 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6883 expand_goto (TREE_OPERAND (exp, 0));
6884 else
6885 expand_computed_goto (TREE_OPERAND (exp, 0));
6886 return const0_rtx;
6888 case CONSTRUCTOR:
6889 /* If we don't need the result, just ensure we evaluate any
6890 subexpressions. */
6891 if (ignore)
6893 unsigned HOST_WIDE_INT idx;
6894 tree value;
6896 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6897 expand_expr (value, const0_rtx, VOIDmode, 0);
6899 return const0_rtx;
6902 /* Try to avoid creating a temporary at all. This is possible
6903 if all of the initializer is zero.
6904 FIXME: try to handle all [0..255] initializers we can handle
6905 with memset. */
6906 else if (TREE_STATIC (exp)
6907 && !TREE_ADDRESSABLE (exp)
6908 && target != 0 && mode == BLKmode
6909 && all_zeros_p (exp))
6911 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6912 return target;
6915 /* All elts simple constants => refer to a constant in memory. But
6916 if this is a non-BLKmode mode, let it store a field at a time
6917 since that should make a CONST_INT or CONST_DOUBLE when we
6918 fold. Likewise, if we have a target we can use, it is best to
6919 store directly into the target unless the type is large enough
6920 that memcpy will be used. If we are making an initializer and
6921 all operands are constant, put it in memory as well.
6923 FIXME: Avoid trying to fill vector constructors piece-meal.
6924 Output them with output_constant_def below unless we're sure
6925 they're zeros. This should go away when vector initializers
6926 are treated like VECTOR_CST instead of arrays. */
6928 else if ((TREE_STATIC (exp)
6929 && ((mode == BLKmode
6930 && ! (target != 0 && safe_from_p (target, exp, 1)))
6931 || TREE_ADDRESSABLE (exp)
6932 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6933 && (! MOVE_BY_PIECES_P
6934 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6935 TYPE_ALIGN (type)))
6936 && ! mostly_zeros_p (exp))))
6937 || ((modifier == EXPAND_INITIALIZER
6938 || modifier == EXPAND_CONST_ADDRESS)
6939 && TREE_CONSTANT (exp)))
6941 rtx constructor = output_constant_def (exp, 1);
6943 if (modifier != EXPAND_CONST_ADDRESS
6944 && modifier != EXPAND_INITIALIZER
6945 && modifier != EXPAND_SUM)
6946 constructor = validize_mem (constructor);
6948 return constructor;
6950 else
6952 /* Handle calls that pass values in multiple non-contiguous
6953 locations. The Irix 6 ABI has examples of this. */
6954 if (target == 0 || ! safe_from_p (target, exp, 1)
6955 || GET_CODE (target) == PARALLEL
6956 || modifier == EXPAND_STACK_PARM)
6957 target
6958 = assign_temp (build_qualified_type (type,
6959 (TYPE_QUALS (type)
6960 | (TREE_READONLY (exp)
6961 * TYPE_QUAL_CONST))),
6962 0, TREE_ADDRESSABLE (exp), 1);
6964 store_constructor (exp, target, 0, int_expr_size (exp));
6965 return target;
6968 case MISALIGNED_INDIRECT_REF:
6969 case ALIGN_INDIRECT_REF:
6970 case INDIRECT_REF:
6972 tree exp1 = TREE_OPERAND (exp, 0);
6974 if (modifier != EXPAND_WRITE)
6976 tree t;
6978 t = fold_read_from_constant_string (exp);
6979 if (t)
6980 return expand_expr (t, target, tmode, modifier);
6983 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6984 op0 = memory_address (mode, op0);
6986 if (code == ALIGN_INDIRECT_REF)
6988 int align = TYPE_ALIGN_UNIT (type);
6989 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6990 op0 = memory_address (mode, op0);
6993 temp = gen_rtx_MEM (mode, op0);
6995 set_mem_attributes (temp, exp, 0);
6997 /* Resolve the misalignment now, so that we don't have to remember
6998 to resolve it later. Of course, this only works for reads. */
6999 /* ??? When we get around to supporting writes, we'll have to handle
7000 this in store_expr directly. The vectorizer isn't generating
7001 those yet, however. */
7002 if (code == MISALIGNED_INDIRECT_REF)
7004 int icode;
7005 rtx reg, insn;
7007 gcc_assert (modifier == EXPAND_NORMAL
7008 || modifier == EXPAND_STACK_PARM);
7010 /* The vectorizer should have already checked the mode. */
7011 icode = movmisalign_optab->handlers[mode].insn_code;
7012 gcc_assert (icode != CODE_FOR_nothing);
7014 /* We've already validated the memory, and we're creating a
7015 new pseudo destination. The predicates really can't fail. */
7016 reg = gen_reg_rtx (mode);
7018 /* Nor can the insn generator. */
7019 insn = GEN_FCN (icode) (reg, temp);
7020 emit_insn (insn);
7022 return reg;
7025 return temp;
7028 case TARGET_MEM_REF:
7030 struct mem_address addr;
7032 get_address_description (exp, &addr);
7033 op0 = addr_for_mem_ref (&addr, true);
7034 op0 = memory_address (mode, op0);
7035 temp = gen_rtx_MEM (mode, op0);
7036 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7038 return temp;
7040 case ARRAY_REF:
7043 tree array = TREE_OPERAND (exp, 0);
7044 tree index = TREE_OPERAND (exp, 1);
7046 /* Fold an expression like: "foo"[2].
7047 This is not done in fold so it won't happen inside &.
7048 Don't fold if this is for wide characters since it's too
7049 difficult to do correctly and this is a very rare case. */
7051 if (modifier != EXPAND_CONST_ADDRESS
7052 && modifier != EXPAND_INITIALIZER
7053 && modifier != EXPAND_MEMORY)
7055 tree t = fold_read_from_constant_string (exp);
7057 if (t)
7058 return expand_expr (t, target, tmode, modifier);
7061 /* If this is a constant index into a constant array,
7062 just get the value from the array. Handle both cases: when
7063 we have an explicit constructor and when our operand is a variable
7064 that was declared const. */
7066 if (modifier != EXPAND_CONST_ADDRESS
7067 && modifier != EXPAND_INITIALIZER
7068 && modifier != EXPAND_MEMORY
7069 && TREE_CODE (array) == CONSTRUCTOR
7070 && ! TREE_SIDE_EFFECTS (array)
7071 && TREE_CODE (index) == INTEGER_CST)
7073 unsigned HOST_WIDE_INT ix;
7074 tree field, value;
7076 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7077 field, value)
7078 if (tree_int_cst_equal (field, index))
7080 if (!TREE_SIDE_EFFECTS (value))
7081 return expand_expr (fold (value), target, tmode, modifier);
7082 break;
7086 else if (optimize >= 1
7087 && modifier != EXPAND_CONST_ADDRESS
7088 && modifier != EXPAND_INITIALIZER
7089 && modifier != EXPAND_MEMORY
7090 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7091 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7092 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7093 && targetm.binds_local_p (array))
7095 if (TREE_CODE (index) == INTEGER_CST)
7097 tree init = DECL_INITIAL (array);
7099 if (TREE_CODE (init) == CONSTRUCTOR)
7101 unsigned HOST_WIDE_INT ix;
7102 tree field, value;
7104 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7105 field, value)
7106 if (tree_int_cst_equal (field, index))
7108 if (!TREE_SIDE_EFFECTS (value))
7109 return expand_expr (fold (value), target, tmode,
7110 modifier);
7111 break;
7114 else if (TREE_CODE (init) == STRING_CST
7115 && 0 > compare_tree_int (index,
7116 TREE_STRING_LENGTH (init)))
7118 tree type = TREE_TYPE (TREE_TYPE (init));
7119 enum machine_mode mode = TYPE_MODE (type);
7121 if (GET_MODE_CLASS (mode) == MODE_INT
7122 && GET_MODE_SIZE (mode) == 1)
7123 return gen_int_mode (TREE_STRING_POINTER (init)
7124 [TREE_INT_CST_LOW (index)], mode);
7129 goto normal_inner_ref;
7131 case COMPONENT_REF:
7132 /* If the operand is a CONSTRUCTOR, we can just extract the
7133 appropriate field if it is present. */
7134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7136 unsigned HOST_WIDE_INT idx;
7137 tree field, value;
7139 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7140 idx, field, value)
7141 if (field == TREE_OPERAND (exp, 1)
7142 /* We can normally use the value of the field in the
7143 CONSTRUCTOR. However, if this is a bitfield in
7144 an integral mode that we can fit in a HOST_WIDE_INT,
7145 we must mask only the number of bits in the bitfield,
7146 since this is done implicitly by the constructor. If
7147 the bitfield does not meet either of those conditions,
7148 we can't do this optimization. */
7149 && (! DECL_BIT_FIELD (field)
7150 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7151 && (GET_MODE_BITSIZE (DECL_MODE (field))
7152 <= HOST_BITS_PER_WIDE_INT))))
7154 if (DECL_BIT_FIELD (field)
7155 && modifier == EXPAND_STACK_PARM)
7156 target = 0;
7157 op0 = expand_expr (value, target, tmode, modifier);
7158 if (DECL_BIT_FIELD (field))
7160 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7161 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7163 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7165 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7166 op0 = expand_and (imode, op0, op1, target);
7168 else
7170 tree count
7171 = build_int_cst (NULL_TREE,
7172 GET_MODE_BITSIZE (imode) - bitsize);
7174 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7175 target, 0);
7176 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7177 target, 0);
7181 return op0;
7184 goto normal_inner_ref;
7186 case BIT_FIELD_REF:
7187 case ARRAY_RANGE_REF:
7188 normal_inner_ref:
7190 enum machine_mode mode1;
7191 HOST_WIDE_INT bitsize, bitpos;
7192 tree offset;
7193 int volatilep = 0;
7194 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7195 &mode1, &unsignedp, &volatilep, true);
7196 rtx orig_op0;
7198 /* If we got back the original object, something is wrong. Perhaps
7199 we are evaluating an expression too early. In any event, don't
7200 infinitely recurse. */
7201 gcc_assert (tem != exp);
7203 /* If TEM's type is a union of variable size, pass TARGET to the inner
7204 computation, since it will need a temporary and TARGET is known
7205 to be safe to use. This occurs in unchecked conversion in Ada. */
7207 orig_op0 = op0
7208 = expand_expr (tem,
7209 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7210 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7211 != INTEGER_CST)
7212 && modifier != EXPAND_STACK_PARM
7213 ? target : NULL_RTX),
7214 VOIDmode,
7215 (modifier == EXPAND_INITIALIZER
7216 || modifier == EXPAND_CONST_ADDRESS
7217 || modifier == EXPAND_STACK_PARM)
7218 ? modifier : EXPAND_NORMAL);
7220 /* If this is a constant, put it into a register if it is a legitimate
7221 constant, OFFSET is 0, and we won't try to extract outside the
7222 register (in case we were passed a partially uninitialized object
7223 or a view_conversion to a larger size). Force the constant to
7224 memory otherwise. */
7225 if (CONSTANT_P (op0))
7227 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7228 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7229 && offset == 0
7230 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7231 op0 = force_reg (mode, op0);
7232 else
7233 op0 = validize_mem (force_const_mem (mode, op0));
7236 /* Otherwise, if this object is not in memory and we either have an
7237 offset, a BLKmode result, or a reference outside the object, put it
7238 there. Such cases can occur in Ada if we have unchecked conversion
7239 of an expression from a scalar type to an array or record type or
7240 for an ARRAY_RANGE_REF whose type is BLKmode. */
7241 else if (!MEM_P (op0)
7242 && (offset != 0
7243 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7244 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7246 tree nt = build_qualified_type (TREE_TYPE (tem),
7247 (TYPE_QUALS (TREE_TYPE (tem))
7248 | TYPE_QUAL_CONST));
7249 rtx memloc = assign_temp (nt, 1, 1, 1);
7251 emit_move_insn (memloc, op0);
7252 op0 = memloc;
7255 if (offset != 0)
7257 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7258 EXPAND_SUM);
7260 gcc_assert (MEM_P (op0));
7262 #ifdef POINTERS_EXTEND_UNSIGNED
7263 if (GET_MODE (offset_rtx) != Pmode)
7264 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7265 #else
7266 if (GET_MODE (offset_rtx) != ptr_mode)
7267 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7268 #endif
7270 if (GET_MODE (op0) == BLKmode
7271 /* A constant address in OP0 can have VOIDmode; we must
7272 not try to call force_reg in that case. */
7273 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7274 && bitsize != 0
7275 && (bitpos % bitsize) == 0
7276 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7277 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7279 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7280 bitpos = 0;
7283 op0 = offset_address (op0, offset_rtx,
7284 highest_pow2_factor (offset));
7287 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7288 record its alignment as BIGGEST_ALIGNMENT. */
7289 if (MEM_P (op0) && bitpos == 0 && offset != 0
7290 && is_aligning_offset (offset, tem))
7291 set_mem_align (op0, BIGGEST_ALIGNMENT);
7293 /* Don't forget about volatility even if this is a bitfield. */
7294 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7296 if (op0 == orig_op0)
7297 op0 = copy_rtx (op0);
7299 MEM_VOLATILE_P (op0) = 1;
7302 /* The following code doesn't handle CONCAT.
7303 Assume only bitpos == 0 can be used for CONCAT, due to
7304 one-element arrays having the same mode as their element. */
7305 if (GET_CODE (op0) == CONCAT)
7307 gcc_assert (bitpos == 0
7308 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7309 return op0;
7312 /* In cases where an aligned union has an unaligned object
7313 as a field, we might be extracting a BLKmode value from
7314 an integer-mode (e.g., SImode) object. Handle this case
7315 by doing the extract into an object as wide as the field
7316 (which we know to be the width of a basic mode), then
7317 storing into memory, and changing the mode to BLKmode. */
7318 if (mode1 == VOIDmode
7319 || REG_P (op0) || GET_CODE (op0) == SUBREG
7320 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7321 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7322 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7323 && modifier != EXPAND_CONST_ADDRESS
7324 && modifier != EXPAND_INITIALIZER)
7325 /* If the field isn't aligned enough to fetch as a memref,
7326 fetch it as a bit field. */
7327 || (mode1 != BLKmode
7328 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7329 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7330 || (MEM_P (op0)
7331 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7332 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7333 && ((modifier == EXPAND_CONST_ADDRESS
7334 || modifier == EXPAND_INITIALIZER)
7335 ? STRICT_ALIGNMENT
7336 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7337 || (bitpos % BITS_PER_UNIT != 0)))
7338 /* If the type and the field are a constant size and the
7339 size of the type isn't the same size as the bitfield,
7340 we must use bitfield operations. */
7341 || (bitsize >= 0
7342 && TYPE_SIZE (TREE_TYPE (exp))
7343 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7344 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7345 bitsize)))
7347 enum machine_mode ext_mode = mode;
7349 if (ext_mode == BLKmode
7350 && ! (target != 0 && MEM_P (op0)
7351 && MEM_P (target)
7352 && bitpos % BITS_PER_UNIT == 0))
7353 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7355 if (ext_mode == BLKmode)
7357 if (target == 0)
7358 target = assign_temp (type, 0, 1, 1);
7360 if (bitsize == 0)
7361 return target;
7363 /* In this case, BITPOS must start at a byte boundary and
7364 TARGET, if specified, must be a MEM. */
7365 gcc_assert (MEM_P (op0)
7366 && (!target || MEM_P (target))
7367 && !(bitpos % BITS_PER_UNIT));
7369 emit_block_move (target,
7370 adjust_address (op0, VOIDmode,
7371 bitpos / BITS_PER_UNIT),
7372 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7373 / BITS_PER_UNIT),
7374 (modifier == EXPAND_STACK_PARM
7375 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7377 return target;
7380 op0 = validize_mem (op0);
7382 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7383 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7385 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7386 (modifier == EXPAND_STACK_PARM
7387 ? NULL_RTX : target),
7388 ext_mode, ext_mode);
7390 /* If the result is a record type and BITSIZE is narrower than
7391 the mode of OP0, an integral mode, and this is a big endian
7392 machine, we must put the field into the high-order bits. */
7393 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7394 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7395 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7396 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7397 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7398 - bitsize),
7399 op0, 1);
7401 /* If the result type is BLKmode, store the data into a temporary
7402 of the appropriate type, but with the mode corresponding to the
7403 mode for the data we have (op0's mode). It's tempting to make
7404 this a constant type, since we know it's only being stored once,
7405 but that can cause problems if we are taking the address of this
7406 COMPONENT_REF because the MEM of any reference via that address
7407 will have flags corresponding to the type, which will not
7408 necessarily be constant. */
7409 if (mode == BLKmode)
7411 rtx new
7412 = assign_stack_temp_for_type
7413 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7415 emit_move_insn (new, op0);
7416 op0 = copy_rtx (new);
7417 PUT_MODE (op0, BLKmode);
7418 set_mem_attributes (op0, exp, 1);
7421 return op0;
7424 /* If the result is BLKmode, use that to access the object
7425 now as well. */
7426 if (mode == BLKmode)
7427 mode1 = BLKmode;
7429 /* Get a reference to just this component. */
7430 if (modifier == EXPAND_CONST_ADDRESS
7431 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7432 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7433 else
7434 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7436 if (op0 == orig_op0)
7437 op0 = copy_rtx (op0);
7439 set_mem_attributes (op0, exp, 0);
7440 if (REG_P (XEXP (op0, 0)))
7441 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7443 MEM_VOLATILE_P (op0) |= volatilep;
7444 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7445 || modifier == EXPAND_CONST_ADDRESS
7446 || modifier == EXPAND_INITIALIZER)
7447 return op0;
7448 else if (target == 0)
7449 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7451 convert_move (target, op0, unsignedp);
7452 return target;
7455 case OBJ_TYPE_REF:
7456 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7458 case CALL_EXPR:
7459 /* Check for a built-in function. */
7460 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7461 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7462 == FUNCTION_DECL)
7463 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7465 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7466 == BUILT_IN_FRONTEND)
7467 return lang_hooks.expand_expr (exp, original_target,
7468 tmode, modifier,
7469 alt_rtl);
7470 else
7471 return expand_builtin (exp, target, subtarget, tmode, ignore);
7474 return expand_call (exp, target, ignore);
7476 case NON_LVALUE_EXPR:
7477 case NOP_EXPR:
7478 case CONVERT_EXPR:
7479 if (TREE_OPERAND (exp, 0) == error_mark_node)
7480 return const0_rtx;
7482 if (TREE_CODE (type) == UNION_TYPE)
7484 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7486 /* If both input and output are BLKmode, this conversion isn't doing
7487 anything except possibly changing memory attributes. */
7488 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7490 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7491 modifier);
7493 result = copy_rtx (result);
7494 set_mem_attributes (result, exp, 0);
7495 return result;
7498 if (target == 0)
7500 if (TYPE_MODE (type) != BLKmode)
7501 target = gen_reg_rtx (TYPE_MODE (type));
7502 else
7503 target = assign_temp (type, 0, 1, 1);
7506 if (MEM_P (target))
7507 /* Store data into beginning of memory target. */
7508 store_expr (TREE_OPERAND (exp, 0),
7509 adjust_address (target, TYPE_MODE (valtype), 0),
7510 modifier == EXPAND_STACK_PARM);
7512 else
7514 gcc_assert (REG_P (target));
7516 /* Store this field into a union of the proper type. */
7517 store_field (target,
7518 MIN ((int_size_in_bytes (TREE_TYPE
7519 (TREE_OPERAND (exp, 0)))
7520 * BITS_PER_UNIT),
7521 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7522 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7523 type, 0);
7526 /* Return the entire union. */
7527 return target;
7530 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7532 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7533 modifier);
7535 /* If the signedness of the conversion differs and OP0 is
7536 a promoted SUBREG, clear that indication since we now
7537 have to do the proper extension. */
7538 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7539 && GET_CODE (op0) == SUBREG)
7540 SUBREG_PROMOTED_VAR_P (op0) = 0;
7542 return REDUCE_BIT_FIELD (op0);
7545 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7546 if (GET_MODE (op0) == mode)
7549 /* If OP0 is a constant, just convert it into the proper mode. */
7550 else if (CONSTANT_P (op0))
7552 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7553 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7555 if (modifier == EXPAND_INITIALIZER)
7556 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7557 subreg_lowpart_offset (mode,
7558 inner_mode));
7559 else
7560 op0 = convert_modes (mode, inner_mode, op0,
7561 TYPE_UNSIGNED (inner_type));
7564 else if (modifier == EXPAND_INITIALIZER)
7565 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7567 else if (target == 0)
7568 op0 = convert_to_mode (mode, op0,
7569 TYPE_UNSIGNED (TREE_TYPE
7570 (TREE_OPERAND (exp, 0))));
7571 else
7573 convert_move (target, op0,
7574 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7575 op0 = target;
7578 return REDUCE_BIT_FIELD (op0);
7580 case VIEW_CONVERT_EXPR:
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7583 /* If the input and output modes are both the same, we are done. */
7584 if (TYPE_MODE (type) == GET_MODE (op0))
7586 /* If neither mode is BLKmode and both modes are the same size,
7587 then we can use gen_lowpart. */
7588 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7589 && GET_MODE_SIZE (TYPE_MODE (type))
7590 == GET_MODE_SIZE (GET_MODE (op0)))
7592 if (GET_CODE (op0) == SUBREG)
7593 op0 = force_reg (GET_MODE (op0), op0);
7594 op0 = gen_lowpart (TYPE_MODE (type), op0);
7596 /* If both modes are integral, then we can convert from one to the
7597 other. */
7598 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7599 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7600 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7601 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7602 /* As a last resort, spill op0 to memory, and reload it in a
7603 different mode. */
7604 else if (!MEM_P (op0))
7606 /* If the operand is not a MEM, force it into memory. Since we
7607 are going to be changing the mode of the MEM, don't call
7608 force_const_mem for constants because we don't allow pool
7609 constants to change mode. */
7610 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7612 gcc_assert (!TREE_ADDRESSABLE (exp));
7614 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7615 target
7616 = assign_stack_temp_for_type
7617 (TYPE_MODE (inner_type),
7618 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7620 emit_move_insn (target, op0);
7621 op0 = target;
7624 /* At this point, OP0 is in the correct mode. If the output type is such
7625 that the operand is known to be aligned, indicate that it is.
7626 Otherwise, we need only be concerned about alignment for non-BLKmode
7627 results. */
7628 if (MEM_P (op0))
7630 op0 = copy_rtx (op0);
7632 if (TYPE_ALIGN_OK (type))
7633 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7634 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7635 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7637 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7638 HOST_WIDE_INT temp_size
7639 = MAX (int_size_in_bytes (inner_type),
7640 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7641 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7642 temp_size, 0, type);
7643 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7645 gcc_assert (!TREE_ADDRESSABLE (exp));
7647 if (GET_MODE (op0) == BLKmode)
7648 emit_block_move (new_with_op0_mode, op0,
7649 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7650 (modifier == EXPAND_STACK_PARM
7651 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7652 else
7653 emit_move_insn (new_with_op0_mode, op0);
7655 op0 = new;
7658 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7661 return op0;
7663 case PLUS_EXPR:
7664 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7665 something else, make sure we add the register to the constant and
7666 then to the other thing. This case can occur during strength
7667 reduction and doing it this way will produce better code if the
7668 frame pointer or argument pointer is eliminated.
7670 fold-const.c will ensure that the constant is always in the inner
7671 PLUS_EXPR, so the only case we need to do anything about is if
7672 sp, ap, or fp is our second argument, in which case we must swap
7673 the innermost first argument and our second argument. */
7675 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7676 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7677 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7678 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7679 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7680 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7682 tree t = TREE_OPERAND (exp, 1);
7684 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7685 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7688 /* If the result is to be ptr_mode and we are adding an integer to
7689 something, we might be forming a constant. So try to use
7690 plus_constant. If it produces a sum and we can't accept it,
7691 use force_operand. This allows P = &ARR[const] to generate
7692 efficient code on machines where a SYMBOL_REF is not a valid
7693 address.
7695 If this is an EXPAND_SUM call, always return the sum. */
7696 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7697 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7699 if (modifier == EXPAND_STACK_PARM)
7700 target = 0;
7701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7702 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7703 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7705 rtx constant_part;
7707 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7708 EXPAND_SUM);
7709 /* Use immed_double_const to ensure that the constant is
7710 truncated according to the mode of OP1, then sign extended
7711 to a HOST_WIDE_INT. Using the constant directly can result
7712 in non-canonical RTL in a 64x32 cross compile. */
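/* For instance, with a 64-bit HOST_WIDE_INT host and an SImode OP1,
   the tree constant 0x80000000 must become the canonical sign-extended
   CONST_INT value -2147483648, not a zero-extended 0x80000000.  */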
7713 constant_part
7714 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7715 (HOST_WIDE_INT) 0,
7716 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7717 op1 = plus_constant (op1, INTVAL (constant_part));
7718 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7719 op1 = force_operand (op1, target);
7720 return REDUCE_BIT_FIELD (op1);
7723 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7724 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7725 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7727 rtx constant_part;
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7730 (modifier == EXPAND_INITIALIZER
7731 ? EXPAND_INITIALIZER : EXPAND_SUM));
7732 if (! CONSTANT_P (op0))
7734 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7735 VOIDmode, modifier);
7736 /* Return a PLUS if modifier says it's OK. */
7737 if (modifier == EXPAND_SUM
7738 || modifier == EXPAND_INITIALIZER)
7739 return simplify_gen_binary (PLUS, mode, op0, op1);
7740 goto binop2;
7742 /* Use immed_double_const to ensure that the constant is
7743 truncated according to the mode of OP0, then sign extended
7744 to a HOST_WIDE_INT. Using the constant directly can result
7745 in non-canonical RTL in a 64x32 cross compile. */
7746 constant_part
7747 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7748 (HOST_WIDE_INT) 0,
7749 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7750 op0 = plus_constant (op0, INTVAL (constant_part));
7751 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7752 op0 = force_operand (op0, target);
7753 return REDUCE_BIT_FIELD (op0);
7757 /* No sense saving up arithmetic to be done
7758 if it's all in the wrong mode to form part of an address.
7759 And force_operand won't know whether to sign-extend or
7760 zero-extend. */
7761 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7762 || mode != ptr_mode)
7764 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7765 subtarget, &op0, &op1, 0);
7766 if (op0 == const0_rtx)
7767 return op1;
7768 if (op1 == const0_rtx)
7769 return op0;
7770 goto binop2;
7773 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7774 subtarget, &op0, &op1, modifier);
7775 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7777 case MINUS_EXPR:
7778 /* For initializers, we are allowed to return a MINUS of two
7779 symbolic constants. Here we handle all cases when both operands
7780 are constant. */
7781 /* Handle difference of two symbolic constants,
7782 for the sake of an initializer. */
7783 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7784 && really_constant_p (TREE_OPERAND (exp, 0))
7785 && really_constant_p (TREE_OPERAND (exp, 1)))
7787 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7788 NULL_RTX, &op0, &op1, modifier);
7790 /* If the last operand is a CONST_INT, use plus_constant of
7791 the negated constant. Else make the MINUS. */
7792 if (GET_CODE (op1) == CONST_INT)
7793 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7794 else
7795 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7798 /* No sense saving up arithmetic to be done
7799 if it's all in the wrong mode to form part of an address.
7800 And force_operand won't know whether to sign-extend or
7801 zero-extend. */
7802 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7803 || mode != ptr_mode)
7804 goto binop;
7806 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7807 subtarget, &op0, &op1, modifier);
7809 /* Convert A - const to A + (-const). */
7810 if (GET_CODE (op1) == CONST_INT)
7812 op1 = negate_rtx (mode, op1);
7813 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7816 goto binop2;
7818 case MULT_EXPR:
7819 /* If first operand is constant, swap them.
7820 Thus the following special case checks need only
7821 check the second operand. */
7822 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7824 tree t1 = TREE_OPERAND (exp, 0);
7825 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7826 TREE_OPERAND (exp, 1) = t1;
7829 /* Attempt to return something suitable for generating an
7830 indexed address, for machines that support that. */
7832 if (modifier == EXPAND_SUM && mode == ptr_mode
7833 && host_integerp (TREE_OPERAND (exp, 1), 0))
7835 tree exp1 = TREE_OPERAND (exp, 1);
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7838 EXPAND_SUM);
7840 if (!REG_P (op0))
7841 op0 = force_operand (op0, NULL_RTX);
7842 if (!REG_P (op0))
7843 op0 = copy_to_mode_reg (mode, op0);
7845 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7846 gen_int_mode (tree_low_cst (exp1, 0),
7847 TYPE_MODE (TREE_TYPE (exp1)))));
7850 if (modifier == EXPAND_STACK_PARM)
7851 target = 0;
7853 /* Check for multiplying things that have been extended
7854 from a narrower type. If this machine supports multiplying
7855 in that narrower type with a result in the desired type,
7856 do it that way, and avoid the explicit type-conversion. */
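/* For instance, assuming the target provides a mulhisi3-style widening
   multiply pattern: (int) (short) a * (int) (short) b can then be
   emitted as a single HImode-to-SImode widening multiply instead of two
   extensions followed by a full SImode multiplication.  */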
7858 subexp0 = TREE_OPERAND (exp, 0);
7859 subexp1 = TREE_OPERAND (exp, 1);
7860 /* First, check if we have a multiplication of one signed and one
7861 unsigned operand. */
7862 if (TREE_CODE (subexp0) == NOP_EXPR
7863 && TREE_CODE (subexp1) == NOP_EXPR
7864 && TREE_CODE (type) == INTEGER_TYPE
7865 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7866 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7867 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7868 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
7869 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
7870 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
7872 enum machine_mode innermode
7873 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
7874 this_optab = usmul_widen_optab;
7875 if (mode == GET_MODE_WIDER_MODE (innermode))
7877 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7879 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
7880 expand_operands (TREE_OPERAND (subexp0, 0),
7881 TREE_OPERAND (subexp1, 0),
7882 NULL_RTX, &op0, &op1, 0);
7883 else
7884 expand_operands (TREE_OPERAND (subexp0, 0),
7885 TREE_OPERAND (subexp1, 0),
7886 NULL_RTX, &op1, &op0, 0);
7888 goto binop3;
7892 /* Check for a multiplication with matching signedness. */
7893 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7894 && TREE_CODE (type) == INTEGER_TYPE
7895 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7896 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7897 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7898 && int_fits_type_p (TREE_OPERAND (exp, 1),
7899 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7900 /* Don't use a widening multiply if a shift will do. */
7901 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7902 > HOST_BITS_PER_WIDE_INT)
7903 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7905 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7906 && (TYPE_PRECISION (TREE_TYPE
7907 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7908 == TYPE_PRECISION (TREE_TYPE
7909 (TREE_OPERAND
7910 (TREE_OPERAND (exp, 0), 0))))
7911 /* If both operands are extended, they must either both
7912 be zero-extended or both be sign-extended. */
7913 && (TYPE_UNSIGNED (TREE_TYPE
7914 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7915 == TYPE_UNSIGNED (TREE_TYPE
7916 (TREE_OPERAND
7917 (TREE_OPERAND (exp, 0), 0)))))))
7919 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7920 enum machine_mode innermode = TYPE_MODE (op0type);
7921 bool zextend_p = TYPE_UNSIGNED (op0type);
7922 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7923 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7925 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7927 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7929 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7930 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7931 TREE_OPERAND (exp, 1),
7932 NULL_RTX, &op0, &op1, 0);
7933 else
7934 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7935 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7936 NULL_RTX, &op0, &op1, 0);
7937 goto binop3;
7939 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7940 && innermode == word_mode)
7942 rtx htem, hipart;
7943 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7944 NULL_RTX, VOIDmode, 0);
7945 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7946 op1 = convert_modes (innermode, mode,
7947 expand_expr (TREE_OPERAND (exp, 1),
7948 NULL_RTX, VOIDmode, 0),
7949 unsignedp);
7950 else
7951 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7952 NULL_RTX, VOIDmode, 0);
7953 temp = expand_binop (mode, other_optab, op0, op1, target,
7954 unsignedp, OPTAB_LIB_WIDEN);
7955 hipart = gen_highpart (innermode, temp);
7956 htem = expand_mult_highpart_adjust (innermode, hipart,
7957 op0, op1, hipart,
7958 zextend_p);
7959 if (htem != hipart)
7960 emit_move_insn (hipart, htem);
7961 return REDUCE_BIT_FIELD (temp);
7965 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7966 subtarget, &op0, &op1, 0);
7967 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7969 case TRUNC_DIV_EXPR:
7970 case FLOOR_DIV_EXPR:
7971 case CEIL_DIV_EXPR:
7972 case ROUND_DIV_EXPR:
7973 case EXACT_DIV_EXPR:
7974 if (modifier == EXPAND_STACK_PARM)
7975 target = 0;
7976 /* Possible optimization: compute the dividend with EXPAND_SUM
7977 and then, if the divisor is constant, optimize the case where
7978 some terms of the dividend have coefficients divisible by it. */
7979 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7980 subtarget, &op0, &op1, 0);
7981 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7983 case RDIV_EXPR:
7984 goto binop;
7986 case TRUNC_MOD_EXPR:
7987 case FLOOR_MOD_EXPR:
7988 case CEIL_MOD_EXPR:
7989 case ROUND_MOD_EXPR:
7990 if (modifier == EXPAND_STACK_PARM)
7991 target = 0;
7992 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7993 subtarget, &op0, &op1, 0);
7994 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7996 case FIX_ROUND_EXPR:
7997 case FIX_FLOOR_EXPR:
7998 case FIX_CEIL_EXPR:
7999 gcc_unreachable (); /* Not used for C. */
8001 case FIX_TRUNC_EXPR:
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8003 if (target == 0 || modifier == EXPAND_STACK_PARM)
8004 target = gen_reg_rtx (mode);
8005 expand_fix (target, op0, unsignedp);
8006 return target;
8008 case FLOAT_EXPR:
8009 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8010 if (target == 0 || modifier == EXPAND_STACK_PARM)
8011 target = gen_reg_rtx (mode);
8012 /* expand_float can't figure out what to do if FROM has VOIDmode.
8013 So give it the correct mode. With -O, cse will optimize this. */
8014 if (GET_MODE (op0) == VOIDmode)
8015 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8016 op0);
8017 expand_float (target, op0,
8018 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8019 return target;
8021 case NEGATE_EXPR:
8022 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8023 if (modifier == EXPAND_STACK_PARM)
8024 target = 0;
8025 temp = expand_unop (mode,
8026 optab_for_tree_code (NEGATE_EXPR, type),
8027 op0, target, 0);
8028 gcc_assert (temp);
8029 return REDUCE_BIT_FIELD (temp);
8031 case ABS_EXPR:
8032 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8033 if (modifier == EXPAND_STACK_PARM)
8034 target = 0;
8036 /* ABS_EXPR is not valid for complex arguments. */
8037 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8038 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8040 /* Unsigned abs is simply the operand. Testing here means we don't
8041 risk generating incorrect code below. */
8042 if (TYPE_UNSIGNED (type))
8043 return op0;
8045 return expand_abs (mode, op0, target, unsignedp,
8046 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8048 case MAX_EXPR:
8049 case MIN_EXPR:
8050 target = original_target;
8051 if (target == 0
8052 || modifier == EXPAND_STACK_PARM
8053 || (MEM_P (target) && MEM_VOLATILE_P (target))
8054 || GET_MODE (target) != mode
8055 || (REG_P (target)
8056 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8057 target = gen_reg_rtx (mode);
8058 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8059 target, &op0, &op1, 0);
8061 /* First try to do it with a special MIN or MAX instruction.
8062 If that does not win, use a conditional jump to select the proper
8063 value. */
8064 this_optab = optab_for_tree_code (code, type);
8065 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8066 OPTAB_WIDEN);
8067 if (temp != 0)
8068 return temp;
8070 /* At this point, a MEM target is no longer useful; we will get better
8071 code without it. */
8073 if (! REG_P (target))
8074 target = gen_reg_rtx (mode);
8076 /* If op1 was placed in target, swap op0 and op1. */
8077 if (target != op0 && target == op1)
8079 temp = op0;
8080 op0 = op1;
8081 op1 = temp;
8084 /* We generate better code and avoid problems with op1 mentioning
8085 target by forcing op1 into a pseudo if it isn't a constant. */
8086 if (! CONSTANT_P (op1))
8087 op1 = force_reg (mode, op1);
8090 enum rtx_code comparison_code;
8091 rtx cmpop1 = op1;
8093 if (code == MAX_EXPR)
8094 comparison_code = unsignedp ? GEU : GE;
8095 else
8096 comparison_code = unsignedp ? LEU : LE;
8098 /* Canonicalize to comparisons against 0. */
8099 if (op1 == const1_rtx)
8101 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8102 or (a != 0 ? a : 1) for unsigned.
8103 For MIN we are safe converting (a <= 1 ? a : 1)
8104 into (a <= 0 ? a : 1) */
8105 cmpop1 = const0_rtx;
8106 if (code == MAX_EXPR)
8107 comparison_code = unsignedp ? NE : GT;
8109 if (op1 == constm1_rtx && !unsignedp)
8111 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8112 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8113 cmpop1 = const0_rtx;
8114 if (code == MIN_EXPR)
8115 comparison_code = LT;
8117 #ifdef HAVE_conditional_move
8118 /* Use a conditional move if possible. */
8119 if (can_conditionally_move_p (mode))
8121 rtx insn;
8123 /* ??? Same problem as in expmed.c: emit_conditional_move
8124 forces a stack adjustment via compare_from_rtx, and we
8125 lose the stack adjustment if the sequence we are about
8126 to create is discarded. */
8127 do_pending_stack_adjust ();
8129 start_sequence ();
8131 /* Try to emit the conditional move. */
8132 insn = emit_conditional_move (target, comparison_code,
8133 op0, cmpop1, mode,
8134 op0, op1, mode,
8135 unsignedp);
8137 /* If we could do the conditional move, emit the sequence,
8138 and return. */
8139 if (insn)
8141 rtx seq = get_insns ();
8142 end_sequence ();
8143 emit_insn (seq);
8144 return target;
8147 /* Otherwise discard the sequence and fall back to code with
8148 branches. */
8149 end_sequence ();
8151 #endif
8152 if (target != op0)
8153 emit_move_insn (target, op0);
8155 temp = gen_label_rtx ();
8157 /* If this mode is an integer too wide to compare properly,
8158 compare word by word. Rely on cse to optimize constant cases. */
8159 if (GET_MODE_CLASS (mode) == MODE_INT
8160 && ! can_compare_p (GE, mode, ccp_jump))
8162 if (code == MAX_EXPR)
8163 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8164 NULL_RTX, temp);
8165 else
8166 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8167 NULL_RTX, temp);
8169 else
8171 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8172 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8175 emit_move_insn (target, op1);
8176 emit_label (temp);
8177 return target;
8179 case BIT_NOT_EXPR:
8180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8181 if (modifier == EXPAND_STACK_PARM)
8182 target = 0;
8183 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8184 gcc_assert (temp);
8185 return temp;
8187 /* ??? Can optimize bitwise operations with one arg constant.
8188 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8189 and (a bitwise1 b) bitwise2 b (etc)
8190 but that is probably not worthwhile. */
8192 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8193 boolean values when we want in all cases to compute both of them. In
8194 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8195 as actual zero-or-1 values and then bitwise anding. In cases where
8196 there cannot be any side effects, better code would be made by
8197 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8198 how to recognize those cases. */
8200 case TRUTH_AND_EXPR:
8201 code = BIT_AND_EXPR;
8202 case BIT_AND_EXPR:
8203 goto binop;
8205 case TRUTH_OR_EXPR:
8206 code = BIT_IOR_EXPR;
8207 case BIT_IOR_EXPR:
8208 goto binop;
8210 case TRUTH_XOR_EXPR:
8211 code = BIT_XOR_EXPR;
8212 case BIT_XOR_EXPR:
8213 goto binop;
8215 case LSHIFT_EXPR:
8216 case RSHIFT_EXPR:
8217 case LROTATE_EXPR:
8218 case RROTATE_EXPR:
8219 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8220 subtarget = 0;
8221 if (modifier == EXPAND_STACK_PARM)
8222 target = 0;
8223 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8224 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8225 unsignedp);
8227 /* Could determine the answer when only additive constants differ. Also,
8228 the addition of one can be handled by changing the condition. */
8229 case LT_EXPR:
8230 case LE_EXPR:
8231 case GT_EXPR:
8232 case GE_EXPR:
8233 case EQ_EXPR:
8234 case NE_EXPR:
8235 case UNORDERED_EXPR:
8236 case ORDERED_EXPR:
8237 case UNLT_EXPR:
8238 case UNLE_EXPR:
8239 case UNGT_EXPR:
8240 case UNGE_EXPR:
8241 case UNEQ_EXPR:
8242 case LTGT_EXPR:
8243 temp = do_store_flag (exp,
8244 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8245 tmode != VOIDmode ? tmode : mode, 0);
8246 if (temp != 0)
8247 return temp;
8249 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8250 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8251 && original_target
8252 && REG_P (original_target)
8253 && (GET_MODE (original_target)
8254 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8256 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8257 VOIDmode, 0);
8259 /* If temp is constant, we can just compute the result. */
8260 if (GET_CODE (temp) == CONST_INT)
8262 if (INTVAL (temp) != 0)
8263 emit_move_insn (target, const1_rtx);
8264 else
8265 emit_move_insn (target, const0_rtx);
8267 return target;
8270 if (temp != original_target)
8272 enum machine_mode mode1 = GET_MODE (temp);
8273 if (mode1 == VOIDmode)
8274 mode1 = tmode != VOIDmode ? tmode : mode;
8276 temp = copy_to_mode_reg (mode1, temp);
8279 op1 = gen_label_rtx ();
8280 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8281 GET_MODE (temp), unsignedp, op1);
8282 emit_move_insn (temp, const1_rtx);
8283 emit_label (op1);
8284 return temp;
8287 /* If no set-flag instruction, must generate a conditional store
8288 into a temporary variable. Drop through and handle this
8289 like && and ||. */
8291 if (! ignore
8292 && (target == 0
8293 || modifier == EXPAND_STACK_PARM
8294 || ! safe_from_p (target, exp, 1)
8295 /* Make sure we don't have a hard reg (such as function's return
8296 value) live across basic blocks, if not optimizing. */
8297 || (!optimize && REG_P (target)
8298 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8299 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8301 if (target)
8302 emit_move_insn (target, const0_rtx);
8304 op1 = gen_label_rtx ();
8305 jumpifnot (exp, op1);
8307 if (target)
8308 emit_move_insn (target, const1_rtx);
8310 emit_label (op1);
8311 return ignore ? const0_rtx : target;
8313 case TRUTH_NOT_EXPR:
8314 if (modifier == EXPAND_STACK_PARM)
8315 target = 0;
8316 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8317 /* The parser is careful to generate TRUTH_NOT_EXPR
8318 only with operands that are always zero or one. */
8319 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8320 target, 1, OPTAB_LIB_WIDEN);
8321 gcc_assert (temp);
8322 return temp;
8324 case STATEMENT_LIST:
8326 tree_stmt_iterator iter;
8328 gcc_assert (ignore);
8330 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8331 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8333 return const0_rtx;
8335 case COND_EXPR:
8336 /* A COND_EXPR with its type being VOID_TYPE represents a
8337 conditional jump and is handled in
8338 expand_gimple_cond_expr. */
8339 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8341 /* Note that COND_EXPRs whose type is a structure or union
8342 are required to be constructed to contain assignments of
8343 a temporary variable, so that we can evaluate them here
8344 for side effect only. If type is void, we must do likewise. */
8346 gcc_assert (!TREE_ADDRESSABLE (type)
8347 && !ignore
8348 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8349 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8351 /* If we are not to produce a result, we have no target. Otherwise,
8352 if a target was specified use it; it will not be used as an
8353 intermediate target unless it is safe. If no target, use a
8354 temporary. */
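/* Schematically, the code emitted below for TEMP = (C ? X : Y) is:
       if (!C) goto op0;  temp = X;  goto op1;
     op0:  temp = Y;
     op1:  ...  */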
8356 if (modifier != EXPAND_STACK_PARM
8357 && original_target
8358 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8359 && GET_MODE (original_target) == mode
8360 #ifdef HAVE_conditional_move
8361 && (! can_conditionally_move_p (mode)
8362 || REG_P (original_target))
8363 #endif
8364 && !MEM_P (original_target))
8365 temp = original_target;
8366 else
8367 temp = assign_temp (type, 0, 0, 1);
8369 do_pending_stack_adjust ();
8370 NO_DEFER_POP;
8371 op0 = gen_label_rtx ();
8372 op1 = gen_label_rtx ();
8373 jumpifnot (TREE_OPERAND (exp, 0), op0);
8374 store_expr (TREE_OPERAND (exp, 1), temp,
8375 modifier == EXPAND_STACK_PARM);
8377 emit_jump_insn (gen_jump (op1));
8378 emit_barrier ();
8379 emit_label (op0);
8380 store_expr (TREE_OPERAND (exp, 2), temp,
8381 modifier == EXPAND_STACK_PARM);
8383 emit_label (op1);
8384 OK_DEFER_POP;
8385 return temp;
8387 case VEC_COND_EXPR:
8388 target = expand_vec_cond_expr (exp, target);
8389 return target;
8391 case MODIFY_EXPR:
8393 tree lhs = TREE_OPERAND (exp, 0);
8394 tree rhs = TREE_OPERAND (exp, 1);
8396 gcc_assert (ignore);
8398 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8399 of size 1. In this case, (unless we need the result of the
8400 assignment) we can do this more efficiently with a
8401 test followed by an assignment, if necessary.
8403 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8404 things change so we do, this code should be enhanced to
8405 support it. */
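/* For example, with 1-bit fields A and B, "A |= B" is expanded as
   "if (B) A = 1;" and "A &= B" as "if (!B) A = 0;", so the store is
   skipped whenever it could not change A.  */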
8406 if (TREE_CODE (lhs) == COMPONENT_REF
8407 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8408 || TREE_CODE (rhs) == BIT_AND_EXPR)
8409 && TREE_OPERAND (rhs, 0) == lhs
8410 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8411 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8412 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8414 rtx label = gen_label_rtx ();
8416 do_jump (TREE_OPERAND (rhs, 1),
8417 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8418 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8419 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8420 (TREE_CODE (rhs) == BIT_IOR_EXPR
8421 ? integer_one_node
8422 : integer_zero_node)));
8423 do_pending_stack_adjust ();
8424 emit_label (label);
8425 return const0_rtx;
8428 expand_assignment (lhs, rhs);
8430 return const0_rtx;
8433 case RETURN_EXPR:
8434 if (!TREE_OPERAND (exp, 0))
8435 expand_null_return ();
8436 else
8437 expand_return (TREE_OPERAND (exp, 0));
8438 return const0_rtx;
8440 case ADDR_EXPR:
8441 return expand_expr_addr_expr (exp, target, tmode, modifier);
8443 case COMPLEX_EXPR:
8444 /* Get the rtx code of the operands. */
8445 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8446 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8448 if (!target)
8449 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8451 /* Move the real (op0) and imaginary (op1) parts to their location. */
8452 write_complex_part (target, op0, false);
8453 write_complex_part (target, op1, true);
8455 return target;
8457 case REALPART_EXPR:
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8459 return read_complex_part (op0, false);
8461 case IMAGPART_EXPR:
8462 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8463 return read_complex_part (op0, true);
8465 case RESX_EXPR:
8466 expand_resx_expr (exp);
8467 return const0_rtx;
8469 case TRY_CATCH_EXPR:
8470 case CATCH_EXPR:
8471 case EH_FILTER_EXPR:
8472 case TRY_FINALLY_EXPR:
8473 /* Lowered by tree-eh.c. */
8474 gcc_unreachable ();
8476 case WITH_CLEANUP_EXPR:
8477 case CLEANUP_POINT_EXPR:
8478 case TARGET_EXPR:
8479 case CASE_LABEL_EXPR:
8480 case VA_ARG_EXPR:
8481 case BIND_EXPR:
8482 case INIT_EXPR:
8483 case CONJ_EXPR:
8484 case COMPOUND_EXPR:
8485 case PREINCREMENT_EXPR:
8486 case PREDECREMENT_EXPR:
8487 case POSTINCREMENT_EXPR:
8488 case POSTDECREMENT_EXPR:
8489 case LOOP_EXPR:
8490 case EXIT_EXPR:
8491 case TRUTH_ANDIF_EXPR:
8492 case TRUTH_ORIF_EXPR:
8493 /* Lowered by gimplify.c. */
8494 gcc_unreachable ();
8496 case EXC_PTR_EXPR:
8497 return get_exception_pointer (cfun);
8499 case FILTER_EXPR:
8500 return get_exception_filter (cfun);
8502 case FDESC_EXPR:
8503 /* Function descriptors are not valid except as
8504 initialization constants, and should not be expanded. */
8505 gcc_unreachable ();
8507 case SWITCH_EXPR:
8508 expand_case (exp);
8509 return const0_rtx;
8511 case LABEL_EXPR:
8512 expand_label (TREE_OPERAND (exp, 0));
8513 return const0_rtx;
8515 case ASM_EXPR:
8516 expand_asm_expr (exp);
8517 return const0_rtx;
8519 case WITH_SIZE_EXPR:
8520 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8521 have pulled out the size to use in whatever context it needed. */
8522 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8523 modifier, alt_rtl);
8525 case REALIGN_LOAD_EXPR:
8527 tree oprnd0 = TREE_OPERAND (exp, 0);
8528 tree oprnd1 = TREE_OPERAND (exp, 1);
8529 tree oprnd2 = TREE_OPERAND (exp, 2);
8530 rtx op2;
8532 this_optab = optab_for_tree_code (code, type);
8533 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8534 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8535 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8536 target, unsignedp);
8537 gcc_assert (temp);
8538 return temp;
8541 case REDUC_MAX_EXPR:
8542 case REDUC_MIN_EXPR:
8543 case REDUC_PLUS_EXPR:
8545 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8546 this_optab = optab_for_tree_code (code, type);
8547 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8548 gcc_assert (temp);
8549 return temp;
8552 case VEC_LSHIFT_EXPR:
8553 case VEC_RSHIFT_EXPR:
8555 target = expand_vec_shift_expr (exp, target);
8556 return target;
8559 default:
8560 return lang_hooks.expand_expr (exp, original_target, tmode,
8561 modifier, alt_rtl);
8564 /* Here to do an ordinary binary operator. */
8565 binop:
8566 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8567 subtarget, &op0, &op1, 0);
8568 binop2:
8569 this_optab = optab_for_tree_code (code, type);
8570 binop3:
8571 if (modifier == EXPAND_STACK_PARM)
8572 target = 0;
8573 temp = expand_binop (mode, this_optab, op0, op1, target,
8574 unsignedp, OPTAB_LIB_WIDEN);
8575 gcc_assert (temp);
8576 return REDUCE_BIT_FIELD (temp);
8578 #undef REDUCE_BIT_FIELD
8580 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8581 signedness of TYPE), possibly returning the result in TARGET. */
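/* E.g. for an unsigned 3-bit TYPE with EXP in SImode this computes
   EXP & 7; for a signed 3-bit TYPE it emits (EXP << 29) >> 29, using an
   arithmetic right shift to sign-extend from the top bit of the field.  */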
8582 static rtx
8583 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8585 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8586 if (target && GET_MODE (target) != GET_MODE (exp))
8587 target = 0;
8588 if (TYPE_UNSIGNED (type))
8590 rtx mask;
8591 if (prec < HOST_BITS_PER_WIDE_INT)
8592 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8593 GET_MODE (exp));
8594 else
8595 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8596 ((unsigned HOST_WIDE_INT) 1
8597 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8598 GET_MODE (exp));
8599 return expand_and (GET_MODE (exp), exp, mask, target);
8601 else
8603 tree count = build_int_cst (NULL_TREE,
8604 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8605 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8606 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8610 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8611 when applied to the address of EXP produces an address known to be
8612 aligned more than BIGGEST_ALIGNMENT. */
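/* The pattern recognized below corresponds, in C terms, to an offset of
   the form (- (uintptr_t) &EXP) & (ALIGN - 1) with ALIGN a power of 2
   larger than BIGGEST_ALIGNMENT: adding it rounds the address of EXP up
   to a multiple of ALIGN.  */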
8614 static int
8615 is_aligning_offset (tree offset, tree exp)
8617 /* Strip off any conversions. */
8618 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8619 || TREE_CODE (offset) == NOP_EXPR
8620 || TREE_CODE (offset) == CONVERT_EXPR)
8621 offset = TREE_OPERAND (offset, 0);
8623 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8624 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8625 if (TREE_CODE (offset) != BIT_AND_EXPR
8626 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8627 || compare_tree_int (TREE_OPERAND (offset, 1),
8628 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8629 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8630 return 0;
8632 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8633 It must be NEGATE_EXPR. Then strip any more conversions. */
8634 offset = TREE_OPERAND (offset, 0);
8635 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8636 || TREE_CODE (offset) == NOP_EXPR
8637 || TREE_CODE (offset) == CONVERT_EXPR)
8638 offset = TREE_OPERAND (offset, 0);
8640 if (TREE_CODE (offset) != NEGATE_EXPR)
8641 return 0;
8643 offset = TREE_OPERAND (offset, 0);
8644 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8645 || TREE_CODE (offset) == NOP_EXPR
8646 || TREE_CODE (offset) == CONVERT_EXPR)
8647 offset = TREE_OPERAND (offset, 0);
8649 /* This must now be the address of EXP. */
8650 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
8653 /* Return the tree node if ARG corresponds to a string constant, or zero
8654 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8655 in bytes within the string that ARG is accessing. The type of the
8656 offset will be `sizetype'. */
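/* E.g. for ARG == &"hello"[2], or the equivalent "hello" + 2, this
   returns the STRING_CST for "hello" and sets *PTR_OFFSET to
   (sizetype) 2.  */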
8658 tree
8659 string_constant (tree arg, tree *ptr_offset)
8661 tree array, offset;
8662 STRIP_NOPS (arg);
8664 if (TREE_CODE (arg) == ADDR_EXPR)
8666 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8668 *ptr_offset = size_zero_node;
8669 return TREE_OPERAND (arg, 0);
8671 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8673 array = TREE_OPERAND (arg, 0);
8674 offset = size_zero_node;
8676 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8678 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8679 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8680 if (TREE_CODE (array) != STRING_CST
8681 && TREE_CODE (array) != VAR_DECL)
8682 return 0;
8684 else
8685 return 0;
8687 else if (TREE_CODE (arg) == PLUS_EXPR)
8689 tree arg0 = TREE_OPERAND (arg, 0);
8690 tree arg1 = TREE_OPERAND (arg, 1);
8692 STRIP_NOPS (arg0);
8693 STRIP_NOPS (arg1);
8695 if (TREE_CODE (arg0) == ADDR_EXPR
8696 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8697 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8699 array = TREE_OPERAND (arg0, 0);
8700 offset = arg1;
8702 else if (TREE_CODE (arg1) == ADDR_EXPR
8703 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8704 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8706 array = TREE_OPERAND (arg1, 0);
8707 offset = arg0;
8709 else
8710 return 0;
8712 else
8713 return 0;
8715 if (TREE_CODE (array) == STRING_CST)
8717 *ptr_offset = convert (sizetype, offset);
8718 return array;
8720 else if (TREE_CODE (array) == VAR_DECL)
8722 int length;
8724 /* Variables initialized to string literals can be handled too. */
8725 if (DECL_INITIAL (array) == NULL_TREE
8726 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8727 return 0;
8729 /* Only handle arrays that are read-only, non-volatile and bind locally. */
8730 if (! TREE_READONLY (array)
8731 || TREE_SIDE_EFFECTS (array)
8732 || ! targetm.binds_local_p (array))
8733 return 0;
8735 /* Avoid const char foo[4] = "abcde"; */
8736 if (DECL_SIZE_UNIT (array) == NULL_TREE
8737 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8738 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8739 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8740 return 0;
8742 /* If variable is bigger than the string literal, OFFSET must be constant
8743 and within the bounds of the string literal. */
8744 offset = convert (sizetype, offset);
8745 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8746 && (! host_integerp (offset, 1)
8747 || compare_tree_int (offset, length) >= 0))
8748 return 0;
8750 *ptr_offset = offset;
8751 return DECL_INITIAL (array);
8754 return 0;
8757 /* Generate code to calculate EXP using a store-flag instruction
8758 and return an rtx for the result. EXP is either a comparison
8759 or a TRUTH_NOT_EXPR whose operand is a comparison.
8761 If TARGET is nonzero, store the result there if convenient.
8763 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8764 cheap.
8766 Return zero if there is no suitable set-flag instruction
8767 available on this machine.
8769 Once expand_expr has been called on the arguments of the comparison,
8770 we are committed to doing the store flag, since it is not safe to
8771 re-evaluate the expression. We emit the store-flag insn by calling
8772 emit_store_flag, but only expand the arguments if we have a reason
8773 to believe that emit_store_flag will be successful. If we think that
8774 it will, but it isn't, we have to simulate the store-flag with a
8775 set/jump/set sequence. */
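/* The set/jump/set fallback at the end of this function is, schematically:
       target = 1;  if (OP0 <code> OP1) goto label;  target = 0;  label:
   with the two constants swapped when INVERT is set.  */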
8777 static rtx
8778 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8780 enum rtx_code code;
8781 tree arg0, arg1, type;
8782 tree tem;
8783 enum machine_mode operand_mode;
8784 int invert = 0;
8785 int unsignedp;
8786 rtx op0, op1;
8787 enum insn_code icode;
8788 rtx subtarget = target;
8789 rtx result, label;
8791 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8792 result at the end. We can't simply invert the test since it would
8793 have already been inverted if it were valid. This case occurs for
8794 some floating-point comparisons. */
8796 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8797 invert = 1, exp = TREE_OPERAND (exp, 0);
8799 arg0 = TREE_OPERAND (exp, 0);
8800 arg1 = TREE_OPERAND (exp, 1);
8802 /* Don't crash if the comparison was erroneous. */
8803 if (arg0 == error_mark_node || arg1 == error_mark_node)
8804 return const0_rtx;
8806 type = TREE_TYPE (arg0);
8807 operand_mode = TYPE_MODE (type);
8808 unsignedp = TYPE_UNSIGNED (type);
8810 /* We won't bother with BLKmode store-flag operations because it would mean
8811 passing a lot of information to emit_store_flag. */
8812 if (operand_mode == BLKmode)
8813 return 0;
8815 /* We won't bother with store-flag operations involving function pointers
8816 when function pointers must be canonicalized before comparisons. */
8817 #ifdef HAVE_canonicalize_funcptr_for_compare
8818 if (HAVE_canonicalize_funcptr_for_compare
8819 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8820 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8821 == FUNCTION_TYPE))
8822 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8823 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8824 == FUNCTION_TYPE))))
8825 return 0;
8826 #endif
8828 STRIP_NOPS (arg0);
8829 STRIP_NOPS (arg1);
8831 /* Get the rtx comparison code to use. We know that EXP is a comparison
8832 operation of some type. Some comparisons against 1 and -1 can be
8833 converted to comparisons with zero. Do so here so that the tests
8834 below will be aware that we have a comparison with zero. These
8835 tests will not catch constants in the first operand, but constants
8836 are rarely passed as the first operand. */
8838 switch (TREE_CODE (exp))
8840 case EQ_EXPR:
8841 code = EQ;
8842 break;
8843 case NE_EXPR:
8844 code = NE;
8845 break;
8846 case LT_EXPR:
8847 if (integer_onep (arg1))
8848 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8849 else
8850 code = unsignedp ? LTU : LT;
8851 break;
8852 case LE_EXPR:
8853 if (! unsignedp && integer_all_onesp (arg1))
8854 arg1 = integer_zero_node, code = LT;
8855 else
8856 code = unsignedp ? LEU : LE;
8857 break;
8858 case GT_EXPR:
8859 if (! unsignedp && integer_all_onesp (arg1))
8860 arg1 = integer_zero_node, code = GE;
8861 else
8862 code = unsignedp ? GTU : GT;
8863 break;
8864 case GE_EXPR:
8865 if (integer_onep (arg1))
8866 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8867 else
8868 code = unsignedp ? GEU : GE;
8869 break;
8871 case UNORDERED_EXPR:
8872 code = UNORDERED;
8873 break;
8874 case ORDERED_EXPR:
8875 code = ORDERED;
8876 break;
8877 case UNLT_EXPR:
8878 code = UNLT;
8879 break;
8880 case UNLE_EXPR:
8881 code = UNLE;
8882 break;
8883 case UNGT_EXPR:
8884 code = UNGT;
8885 break;
8886 case UNGE_EXPR:
8887 code = UNGE;
8888 break;
8889 case UNEQ_EXPR:
8890 code = UNEQ;
8891 break;
8892 case LTGT_EXPR:
8893 code = LTGT;
8894 break;
8896 default:
8897 gcc_unreachable ();
8900 /* Put a constant second. */
8901 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8903 tem = arg0; arg0 = arg1; arg1 = tem;
8904 code = swap_condition (code);
8907 /* If this is an equality or inequality test of a single bit, we can
8908 do this by shifting the bit being tested to the low-order bit and
8909 masking the result with the constant 1. If the condition was EQ,
8910 we xor it with 1. This does not require an scc insn and is faster
8911 than an scc insn even if we have it.
8913 The code to make this transformation was moved into fold_single_bit_test,
8914 so we just call into the folder and expand its result. */
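/* E.g. (X & 8) != 0 becomes (X >> 3) & 1, and (X & 8) == 0 becomes
   ((X >> 3) & 1) ^ 1, with no branch and no scc insn.  */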
8916 if ((code == NE || code == EQ)
8917 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8918 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8920 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8921 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8922 arg0, arg1, type),
8923 target, VOIDmode, EXPAND_NORMAL);
8926 /* Now see if we are likely to be able to do this. Return if not. */
8927 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8928 return 0;
8930 icode = setcc_gen_code[(int) code];
8931 if (icode == CODE_FOR_nothing
8932 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8934 /* We can only do this if it is one of the special cases that
8935 can be handled without an scc insn. */
8936 if ((code == LT && integer_zerop (arg1))
8937 || (! only_cheap && code == GE && integer_zerop (arg1)))
8939 else if (! only_cheap && (code == NE || code == EQ)
8940 && TREE_CODE (type) != REAL_TYPE
8941 && ((abs_optab->handlers[(int) operand_mode].insn_code
8942 != CODE_FOR_nothing)
8943 || (ffs_optab->handlers[(int) operand_mode].insn_code
8944 != CODE_FOR_nothing)))
8946 else
8947 return 0;
8950 if (! get_subtarget (target)
8951 || GET_MODE (subtarget) != operand_mode)
8952 subtarget = 0;
8954 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8956 if (target == 0)
8957 target = gen_reg_rtx (mode);
8959 result = emit_store_flag (target, code, op0, op1,
8960 operand_mode, unsignedp, 1);
8962 if (result)
8964 if (invert)
8965 result = expand_binop (mode, xor_optab, result, const1_rtx,
8966 result, 0, OPTAB_LIB_WIDEN);
8967 return result;
8970 /* If this failed, we have to do this with set/compare/jump/set code. */
8971 if (!REG_P (target)
8972 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8973 target = gen_reg_rtx (GET_MODE (target));
8975 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8976 result = compare_from_rtx (op0, op1, code, unsignedp,
8977 operand_mode, NULL_RTX);
8978 if (GET_CODE (result) == CONST_INT)
8979 return (((result == const0_rtx && ! invert)
8980 || (result != const0_rtx && invert))
8981 ? const0_rtx : const1_rtx);
8983 /* The code of RESULT may not match CODE if compare_from_rtx
8984 decided to swap its operands and reverse the original code.
8986 We know that compare_from_rtx returns either a CONST_INT or
8987 a new comparison code, so it is safe to just extract the
8988 code from RESULT. */
8989 code = GET_CODE (result);
8991 label = gen_label_rtx ();
8992 gcc_assert (bcc_gen_fctn[(int) code]);
8994 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8995 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8996 emit_label (label);
8998 return target;
9002 /* Stubs in case we haven't got a casesi insn. */
9003 #ifndef HAVE_casesi
9004 # define HAVE_casesi 0
9005 # define gen_casesi(a, b, c, d, e) (0)
9006 # define CODE_FOR_casesi CODE_FOR_nothing
9007 #endif
9009 /* If the machine does not have a case insn that compares the bounds,
9010 this means extra overhead for dispatch tables, which raises the
9011 threshold for using them. */
9012 #ifndef CASE_VALUES_THRESHOLD
9013 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9014 #endif /* CASE_VALUES_THRESHOLD */
9016 unsigned int
9017 case_values_threshold (void)
9019 return CASE_VALUES_THRESHOLD;
9022 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9023 0 otherwise (i.e. if there is no casesi instruction). */
9024 int
9025 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9026 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9028 enum machine_mode index_mode = SImode;
9029 int index_bits = GET_MODE_BITSIZE (index_mode);
9030 rtx op1, op2, index;
9031 enum machine_mode op_mode;
9033 if (! HAVE_casesi)
9034 return 0;
9036 /* Convert the index to SImode. */
9037 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9039 enum machine_mode omode = TYPE_MODE (index_type);
9040 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9042 /* We must handle the endpoints in the original mode. */
9043 index_expr = build2 (MINUS_EXPR, index_type,
9044 index_expr, minval);
9045 minval = integer_zero_node;
9046 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9047 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9048 omode, 1, default_label);
9049 /* Now we can safely truncate. */
9050 index = convert_to_mode (index_mode, index, 0);
9052 else
9054 if (TYPE_MODE (index_type) != index_mode)
9056 index_expr = convert (lang_hooks.types.type_for_size
9057 (index_bits, 0), index_expr);
9058 index_type = TREE_TYPE (index_expr);
9061 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9064 do_pending_stack_adjust ();
9066 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9067 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9068 (index, op_mode))
9069 index = copy_to_mode_reg (op_mode, index);
9071 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9073 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9074 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9075 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9076 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9077 (op1, op_mode))
9078 op1 = copy_to_mode_reg (op_mode, op1);
9080 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9082 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9083 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9084 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9085 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9086 (op2, op_mode))
9087 op2 = copy_to_mode_reg (op_mode, op2);
9089 emit_jump_insn (gen_casesi (index, op1, op2,
9090 table_label, default_label));
9091 return 1;
9092 }
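/* A standalone sketch, with hypothetical names, of the wide-index
   strategy above, assuming for illustration a 64-bit long and a 32-bit
   int: subtract MINVAL and range-check while still in the wide original
   mode, so the later truncation to the mode casesi expects cannot lose
   significant bits.  */
static int
narrow_switch_index (unsigned long index, unsigned long minval,
                     unsigned long range, unsigned int *out)
{
  unsigned long rel = index - minval;  /* endpoints handled in the wide mode */

  if (rel > range)
    return 0;                  /* out of range: take the default label */
  *out = (unsigned int) rel;   /* now we can safely truncate */
  return 1;
}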
9094 /* Attempt to generate a tablejump instruction; same concept. */
9095 #ifndef HAVE_tablejump
9096 #define HAVE_tablejump 0
9097 #define gen_tablejump(x, y) (0)
9098 #endif
9100 /* Subroutine of the next function.
9102 INDEX is the value being switched on, with the lowest value
9103 in the table already subtracted.
9104 MODE is its expected mode (needed if INDEX is constant).
9105 RANGE is the length of the jump table.
9106 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9108 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9109 index value is out of range. */
9111 static void
9112 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9113 rtx default_label)
9114 {
9115 rtx temp, vector;
9117 if (INTVAL (range) > cfun->max_jumptable_ents)
9118 cfun->max_jumptable_ents = INTVAL (range);
9120 /* Do an unsigned comparison (in the proper mode) between the index
9121 expression and the value which represents the length of the range.
9122 Since we just finished subtracting the lower bound of the range
9123 from the index expression, this comparison allows us to simultaneously
9124 check that the original index expression value is both greater than
9125 or equal to the minimum value of the range and less than or equal to
9126 the maximum value of the range. */
9128 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9129 default_label);
9131 /* If index is in range, it must fit in Pmode.
9132 Convert to Pmode so we can index with it. */
9133 if (mode != Pmode)
9134 index = convert_to_mode (Pmode, index, 1);
9136 /* Don't let a MEM slip through, because then INDEX that comes
9137 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9138 and break_out_memory_refs will go to work on it and mess it up. */
9139 #ifdef PIC_CASE_VECTOR_ADDRESS
9140 if (flag_pic && !REG_P (index))
9141 index = copy_to_mode_reg (Pmode, index);
9142 #endif
9144 /* If flag_force_addr were to affect this address
9145 it could interfere with the tricky assumptions made
9146 about addresses that contain label-refs,
9147 which may be valid only very near the tablejump itself. */
9148 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9149 GET_MODE_SIZE, because this indicates how large insns are. The other
9150 uses should all be Pmode, because they are addresses. This code
9151 could fail if addresses and insns are not the same size. */
9152 index = gen_rtx_PLUS (Pmode,
9153 gen_rtx_MULT (Pmode, index,
9154 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9155 gen_rtx_LABEL_REF (Pmode, table_label));
9156 #ifdef PIC_CASE_VECTOR_ADDRESS
9157 if (flag_pic)
9158 index = PIC_CASE_VECTOR_ADDRESS (index);
9159 else
9160 #endif
9161 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9162 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9163 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9164 convert_move (temp, vector, 0);
9166 emit_jump_insn (gen_tablejump (temp, table_label));
9168 /* If we are generating PIC code or if the table is PC-relative, the
9169 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9170 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9171 emit_barrier ();
9172 }
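/* A standalone sketch of the single-comparison bounds check emitted
   above: once the lower bound has been subtracted, one unsigned compare
   rejects values below the minimum and above the maximum at the same
   time (the function and its names are illustrative only).  */
static int
index_in_table_p (long index, long minval, unsigned long range)
{
  /* If INDEX < MINVAL, the subtraction wraps to a huge unsigned value,
     so the single test also catches underflow.  */
  return (unsigned long) (index - minval) <= range;
}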
9174 int
9175 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9176 rtx table_label, rtx default_label)
9177 {
9178 rtx index;
9180 if (! HAVE_tablejump)
9181 return 0;
9183 index_expr = fold_build2 (MINUS_EXPR, index_type,
9184 convert (index_type, index_expr),
9185 convert (index_type, minval));
9186 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9187 do_pending_stack_adjust ();
9189 do_tablejump (index, TYPE_MODE (index_type),
9190 convert_modes (TYPE_MODE (index_type),
9191 TYPE_MODE (TREE_TYPE (range)),
9192 expand_expr (range, NULL_RTX,
9193 VOIDmode, 0),
9194 TYPE_UNSIGNED (TREE_TYPE (range))),
9195 table_label, default_label);
9196 return 1;
9197 }
9199 /* Nonzero if the mode is a valid vector mode for this architecture.
9200 This returns nonzero even if there is no hardware support for the
9201 vector mode, but we can emulate with narrower modes. */
9203 int
9204 vector_mode_valid_p (enum machine_mode mode)
9205 {
9206 enum mode_class class = GET_MODE_CLASS (mode);
9207 enum machine_mode innermode;
9209 /* Doh! What's going on? */
9210 if (class != MODE_VECTOR_INT
9211 && class != MODE_VECTOR_FLOAT)
9212 return 0;
9214 /* Hardware support. Woo hoo! */
9215 if (targetm.vector_mode_supported_p (mode))
9216 return 1;
9218 innermode = GET_MODE_INNER (mode);
9220 /* We should probably return 1 if requesting V4DI and we have no DI,
9221 but do have V2DI; that case, however, is probably very unlikely. */
9223 /* If we have support for the inner mode, we can safely emulate it.
9224 We may not have V2DI, but we can emulate with a pair of DIs. */
9225 return targetm.scalar_mode_supported_p (innermode);
9226 }
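/* A minimal sketch, not GCC's actual lowering code, of the emulation
   that vector_mode_valid_p counts on: when only the scalar inner mode
   is supported, a V2DI-style addition can be carried out as a pair of
   DImode additions (long long stands in for DImode here purely for
   illustration).  */
static void
emulated_v2di_add (long long dest[2], const long long a[2],
                   const long long b[2])
{
  dest[0] = a[0] + b[0];        /* one scalar add per vector element */
  dest[1] = a[1] + b[1];
}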
9228 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9229 static rtx
9230 const_vector_from_tree (tree exp)
9231 {
9232 rtvec v;
9233 int units, i;
9234 tree link, elt;
9235 enum machine_mode inner, mode;
9237 mode = TYPE_MODE (TREE_TYPE (exp));
9239 if (initializer_zerop (exp))
9240 return CONST0_RTX (mode);
9242 units = GET_MODE_NUNITS (mode);
9243 inner = GET_MODE_INNER (mode);
9245 v = rtvec_alloc (units);
9247 link = TREE_VECTOR_CST_ELTS (exp);
9248 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9249 {
9250 elt = TREE_VALUE (link);
9252 if (TREE_CODE (elt) == REAL_CST)
9253 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9254 inner);
9255 else
9256 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9257 TREE_INT_CST_HIGH (elt),
9258 inner);
9259 }
9261 /* Initialize remaining elements to 0. */
9262 for (; i < units; ++i)
9263 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9265 return gen_rtx_CONST_VECTOR (mode, v);
9266 }
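/* A standalone sketch of the padding rule implemented above: when a
   VECTOR_CST supplies fewer elements than the mode holds, the tail is
   zero-filled, mirroring C initializer semantics (the function and its
   names are illustrative only).  */
static void
zero_fill_vector_tail (int *elts, int n_given, int n_units)
{
  int i;

  for (i = n_given; i < n_units; ++i)
    elts[i] = 0;                /* remaining elements become zero */
}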
9267 #include "gt-expr.h"