[official-gcc.git] / gcc / expr.c
blob 07d4b14e25dd983c2dab7c8e8fe51587cf114fa8
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
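/* (Illustrative note, not part of the original source.)  Normalizing the
   macro to 0/1 lets later code test the target capability with ordinary C
   rather than #ifdef, e.g. the size-mode choice made further down in
   emit_block_move_via_libcall is equivalent to:

     size_mode = TARGET_MEM_FUNCTIONS ? TYPE_MODE (sizetype)
                                      : TYPE_MODE (unsigned_type_node);
*/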
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
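/* (Descriptive note, not part of the original source.)  In both structures
   above: TO_ADDR and FROM_ADDR cache the block addresses; the AUTINC_*
   flags record that an address already uses auto-increment/decrement;
   EXPLICIT_INC_* is -1 or +1 when explicit add insns must be emitted
   around each piece; LEN counts the bytes still to process; OFFSET is the
   current displacement; and REVERSE means the pieces are processed from
   the highest address downward.  CONSTFUN, given CONSTFUNDATA, an offset
   and a mode, returns the constant rtx to store for that piece.  */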
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
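/* (Illustrative numbers, not part of the original source.)  On a
   hypothetical 32-bit target with MOVE_MAX == 4, no movstr patterns, and
   optimization for speed (so MOVE_RATIO is 15): a word-aligned 32-byte
   copy takes 8 SImode moves, 8 < 15, so MOVE_BY_PIECES_P is true and the
   copy is expanded inline; a 256-byte copy would take 64 moves, so
   emit_block_move falls back to a movstr pattern or a memcpy libcall.  */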
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
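/* (Illustrative sketch, not part of the original source; the local names
   are hypothetical.)  The calling discipline described above is: protect
   the operand, use it immediately, and only then flush the queue.

     rtx op = protect_from_queue (value_rtx, 0);
     emit_insn (gen_move_insn (target_reg, op));
     emit_queue ();

   Flushing the queue between the protect_from_queue call and the insn
   that consumes OP is precisely the hazard warned about above.  */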
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (to_real != from_real)
610 abort ();
612 if (to_real)
614 rtx value, insns;
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1355 rtx temp;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1368 if (mode == oldmode)
1369 return x;
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong thing if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
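/* (Illustrative numbers, not part of the original comment.)  With a
   32-bit HOST_WIDE_INT, converting the SImode CONST_INT -1 to unsigned
   DImode must yield 0x00000000ffffffff; sign-extending to all ones
   would be wrong, which is why the high-order word is forced to zero
   below.  */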
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1414 /* ??? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1430 return gen_int_mode (val, mode);
1433 return gen_lowpart (mode, x);
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
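/* (Illustrative sketch, not part of the original source; the register
   names and the QImode/SImode choice are hypothetical.)  A typical use
   of the entry points above:

     rtx byte = gen_reg_rtx (QImode);
     rtx word = convert_to_mode (SImode, byte, 1);

   convert_to_mode simply calls convert_modes with VOIDmode for OLDMODE;
   convert_modes either refers to a low part of the operand in place or
   allocates a fresh pseudo and emits a convert_move into it.  */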
1441 /* This macro is used to determine the largest unit size that
1442    move_by_pieces can use. */
1444 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1445 move efficiently, as opposed to MOVE_MAX which is the maximum
1446 number of bytes we can move with a single instruction. */
1448 #ifndef MOVE_MAX_PIECES
1449 #define MOVE_MAX_PIECES MOVE_MAX
1450 #endif
1452 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1453 store efficiently. Due to internal GCC limitations, this is
1454 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1455 for an immediate constant. */
1457 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1459 /* Generate several move instructions to copy LEN bytes from block FROM to
1460 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1461 and TO through protect_from_queue before calling.
1463 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1464 used to push FROM to the stack.
1466 ALIGN is maximum alignment we can assume. */
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1480 data.offset = 0;
1481 data.from_addr = from_addr;
1482 if (to)
1484 to_addr = XEXP (to, 0);
1485 data.to = to;
1486 data.autinc_to
1487 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1488 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1489 data.reverse
1490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1492 else
1494 to_addr = NULL_RTX;
1495 data.to = NULL_RTX;
1496 data.autinc_to = 1;
1497 #ifdef STACK_GROWS_DOWNWARD
1498 data.reverse = 1;
1499 #else
1500 data.reverse = 0;
1501 #endif
1503 data.to_addr = to_addr;
1504 data.from = from;
1505 data.autinc_from
1506 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1507 || GET_CODE (from_addr) == POST_INC
1508 || GET_CODE (from_addr) == POST_DEC);
1510 data.explicit_inc_from = 0;
1511 data.explicit_inc_to = 0;
1512 if (data.reverse) data.offset = len;
1513 data.len = len;
1515 /* If copying requires more than two move insns,
1516 copy addresses to registers (to make displacements shorter)
1517 and use post-increment if available. */
1518 if (!(data.autinc_from && data.autinc_to)
1519 && move_by_pieces_ninsns (len, align) > 2)
1521 /* Find the mode of the largest move... */
1522 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1523 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1524 if (GET_MODE_SIZE (tmode) < max_size)
1525 mode = tmode;
1527 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1529 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1530 data.autinc_from = 1;
1531 data.explicit_inc_from = -1;
1533 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1535 data.from_addr = copy_addr_to_reg (from_addr);
1536 data.autinc_from = 1;
1537 data.explicit_inc_from = 1;
1539 if (!data.autinc_from && CONSTANT_P (from_addr))
1540 data.from_addr = copy_addr_to_reg (from_addr);
1541 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1543 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1544 data.autinc_to = 1;
1545 data.explicit_inc_to = -1;
1547 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1549 data.to_addr = copy_addr_to_reg (to_addr);
1550 data.autinc_to = 1;
1551 data.explicit_inc_to = 1;
1553 if (!data.autinc_to && CONSTANT_P (to_addr))
1554 data.to_addr = copy_addr_to_reg (to_addr);
1557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1559 align = MOVE_MAX * BITS_PER_UNIT;
1561 /* First move what we can in the largest integer mode, then go to
1562 successively smaller modes. */
1564 while (max_size > 1)
1566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1568 if (GET_MODE_SIZE (tmode) < max_size)
1569 mode = tmode;
1571 if (mode == VOIDmode)
1572 break;
1574 icode = mov_optab->handlers[(int) mode].insn_code;
1575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1576 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1578 max_size = GET_MODE_SIZE (mode);
1581 /* The code above should have handled everything. */
1582 if (data.len > 0)
1583 abort ();
1586 /* Return number of insns required to move L bytes by pieces.
1587 ALIGN (in bits) is maximum alignment we can assume. */
1589 static unsigned HOST_WIDE_INT
1590 move_by_pieces_ninsns (l, align)
1591 unsigned HOST_WIDE_INT l;
1592 unsigned int align;
1594 unsigned HOST_WIDE_INT n_insns = 0;
1595 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1599 align = MOVE_MAX * BITS_PER_UNIT;
1601 while (max_size > 1)
1603 enum machine_mode mode = VOIDmode, tmode;
1604 enum insn_code icode;
1606 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1608 if (GET_MODE_SIZE (tmode) < max_size)
1609 mode = tmode;
1611 if (mode == VOIDmode)
1612 break;
1614 icode = mov_optab->handlers[(int) mode].insn_code;
1615 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1616 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1618 max_size = GET_MODE_SIZE (mode);
1621 if (l)
1622 abort ();
1623 return n_insns;
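/* (Illustrative numbers, not part of the original source.)  For a
   word-aligned 13-byte block on a target with MOVE_MAX == 4, the loop
   above counts 3 SImode moves (12 bytes), no HImode moves, and 1 QImode
   move, so move_by_pieces_ninsns returns 4.  */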
1626 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1627 with move instructions for mode MODE. GENFUN is the gen_... function
1628 to make a move insn for that mode. DATA has all the other info. */
1630 static void
1631 move_by_pieces_1 (genfun, mode, data)
1632 rtx (*genfun) PARAMS ((rtx, ...));
1633 enum machine_mode mode;
1634 struct move_by_pieces *data;
1636 unsigned int size = GET_MODE_SIZE (mode);
1637 rtx to1 = NULL_RTX, from1;
1639 while (data->len >= size)
1641 if (data->reverse)
1642 data->offset -= size;
1644 if (data->to)
1646 if (data->autinc_to)
1647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1648 data->offset);
1649 else
1650 to1 = adjust_address (data->to, mode, data->offset);
1653 if (data->autinc_from)
1654 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1655 data->offset);
1656 else
1657 from1 = adjust_address (data->from, mode, data->offset);
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1660 emit_insn (gen_add2_insn (data->to_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1662 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1663 emit_insn (gen_add2_insn (data->from_addr,
1664 GEN_INT (-(HOST_WIDE_INT)size)));
1666 if (data->to)
1667 emit_insn ((*genfun) (to1, from1));
1668 else
1670 #ifdef PUSH_ROUNDING
1671 emit_single_push_insn (mode, from1, NULL);
1672 #else
1673 abort ();
1674 #endif
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1678 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1680 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1682 if (! data->reverse)
1683 data->offset += size;
1685 data->len -= size;
1689 /* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1694 SIZE is an rtx that says how long they are.
1695 ALIGN is the maximum alignment we can assume they have.
1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1702 emit_block_move (x, y, size, method)
1703 rtx x, y, size;
1704 enum block_op_methods method;
1706 bool may_use_call;
1707 rtx retval = 0;
1708 unsigned int align;
1710 switch (method)
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1728 default:
1729 abort ();
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
1741 size = protect_from_queue (size, 0);
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1751 can be incorrect is coming from __builtin_memcpy. */
1752 if (GET_CODE (size) == CONST_INT)
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1761 move_by_pieces (x, y, INTVAL (size), align);
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1764 else if (may_use_call)
1765 retval = emit_block_move_via_libcall (x, y, size);
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
1772 return retval;
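/* (Illustrative sketch, not part of the original source.)  A minimal
   caller of emit_block_move; DST_ADDR and SRC_ADDR are hypothetical
   Pmode pseudos, the 32-byte size and 32-bit (4-byte) alignment are
   arbitrary, and BLOCK_OP_NORMAL permits any of the strategies above
   (move_by_pieces, a movstr pattern, or a libcall).

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     set_mem_align (dst, 32);
     set_mem_align (src, 32);
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
*/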
1775 /* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1779 static bool
1780 block_move_libcall_safe_for_call_parm ()
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1791 switch (takes_regs)
1793 case takes_regs_uninit:
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812 #endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1819 case takes_regs_yes:
1820 return true;
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1828 default:
1829 abort ();
1834 /* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
1837 static bool
1838 emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1842 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1843 enum machine_mode mode;
1845 /* Since this is a move insn, we don't care about volatility. */
1846 volatile_ok = 1;
1848 /* Try the most limited insn first, because there's no point
1849 including more than one in the machine description unless
1850 the more limited one has some advantage. */
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
1895 else
1896 delete_insns_since (last);
1900 volatile_ok = 0;
1901 return false;
1904 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
1907 static rtx
1908 emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1911 rtx dst_addr, src_addr;
1912 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1913 enum machine_mode size_mode;
1914 rtx retval;
1916 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1918 It is unsafe to save the value generated by protect_from_queue and reuse
1919 it later. Consider what happens if emit_queue is called before the
1920 return value from protect_from_queue is used.
1922 Expansion of the CALL_EXPR below will call emit_queue before we are
1923 finished emitting RTL for argument setup. So if we are not careful we
1924 could get the wrong value for an argument.
1926 To avoid this problem we go ahead and emit code to copy the addresses of
1927 DST and SRC and SIZE into new pseudos. We can then place those new
1928 pseudos into an RTL_EXPR and use them later, even after a call to
1929 emit_queue.
1931 Note this is not strictly needed for library calls since they do not call
1932 emit_queue before loading their arguments. However, we may need to have
1933 library calls call emit_queue in the future since failing to do so could
1934 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1935 arguments in registers. */
1937 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1938 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1940 #ifdef POINTERS_EXTEND_UNSIGNED
1941 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1942 src_addr = convert_memory_address (ptr_mode, src_addr);
1943 #endif
1945 dst_tree = make_tree (ptr_type_node, dst_addr);
1946 src_tree = make_tree (ptr_type_node, src_addr);
1948 if (TARGET_MEM_FUNCTIONS)
1949 size_mode = TYPE_MODE (sizetype);
1950 else
1951 size_mode = TYPE_MODE (unsigned_type_node);
1953 size = convert_to_mode (size_mode, size, 1);
1954 size = copy_to_mode_reg (size_mode, size);
1956 /* It is incorrect to use the libcall calling conventions to call
1957 memcpy in this context. This could be a user call to memcpy and
1958 the user may wish to examine the return value from memcpy. For
1959 targets where libcalls and normal calls have different conventions
1960 for returning pointers, we could end up generating incorrect code.
1962 For convenience, we generate the call to bcopy this way as well. */
1964 if (TARGET_MEM_FUNCTIONS)
1965 size_tree = make_tree (sizetype, size);
1966 else
1967 size_tree = make_tree (unsigned_type_node, size);
1969 fn = emit_block_move_libcall_fn (true);
1970 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1971 if (TARGET_MEM_FUNCTIONS)
1973 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1974 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1976 else
1978 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1979 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1982 /* Now we have to build up the CALL_EXPR itself. */
1983 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1984 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1985 call_expr, arg_list, NULL_TREE);
1986 TREE_SIDE_EFFECTS (call_expr) = 1;
1988 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1990 /* If we are initializing a readonly value, show the above call clobbered
1991 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
1992 the delay slot scheduler might overlook conflicts and make bad
1993 scheduling decisions. */
1994 if (RTX_UNCHANGING_P (dst))
1995 add_function_usage_to
1996 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
1997 gen_rtx_CLOBBER (VOIDmode, dst),
1998 NULL_RTX));
2000 return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2003 /* A subroutine of emit_block_move_via_libcall. Create the tree node
2004 for the function we use for block copies. The first time FOR_CALL
2005 is true, we call assemble_external. */
2007 static GTY(()) tree block_move_fn;
2009 void
2010 init_block_move_fn (asmspec)
2011 const char *asmspec;
2013 if (!block_move_fn)
2015 tree fn, args;
2017 if (TARGET_MEM_FUNCTIONS)
2019 fn = get_identifier ("memcpy");
2020 args = build_function_type_list (ptr_type_node, ptr_type_node,
2021 const_ptr_type_node, sizetype,
2022 NULL_TREE);
2024 else
2026 fn = get_identifier ("bcopy");
2027 args = build_function_type_list (void_type_node, const_ptr_type_node,
2028 ptr_type_node, unsigned_type_node,
2029 NULL_TREE);
2032 fn = build_decl (FUNCTION_DECL, fn, args);
2033 DECL_EXTERNAL (fn) = 1;
2034 TREE_PUBLIC (fn) = 1;
2035 DECL_ARTIFICIAL (fn) = 1;
2036 TREE_NOTHROW (fn) = 1;
2038 block_move_fn = fn;
2041 if (asmspec)
2043 SET_DECL_RTL (block_move_fn, NULL_RTX);
2044 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2048 static tree
2049 emit_block_move_libcall_fn (for_call)
2050 int for_call;
2052 static bool emitted_extern;
2054 if (!block_move_fn)
2055 init_block_move_fn (NULL);
2057 if (for_call && !emitted_extern)
2059 emitted_extern = true;
2060 make_decl_rtl (block_move_fn, NULL);
2061 assemble_external (block_move_fn);
2064 return block_move_fn;
2067 /* A subroutine of emit_block_move. Copy the data via an explicit
2068 loop. This is used only when libcalls are forbidden. */
2069 /* ??? It'd be nice to copy in hunks larger than QImode. */
2071 static void
2072 emit_block_move_via_loop (x, y, size, align)
2073 rtx x, y, size;
2074 unsigned int align ATTRIBUTE_UNUSED;
2076 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2077 enum machine_mode iter_mode;
2079 iter_mode = GET_MODE (size);
2080 if (iter_mode == VOIDmode)
2081 iter_mode = word_mode;
2083 top_label = gen_label_rtx ();
2084 cmp_label = gen_label_rtx ();
2085 iter = gen_reg_rtx (iter_mode);
2087 emit_move_insn (iter, const0_rtx);
2089 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2090 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2091 do_pending_stack_adjust ();
2093 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2095 emit_jump (cmp_label);
2096 emit_label (top_label);
2098 tmp = convert_modes (Pmode, iter_mode, iter, true);
2099 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2100 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2101 x = change_address (x, QImode, x_addr);
2102 y = change_address (y, QImode, y_addr);
2104 emit_move_insn (x, y);
2106 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2107 true, OPTAB_LIB_WIDEN);
2108 if (tmp != iter)
2109 emit_move_insn (iter, tmp);
2111 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2112 emit_label (cmp_label);
2114 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2115 true, top_label);
2117 emit_note (NULL, NOTE_INSN_LOOP_END);
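/* Illustrative sketch only (not part of this file): the RTL emitted by
   emit_block_move_via_loop above behaves like the following plain-C byte
   copy.  The names `example_copy_bytes', `dst', `src' and `nbytes' are
   hypothetical.  */
#if 0
static void
example_copy_bytes (unsigned char *dst, const unsigned char *src,
                    unsigned long nbytes)
{
  unsigned long iter = 0;      /* mirrors the `iter' pseudo register   */

  goto cmp;                    /* emit_jump (cmp_label)                */
 top:                          /* emit_label (top_label)               */
  dst[iter] = src[iter];       /* the QImode emit_move_insn            */
  iter++;                      /* expand_simple_binop (... PLUS ...)   */
 cmp:                          /* emit_label (cmp_label)               */
  if (iter < nbytes)           /* emit_cmp_and_jump_insns (... LT ...) */
    goto top;
}
#endif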
2120 /* Copy all or part of a value X into registers starting at REGNO.
2121 The number of registers to be filled is NREGS. */
2123 void
2124 move_block_to_reg (regno, x, nregs, mode)
2125 int regno;
2126 rtx x;
2127 int nregs;
2128 enum machine_mode mode;
2130 int i;
2131 #ifdef HAVE_load_multiple
2132 rtx pat;
2133 rtx last;
2134 #endif
2136 if (nregs == 0)
2137 return;
2139 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2140 x = validize_mem (force_const_mem (mode, x));
2142 /* See if the machine can do this with a load multiple insn. */
2143 #ifdef HAVE_load_multiple
2144 if (HAVE_load_multiple)
2146 last = get_last_insn ();
2147 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2148 GEN_INT (nregs));
2149 if (pat)
2151 emit_insn (pat);
2152 return;
2154 else
2155 delete_insns_since (last);
2157 #endif
2159 for (i = 0; i < nregs; i++)
2160 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2161 operand_subword_force (x, i, mode));
2164 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2165 The number of registers to be filled is NREGS. */
2167 void
2168 move_block_from_reg (regno, x, nregs)
2169 int regno;
2170 rtx x;
2171 int nregs;
2173 int i;
2175 if (nregs == 0)
2176 return;
2178 /* See if the machine can do this with a store multiple insn. */
2179 #ifdef HAVE_store_multiple
2180 if (HAVE_store_multiple)
2182 rtx last = get_last_insn ();
2183 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2184 GEN_INT (nregs));
2185 if (pat)
2187 emit_insn (pat);
2188 return;
2190 else
2191 delete_insns_since (last);
2193 #endif
2195 for (i = 0; i < nregs; i++)
2197 rtx tem = operand_subword (x, i, 1, BLKmode);
2199 if (tem == 0)
2200 abort ();
2202 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2206 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2207 ORIG, where ORIG is a non-consecutive group of registers represented by
2208 a PARALLEL. The clone is identical to the original except in that the
2209 original set of registers is replaced by a new set of pseudo registers.
2210 The new set has the same modes as the original set. */
2213 gen_group_rtx (orig)
2214 rtx orig;
2216 int i, length;
2217 rtx *tmps;
2219 if (GET_CODE (orig) != PARALLEL)
2220 abort ();
2222 length = XVECLEN (orig, 0);
2223 tmps = (rtx *) alloca (sizeof (rtx) * length);
2225 /* Skip a NULL entry in the first slot. */
2226 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2228 if (i)
2229 tmps[0] = 0;
2231 for (; i < length; i++)
2233 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2234 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2236 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2239 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2242 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2243 registers represented by a PARALLEL. SSIZE represents the total size of
2244 block SRC in bytes, or -1 if not known. */
2245 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2246 the balance will be in what would be the low-order memory addresses, i.e.
2247 left justified for big endian, right justified for little endian. This
2248 happens to be true for the targets currently using this support. If this
2249 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2250 would be needed. */
2252 void
2253 emit_group_load (dst, orig_src, ssize)
2254 rtx dst, orig_src;
2255 int ssize;
2257 rtx *tmps, src;
2258 int start, i;
2260 if (GET_CODE (dst) != PARALLEL)
2261 abort ();
2263 /* Check for a NULL entry, used to indicate that the parameter goes
2264 both on the stack and in registers. */
2265 if (XEXP (XVECEXP (dst, 0, 0), 0))
2266 start = 0;
2267 else
2268 start = 1;
2270 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2272 /* Process the pieces. */
2273 for (i = start; i < XVECLEN (dst, 0); i++)
2275 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2276 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2277 unsigned int bytelen = GET_MODE_SIZE (mode);
2278 int shift = 0;
2280 /* Handle trailing fragments that run over the size of the struct. */
2281 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2283 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2284 bytelen = ssize - bytepos;
2285 if (bytelen <= 0)
2286 abort ();
2289 /* If we won't be loading directly from memory, protect the real source
2290 from strange tricks we might play; but make sure that the source can
2291 be loaded directly into the destination. */
2292 src = orig_src;
2293 if (GET_CODE (orig_src) != MEM
2294 && (!CONSTANT_P (orig_src)
2295 || (GET_MODE (orig_src) != mode
2296 && GET_MODE (orig_src) != VOIDmode)))
2298 if (GET_MODE (orig_src) == VOIDmode)
2299 src = gen_reg_rtx (mode);
2300 else
2301 src = gen_reg_rtx (GET_MODE (orig_src));
2303 emit_move_insn (src, orig_src);
2306 /* Optimize the access just a bit. */
2307 if (GET_CODE (src) == MEM
2308 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2309 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2310 && bytelen == GET_MODE_SIZE (mode))
2312 tmps[i] = gen_reg_rtx (mode);
2313 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2315 else if (GET_CODE (src) == CONCAT)
2317 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2318 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2320 if ((bytepos == 0 && bytelen == slen0)
2321 || (bytepos != 0 && bytepos + bytelen <= slen))
2323 /* The following assumes that the concatenated objects all
2324 have the same size. In this case, a simple calculation
2325 can be used to determine the object and the bit field
2326 to be extracted. */
2327 tmps[i] = XEXP (src, bytepos / slen0);
2328 if (! CONSTANT_P (tmps[i])
2329 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2330 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2331 (bytepos % slen0) * BITS_PER_UNIT,
2332 1, NULL_RTX, mode, mode, ssize);
2334 else if (bytepos == 0)
2336 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2337 emit_move_insn (mem, src);
2338 tmps[i] = adjust_address (mem, mode, 0);
2340 else
2341 abort ();
2343 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2344 SIMD register, which is currently broken. Until we get GCC
2345 to emit proper RTL for these cases, dump to memory. */
2346 else if (VECTOR_MODE_P (GET_MODE (dst))
2347 && GET_CODE (src) == REG)
2349 int slen = GET_MODE_SIZE (GET_MODE (src));
2350 rtx mem;
2352 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2353 emit_move_insn (mem, src);
2354 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2356 else if (CONSTANT_P (src)
2357 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2358 tmps[i] = src;
2359 else
2360 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2361 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2362 mode, mode, ssize);
2364 if (BYTES_BIG_ENDIAN && shift)
2365 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2366 tmps[i], 0, OPTAB_WIDEN);
2369 emit_queue ();
2371 /* Copy the extracted pieces into the proper (probable) hard regs. */
2372 for (i = start; i < XVECLEN (dst, 0); i++)
2373 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
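/* Illustrative usage sketch only (not part of this file): building a
   two-register group and loading an 8-byte BLKmode value into it with
   emit_group_load.  `example_group_load' and its ARG parameter are
   hypothetical; real callers get the MEM from the argument-passing
   machinery.  */
#if 0
static void
example_group_load (arg)
     rtx arg;                  /* BLKmode MEM, 8 bytes */
{
  rtx r0 = gen_reg_rtx (SImode);
  rtx r1 = gen_reg_rtx (SImode);
  rtx group
    = gen_rtx_PARALLEL (BLKmode,
                        gen_rtvec (2,
                                   gen_rtx_EXPR_LIST (VOIDmode, r0,
                                                      GEN_INT (0)),
                                   gen_rtx_EXPR_LIST (VOIDmode, r1,
                                                      GEN_INT (4))));

  /* Bytes 0-3 of ARG end up in R0, bytes 4-7 in R1.  */
  emit_group_load (group, arg, 8);
}
#endif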
2376 /* Emit code to move a block SRC to block DST, where SRC and DST are
2377 non-consecutive groups of registers, each represented by a PARALLEL. */
2379 void
2380 emit_group_move (dst, src)
2381 rtx dst, src;
2383 int i;
2385 if (GET_CODE (src) != PARALLEL
2386 || GET_CODE (dst) != PARALLEL
2387 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2388 abort ();
2390 /* Skip first entry if NULL. */
2391 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2392 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2393 XEXP (XVECEXP (src, 0, i), 0));
2396 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2397 registers represented by a PARALLEL. SSIZE represents the total size of
2398 block DST, or -1 if not known. */
2400 void
2401 emit_group_store (orig_dst, src, ssize)
2402 rtx orig_dst, src;
2403 int ssize;
2405 rtx *tmps, dst;
2406 int start, i;
2408 if (GET_CODE (src) != PARALLEL)
2409 abort ();
2411 /* Check for a NULL entry, used to indicate that the parameter goes
2412 both on the stack and in registers. */
2413 if (XEXP (XVECEXP (src, 0, 0), 0))
2414 start = 0;
2415 else
2416 start = 1;
2418 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2420 /* Copy the (probable) hard regs into pseudos. */
2421 for (i = start; i < XVECLEN (src, 0); i++)
2423 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2424 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2425 emit_move_insn (tmps[i], reg);
2427 emit_queue ();
2429 /* If we won't be storing directly into memory, protect the real destination
2430 from strange tricks we might play. */
2431 dst = orig_dst;
2432 if (GET_CODE (dst) == PARALLEL)
2434 rtx temp;
2436 /* We can get a PARALLEL dst if there is a conditional expression in
2437 a return statement. In that case, the dst and src are the same,
2438 so no action is necessary. */
2439 if (rtx_equal_p (dst, src))
2440 return;
2442 /* It is unclear if we can ever reach here, but we may as well handle
2443 it. Allocate a temporary, and split this into a store/load to/from
2444 the temporary. */
2446 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2447 emit_group_store (temp, src, ssize);
2448 emit_group_load (dst, temp, ssize);
2449 return;
2451 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2453 dst = gen_reg_rtx (GET_MODE (orig_dst));
2454 /* Make life a bit easier for combine. */
2455 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2458 /* Process the pieces. */
2459 for (i = start; i < XVECLEN (src, 0); i++)
2461 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2462 enum machine_mode mode = GET_MODE (tmps[i]);
2463 unsigned int bytelen = GET_MODE_SIZE (mode);
2464 rtx dest = dst;
2466 /* Handle trailing fragments that run over the size of the struct. */
2467 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2469 if (BYTES_BIG_ENDIAN)
2471 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2472 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2473 tmps[i], 0, OPTAB_WIDEN);
2475 bytelen = ssize - bytepos;
2478 if (GET_CODE (dst) == CONCAT)
2480 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2481 dest = XEXP (dst, 0);
2482 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2484 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2485 dest = XEXP (dst, 1);
2487 else if (bytepos == 0 && XVECLEN (src, 0))
2489 dest = assign_stack_temp (GET_MODE (dest),
2490 GET_MODE_SIZE (GET_MODE (dest)), 0);
2491 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2492 tmps[i]);
2493 dst = dest;
2494 break;
2496 else
2497 abort ();
2500 /* Optimize the access just a bit. */
2501 if (GET_CODE (dest) == MEM
2502 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2503 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2504 && bytelen == GET_MODE_SIZE (mode))
2505 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2506 else
2507 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2508 mode, tmps[i], ssize);
2511 emit_queue ();
2513 /* Copy from the pseudo into the (probable) hard reg. */
2514 if (orig_dst != dst)
2515 emit_move_insn (orig_dst, dst);
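/* Illustrative usage sketch only (not part of this file): the mirror image
   of the emit_group_load example above -- spilling a register group back to
   memory.  `example_group_store' and its parameters are hypothetical.  */
#if 0
static void
example_group_store (group, dest)
     rtx group;                /* PARALLEL built as for emit_group_load */
     rtx dest;                 /* BLKmode MEM, 8 bytes                  */
{
  /* The registers named by GROUP are copied into DEST at the byte offsets
     recorded in each EXPR_LIST element.  */
  emit_group_store (dest, group, 8);
}
#endif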
2518 /* Generate code to copy a BLKmode object of TYPE out of a
2519 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2520 is null, a stack temporary is created. TGTBLK is returned.
2522 The primary purpose of this routine is to handle functions
2523 that return BLKmode structures in registers. Some machines
2524 (the PA for example) want to return all small structures
2525 in registers regardless of the structure's alignment. */
2528 copy_blkmode_from_reg (tgtblk, srcreg, type)
2529 rtx tgtblk;
2530 rtx srcreg;
2531 tree type;
2533 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2534 rtx src = NULL, dst = NULL;
2535 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2536 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2538 if (tgtblk == 0)
2540 tgtblk = assign_temp (build_qualified_type (type,
2541 (TYPE_QUALS (type)
2542 | TYPE_QUAL_CONST)),
2543 0, 1, 1);
2544 preserve_temp_slots (tgtblk);
2547 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2548 into a new pseudo which is a full word. */
2550 if (GET_MODE (srcreg) != BLKmode
2551 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2552 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2554 /* Structures whose size is not a multiple of a word are aligned
2555 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2556 machine, this means we must skip the empty high order bytes when
2557 calculating the bit offset. */
2558 if (BYTES_BIG_ENDIAN
2559 && bytes % UNITS_PER_WORD)
2560 big_endian_correction
2561 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2563 /* Copy the structure BITSIZE bits at a time.
2565 We could probably emit more efficient code for machines which do not use
2566 strict alignment, but it doesn't seem worth the effort at the current
2567 time. */
2568 for (bitpos = 0, xbitpos = big_endian_correction;
2569 bitpos < bytes * BITS_PER_UNIT;
2570 bitpos += bitsize, xbitpos += bitsize)
2572 /* We need a new source operand each time xbitpos is on a
2573 word boundary or when xbitpos == big_endian_correction
2574 (the first time through). */
2575 if (xbitpos % BITS_PER_WORD == 0
2576 || xbitpos == big_endian_correction)
2577 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2578 GET_MODE (srcreg));
2580 /* We need a new destination operand each time bitpos is on
2581 a word boundary. */
2582 if (bitpos % BITS_PER_WORD == 0)
2583 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2585 /* Use xbitpos for the source extraction (right justified) and
2586 bitpos for the destination store (left justified). */
2587 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2588 extract_bit_field (src, bitsize,
2589 xbitpos % BITS_PER_WORD, 1,
2590 NULL_RTX, word_mode, word_mode,
2591 BITS_PER_WORD),
2592 BITS_PER_WORD);
2595 return tgtblk;
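/* Worked example (illustrative only): on a hypothetical 32-bit big-endian
   target with UNITS_PER_WORD == 4 and BITS_PER_WORD == 32, a 6-byte
   structure gives bytes % UNITS_PER_WORD == 2, so
       big_endian_correction = 32 - 2 * BITS_PER_UNIT = 32 - 16 = 16,
   and the copy loop above starts extracting at xbitpos == 16, skipping the
   two unused high-order bytes of the first source word.  */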
2598 /* Add a USE expression for REG to the (possibly empty) list pointed
2599 to by CALL_FUSAGE. REG must denote a hard register. */
2601 void
2602 use_reg (call_fusage, reg)
2603 rtx *call_fusage, reg;
2605 if (GET_CODE (reg) != REG
2606 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2607 abort ();
2609 *call_fusage
2610 = gen_rtx_EXPR_LIST (VOIDmode,
2611 gen_rtx_USE (VOIDmode, reg), *call_fusage);
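/* Illustrative usage sketch only (not part of this file): recording that a
   call reads two argument registers.  The hard register numbers 0 and 1 are
   hypothetical and target-dependent; real callers obtain them from the
   argument-passing machinery.  */
#if 0
static void
example_record_call_usage ()
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  use_reg (&call_fusage, gen_rtx_REG (SImode, 1));

  /* CALL_FUSAGE would then be attached to the call insn as its
     CALL_INSN_FUNCTION_USAGE.  */
}
#endif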
2614 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2615 starting at REGNO. All of these registers must be hard registers. */
2617 void
2618 use_regs (call_fusage, regno, nregs)
2619 rtx *call_fusage;
2620 int regno;
2621 int nregs;
2623 int i;
2625 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2626 abort ();
2628 for (i = 0; i < nregs; i++)
2629 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2632 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2633 PARALLEL REGS. This is for calls that pass values in multiple
2634 non-contiguous locations. The Irix 6 ABI has examples of this. */
2636 void
2637 use_group_regs (call_fusage, regs)
2638 rtx *call_fusage;
2639 rtx regs;
2641 int i;
2643 for (i = 0; i < XVECLEN (regs, 0); i++)
2645 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2647 /* A NULL entry means the parameter goes both on the stack and in
2648 registers. This can also be a MEM for targets that pass values
2649 partially on the stack and partially in registers. */
2650 if (reg != 0 && GET_CODE (reg) == REG)
2651 use_reg (call_fusage, reg);
2656 /* Determine whether the LEN bytes generated by CONSTFUN can be
2657 stored to memory using several move instructions. CONSTFUNDATA is
2658 a pointer which will be passed as argument in every CONSTFUN call.
2659 ALIGN is maximum alignment we can assume. Return nonzero if a
2660 call to store_by_pieces should succeed. */
2663 can_store_by_pieces (len, constfun, constfundata, align)
2664 unsigned HOST_WIDE_INT len;
2665 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2666 PTR constfundata;
2667 unsigned int align;
2669 unsigned HOST_WIDE_INT max_size, l;
2670 HOST_WIDE_INT offset = 0;
2671 enum machine_mode mode, tmode;
2672 enum insn_code icode;
2673 int reverse;
2674 rtx cst;
2676 if (! STORE_BY_PIECES_P (len, align))
2677 return 0;
2679 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2680 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2681 align = MOVE_MAX * BITS_PER_UNIT;
2683 /* We would first store what we can in the largest integer mode, then go to
2684 successively smaller modes. */
2686 for (reverse = 0;
2687 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2688 reverse++)
2690 l = len;
2691 mode = VOIDmode;
2692 max_size = STORE_MAX_PIECES + 1;
2693 while (max_size > 1)
2695 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2696 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2697 if (GET_MODE_SIZE (tmode) < max_size)
2698 mode = tmode;
2700 if (mode == VOIDmode)
2701 break;
2703 icode = mov_optab->handlers[(int) mode].insn_code;
2704 if (icode != CODE_FOR_nothing
2705 && align >= GET_MODE_ALIGNMENT (mode))
2707 unsigned int size = GET_MODE_SIZE (mode);
2709 while (l >= size)
2711 if (reverse)
2712 offset -= size;
2714 cst = (*constfun) (constfundata, offset, mode);
2715 if (!LEGITIMATE_CONSTANT_P (cst))
2716 return 0;
2718 if (!reverse)
2719 offset += size;
2721 l -= size;
2725 max_size = GET_MODE_SIZE (mode);
2728 /* The code above should have handled everything. */
2729 if (l != 0)
2730 abort ();
2733 return 1;
2736 /* Generate several move instructions to store LEN bytes generated by
2737 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2738 pointer which will be passed as argument in every CONSTFUN call.
2739 ALIGN is maximum alignment we can assume. */
2741 void
2742 store_by_pieces (to, len, constfun, constfundata, align)
2743 rtx to;
2744 unsigned HOST_WIDE_INT len;
2745 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2746 PTR constfundata;
2747 unsigned int align;
2749 struct store_by_pieces data;
2751 if (! STORE_BY_PIECES_P (len, align))
2752 abort ();
2753 to = protect_from_queue (to, 1);
2754 data.constfun = constfun;
2755 data.constfundata = constfundata;
2756 data.len = len;
2757 data.to = to;
2758 store_by_pieces_1 (&data, align);
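/* Illustrative usage sketch only (not part of this file): filling a 16-byte
   BLKmode MEM with zero bytes through the store_by_pieces machinery.  The
   callback mirrors clear_by_pieces_1 below; `example_const_byte',
   `example_zero_fill' and DEST are hypothetical.  */
#if 0
static rtx
example_const_byte (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;           /* a zero constant is valid in every mode */
}

static void
example_zero_fill (dest)
     rtx dest;                 /* BLKmode MEM, at least 16 bytes */
{
  if (can_store_by_pieces (16, example_const_byte, NULL, MEM_ALIGN (dest)))
    store_by_pieces (dest, 16, example_const_byte, NULL, MEM_ALIGN (dest));
}
#endif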
2761 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2762 rtx with BLKmode). The caller must pass TO through protect_from_queue
2763 before calling. ALIGN is maximum alignment we can assume. */
2765 static void
2766 clear_by_pieces (to, len, align)
2767 rtx to;
2768 unsigned HOST_WIDE_INT len;
2769 unsigned int align;
2771 struct store_by_pieces data;
2773 data.constfun = clear_by_pieces_1;
2774 data.constfundata = NULL;
2775 data.len = len;
2776 data.to = to;
2777 store_by_pieces_1 (&data, align);
2780 /* Callback routine for clear_by_pieces.
2781 Return const0_rtx unconditionally. */
2783 static rtx
2784 clear_by_pieces_1 (data, offset, mode)
2785 PTR data ATTRIBUTE_UNUSED;
2786 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2787 enum machine_mode mode ATTRIBUTE_UNUSED;
2789 return const0_rtx;
2792 /* Subroutine of clear_by_pieces and store_by_pieces.
2793 Generate several move instructions to store LEN bytes of block TO. (A MEM
2794 rtx with BLKmode). The caller must pass TO through protect_from_queue
2795 before calling. ALIGN is maximum alignment we can assume. */
2797 static void
2798 store_by_pieces_1 (data, align)
2799 struct store_by_pieces *data;
2800 unsigned int align;
2802 rtx to_addr = XEXP (data->to, 0);
2803 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2804 enum machine_mode mode = VOIDmode, tmode;
2805 enum insn_code icode;
2807 data->offset = 0;
2808 data->to_addr = to_addr;
2809 data->autinc_to
2810 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2811 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2813 data->explicit_inc_to = 0;
2814 data->reverse
2815 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2816 if (data->reverse)
2817 data->offset = data->len;
2819 /* If storing requires more than two move insns,
2820 copy addresses to registers (to make displacements shorter)
2821 and use post-increment if available. */
2822 if (!data->autinc_to
2823 && move_by_pieces_ninsns (data->len, align) > 2)
2825 /* Determine the main mode we'll be using. */
2826 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2827 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2828 if (GET_MODE_SIZE (tmode) < max_size)
2829 mode = tmode;
2831 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2833 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2834 data->autinc_to = 1;
2835 data->explicit_inc_to = -1;
2838 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2839 && ! data->autinc_to)
2841 data->to_addr = copy_addr_to_reg (to_addr);
2842 data->autinc_to = 1;
2843 data->explicit_inc_to = 1;
2846 if ( !data->autinc_to && CONSTANT_P (to_addr))
2847 data->to_addr = copy_addr_to_reg (to_addr);
2850 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2851 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2852 align = MOVE_MAX * BITS_PER_UNIT;
2854 /* First store what we can in the largest integer mode, then go to
2855 successively smaller modes. */
2857 while (max_size > 1)
2859 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2860 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2861 if (GET_MODE_SIZE (tmode) < max_size)
2862 mode = tmode;
2864 if (mode == VOIDmode)
2865 break;
2867 icode = mov_optab->handlers[(int) mode].insn_code;
2868 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2869 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2871 max_size = GET_MODE_SIZE (mode);
2874 /* The code above should have handled everything. */
2875 if (data->len != 0)
2876 abort ();
2879 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2880 with move instructions for mode MODE. GENFUN is the gen_... function
2881 to make a move insn for that mode. DATA has all the other info. */
2883 static void
2884 store_by_pieces_2 (genfun, mode, data)
2885 rtx (*genfun) PARAMS ((rtx, ...));
2886 enum machine_mode mode;
2887 struct store_by_pieces *data;
2889 unsigned int size = GET_MODE_SIZE (mode);
2890 rtx to1, cst;
2892 while (data->len >= size)
2894 if (data->reverse)
2895 data->offset -= size;
2897 if (data->autinc_to)
2898 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2899 data->offset);
2900 else
2901 to1 = adjust_address (data->to, mode, data->offset);
2903 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2904 emit_insn (gen_add2_insn (data->to_addr,
2905 GEN_INT (-(HOST_WIDE_INT) size)));
2907 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2908 emit_insn ((*genfun) (to1, cst));
2910 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2911 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2913 if (! data->reverse)
2914 data->offset += size;
2916 data->len -= size;
2920 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2921 its length in bytes. */
2924 clear_storage (object, size)
2925 rtx object;
2926 rtx size;
2928 rtx retval = 0;
2929 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2930 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2932 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2933 just move a zero. Otherwise, do this a piece at a time. */
2934 if (GET_MODE (object) != BLKmode
2935 && GET_CODE (size) == CONST_INT
2936 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2937 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2938 else
2940 object = protect_from_queue (object, 1);
2941 size = protect_from_queue (size, 0);
2943 if (GET_CODE (size) == CONST_INT
2944 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2945 clear_by_pieces (object, INTVAL (size), align);
2946 else if (clear_storage_via_clrstr (object, size, align))
2948 else
2949 retval = clear_storage_via_libcall (object, size);
2952 return retval;
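/* Illustrative usage sketch only (not part of this file): zeroing a 64-byte
   stack temporary with clear_storage.  The size is arbitrary.  */
#if 0
static void
example_clear_temp ()
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  /* MEM is BLKmode, so clear_storage picks clear_by_pieces, a clrstr
     pattern, or a memset/bzero libcall, in that order of preference.  */
  clear_storage (mem, GEN_INT (64));
}
#endif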
2955 /* A subroutine of clear_storage. Expand a clrstr pattern;
2956 return true if successful. */
2958 static bool
2959 clear_storage_via_clrstr (object, size, align)
2960 rtx object, size;
2961 unsigned int align;
2963 /* Try the most limited insn first, because there's no point
2964 including more than one in the machine description unless
2965 the more limited one has some advantage. */
2967 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2968 enum machine_mode mode;
2970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2971 mode = GET_MODE_WIDER_MODE (mode))
2973 enum insn_code code = clrstr_optab[(int) mode];
2974 insn_operand_predicate_fn pred;
2976 if (code != CODE_FOR_nothing
2977 /* We don't need MODE to be narrower than
2978 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2979 the mode mask, as it is returned by the macro, it will
2980 definitely be less than the actual mode mask. */
2981 && ((GET_CODE (size) == CONST_INT
2982 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2983 <= (GET_MODE_MASK (mode) >> 1)))
2984 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2985 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2986 || (*pred) (object, BLKmode))
2987 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2988 || (*pred) (opalign, VOIDmode)))
2990 rtx op1;
2991 rtx last = get_last_insn ();
2992 rtx pat;
2994 op1 = convert_to_mode (mode, size, 1);
2995 pred = insn_data[(int) code].operand[1].predicate;
2996 if (pred != 0 && ! (*pred) (op1, mode))
2997 op1 = copy_to_mode_reg (mode, op1);
2999 pat = GEN_FCN ((int) code) (object, op1, opalign);
3000 if (pat)
3002 emit_insn (pat);
3003 return true;
3005 else
3006 delete_insns_since (last);
3010 return false;
3013 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3014 Return the return value of memset, 0 otherwise. */
3016 static rtx
3017 clear_storage_via_libcall (object, size)
3018 rtx object, size;
3020 tree call_expr, arg_list, fn, object_tree, size_tree;
3021 enum machine_mode size_mode;
3022 rtx retval;
3024 /* OBJECT or SIZE may have been passed through protect_from_queue.
3026 It is unsafe to save the value generated by protect_from_queue
3027 and reuse it later. Consider what happens if emit_queue is
3028 called before the return value from protect_from_queue is used.
3030 Expansion of the CALL_EXPR below will call emit_queue before
3031 we are finished emitting RTL for argument setup. So if we are
3032 not careful we could get the wrong value for an argument.
3034 To avoid this problem we go ahead and emit code to copy OBJECT
3035 and SIZE into new pseudos. We can then place those new pseudos
3036 into an RTL_EXPR and use them later, even after a call to
3037 emit_queue.
3039 Note this is not strictly needed for library calls since they
3040 do not call emit_queue before loading their arguments. However,
3041 we may need to have library calls call emit_queue in the future
3042 since failing to do so could cause problems for targets which
3043 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3045 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3047 if (TARGET_MEM_FUNCTIONS)
3048 size_mode = TYPE_MODE (sizetype);
3049 else
3050 size_mode = TYPE_MODE (unsigned_type_node);
3051 size = convert_to_mode (size_mode, size, 1);
3052 size = copy_to_mode_reg (size_mode, size);
3054 /* It is incorrect to use the libcall calling conventions to call
3055 memset in this context. This could be a user call to memset and
3056 the user may wish to examine the return value from memset. For
3057 targets where libcalls and normal calls have different conventions
3058 for returning pointers, we could end up generating incorrect code.
3060 For convenience, we generate the call to bzero this way as well. */
3062 object_tree = make_tree (ptr_type_node, object);
3063 if (TARGET_MEM_FUNCTIONS)
3064 size_tree = make_tree (sizetype, size);
3065 else
3066 size_tree = make_tree (unsigned_type_node, size);
3068 fn = clear_storage_libcall_fn (true);
3069 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3070 if (TARGET_MEM_FUNCTIONS)
3071 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3072 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3074 /* Now we have to build up the CALL_EXPR itself. */
3075 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3076 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3077 call_expr, arg_list, NULL_TREE);
3078 TREE_SIDE_EFFECTS (call_expr) = 1;
3080 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3082 /* If we are initializing a readonly value, show the above call
3083 clobbered it. Otherwise, a load from it may erroneously be
3084 hoisted from a loop. */
3085 if (RTX_UNCHANGING_P (object))
3086 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3088 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3091 /* A subroutine of clear_storage_via_libcall. Create the tree node
3092 for the function we use for block clears. The first time FOR_CALL
3093 is true, we call assemble_external. */
3095 static GTY(()) tree block_clear_fn;
3097 void
3098 init_block_clear_fn (asmspec)
3099 const char *asmspec;
3101 if (!block_clear_fn)
3103 tree fn, args;
3105 if (TARGET_MEM_FUNCTIONS)
3107 fn = get_identifier ("memset");
3108 args = build_function_type_list (ptr_type_node, ptr_type_node,
3109 integer_type_node, sizetype,
3110 NULL_TREE);
3112 else
3114 fn = get_identifier ("bzero");
3115 args = build_function_type_list (void_type_node, ptr_type_node,
3116 unsigned_type_node, NULL_TREE);
3119 fn = build_decl (FUNCTION_DECL, fn, args);
3120 DECL_EXTERNAL (fn) = 1;
3121 TREE_PUBLIC (fn) = 1;
3122 DECL_ARTIFICIAL (fn) = 1;
3123 TREE_NOTHROW (fn) = 1;
3125 block_clear_fn = fn;
3128 if (asmspec)
3130 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3131 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3135 static tree
3136 clear_storage_libcall_fn (for_call)
3137 int for_call;
3139 static bool emitted_extern;
3141 if (!block_clear_fn)
3142 init_block_clear_fn (NULL);
3144 if (for_call && !emitted_extern)
3146 emitted_extern = true;
3147 make_decl_rtl (block_clear_fn, NULL);
3148 assemble_external (block_clear_fn);
3151 return block_clear_fn;
3154 /* Generate code to copy Y into X.
3155 Both Y and X must have the same mode, except that
3156 Y can be a constant with VOIDmode.
3157 This mode cannot be BLKmode; use emit_block_move for that.
3159 Return the last instruction emitted. */
3162 emit_move_insn (x, y)
3163 rtx x, y;
3165 enum machine_mode mode = GET_MODE (x);
3166 rtx y_cst = NULL_RTX;
3167 rtx last_insn;
3169 x = protect_from_queue (x, 1);
3170 y = protect_from_queue (y, 0);
3172 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3173 abort ();
3175 /* Never force constant_p_rtx to memory. */
3176 if (GET_CODE (y) == CONSTANT_P_RTX)
3178 else if (CONSTANT_P (y))
3180 if (optimize
3181 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3182 && (last_insn = compress_float_constant (x, y)))
3183 return last_insn;
3185 if (!LEGITIMATE_CONSTANT_P (y))
3187 y_cst = y;
3188 y = force_const_mem (mode, y);
3190 /* If the target's cannot_force_const_mem prevented the spill,
3191 assume that the target's move expanders will also take care
3192 of the non-legitimate constant. */
3193 if (!y)
3194 y = y_cst;
3198 /* If X or Y are memory references, verify that their addresses are valid
3199 for the machine. */
3200 if (GET_CODE (x) == MEM
3201 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3202 && ! push_operand (x, GET_MODE (x)))
3203 || (flag_force_addr
3204 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3205 x = validize_mem (x);
3207 if (GET_CODE (y) == MEM
3208 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3209 || (flag_force_addr
3210 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3211 y = validize_mem (y);
3213 if (mode == BLKmode)
3214 abort ();
3216 last_insn = emit_move_insn_1 (x, y);
3218 if (y_cst && GET_CODE (x) == REG)
3219 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3221 return last_insn;
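/* Illustrative usage sketch only (not part of this file): the common pattern
   of moving a constant into a fresh pseudo.  emit_move_insn validates both
   operands and forces an illegitimate constant into the constant pool if
   necessary.  */
#if 0
static void
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
}
#endif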
3224 /* Low level part of emit_move_insn.
3225 Called just like emit_move_insn, but assumes X and Y
3226 are basically valid. */
3229 emit_move_insn_1 (x, y)
3230 rtx x, y;
3232 enum machine_mode mode = GET_MODE (x);
3233 enum machine_mode submode;
3234 enum mode_class class = GET_MODE_CLASS (mode);
3236 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3237 abort ();
3239 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3240 return
3241 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3243 /* Expand complex moves by moving real part and imag part, if possible. */
3244 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3245 && BLKmode != (submode = GET_MODE_INNER (mode))
3246 && (mov_optab->handlers[(int) submode].insn_code
3247 != CODE_FOR_nothing))
3249 /* Don't split destination if it is a stack push. */
3250 int stack = push_operand (x, GET_MODE (x));
3252 #ifdef PUSH_ROUNDING
3253 /* In case we output to the stack, but the size is smaller than what the
3254 machine can push exactly, we need to use move instructions. */
3255 if (stack
3256 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3257 != GET_MODE_SIZE (submode)))
3259 rtx temp;
3260 HOST_WIDE_INT offset1, offset2;
3262 /* Do not use anti_adjust_stack, since we don't want to update
3263 stack_pointer_delta. */
3264 temp = expand_binop (Pmode,
3265 #ifdef STACK_GROWS_DOWNWARD
3266 sub_optab,
3267 #else
3268 add_optab,
3269 #endif
3270 stack_pointer_rtx,
3271 GEN_INT
3272 (PUSH_ROUNDING
3273 (GET_MODE_SIZE (GET_MODE (x)))),
3274 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3276 if (temp != stack_pointer_rtx)
3277 emit_move_insn (stack_pointer_rtx, temp);
3279 #ifdef STACK_GROWS_DOWNWARD
3280 offset1 = 0;
3281 offset2 = GET_MODE_SIZE (submode);
3282 #else
3283 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3284 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3285 + GET_MODE_SIZE (submode));
3286 #endif
3288 emit_move_insn (change_address (x, submode,
3289 gen_rtx_PLUS (Pmode,
3290 stack_pointer_rtx,
3291 GEN_INT (offset1))),
3292 gen_realpart (submode, y));
3293 emit_move_insn (change_address (x, submode,
3294 gen_rtx_PLUS (Pmode,
3295 stack_pointer_rtx,
3296 GEN_INT (offset2))),
3297 gen_imagpart (submode, y));
3299 else
3300 #endif
3301 /* If this is a stack, push the highpart first, so it
3302 will be in the argument order.
3304 In that case, change_address is used only to convert
3305 the mode, not to change the address. */
3306 if (stack)
3308 /* Note that the real part always precedes the imag part in memory
3309 regardless of machine's endianness. */
3310 #ifdef STACK_GROWS_DOWNWARD
3311 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3312 (gen_rtx_MEM (submode, XEXP (x, 0)),
3313 gen_imagpart (submode, y)));
3314 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3315 (gen_rtx_MEM (submode, XEXP (x, 0)),
3316 gen_realpart (submode, y)));
3317 #else
3318 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3319 (gen_rtx_MEM (submode, XEXP (x, 0)),
3320 gen_realpart (submode, y)));
3321 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3322 (gen_rtx_MEM (submode, XEXP (x, 0)),
3323 gen_imagpart (submode, y)));
3324 #endif
3326 else
3328 rtx realpart_x, realpart_y;
3329 rtx imagpart_x, imagpart_y;
3331 /* If this is a complex value with each part being smaller than a
3332 word, the usual calling sequence will likely pack the pieces into
3333 a single register. Unfortunately, SUBREG of hard registers only
3334 deals in terms of words, so we have a problem converting input
3335 arguments to the CONCAT of two registers that is used elsewhere
3336 for complex values. If this is before reload, we can copy it into
3337 memory and reload. FIXME, we should see about using extract and
3338 insert on integer registers, but complex short and complex char
3339 variables should be rarely used. */
3340 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3341 && (reload_in_progress | reload_completed) == 0)
3343 int packed_dest_p
3344 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3345 int packed_src_p
3346 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3348 if (packed_dest_p || packed_src_p)
3350 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3351 ? MODE_FLOAT : MODE_INT);
3353 enum machine_mode reg_mode
3354 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3356 if (reg_mode != BLKmode)
3358 rtx mem = assign_stack_temp (reg_mode,
3359 GET_MODE_SIZE (mode), 0);
3360 rtx cmem = adjust_address (mem, mode, 0);
3362 cfun->cannot_inline
3363 = N_("function using short complex types cannot be inline");
3365 if (packed_dest_p)
3367 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3369 emit_move_insn_1 (cmem, y);
3370 return emit_move_insn_1 (sreg, mem);
3372 else
3374 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3376 emit_move_insn_1 (mem, sreg);
3377 return emit_move_insn_1 (x, cmem);
3383 realpart_x = gen_realpart (submode, x);
3384 realpart_y = gen_realpart (submode, y);
3385 imagpart_x = gen_imagpart (submode, x);
3386 imagpart_y = gen_imagpart (submode, y);
3388 /* Show the output dies here. This is necessary for SUBREGs
3389 of pseudos since we cannot track their lifetimes correctly;
3390 hard regs shouldn't appear here except as return values.
3391 We never want to emit such a clobber after reload. */
3392 if (x != y
3393 && ! (reload_in_progress || reload_completed)
3394 && (GET_CODE (realpart_x) == SUBREG
3395 || GET_CODE (imagpart_x) == SUBREG))
3396 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3398 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3399 (realpart_x, realpart_y));
3400 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3401 (imagpart_x, imagpart_y));
3404 return get_last_insn ();
3407 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3408 find a mode to do it in. If we have a movcc, use it. Otherwise,
3409 find the MODE_INT mode of the same width. */
3410 else if (GET_MODE_CLASS (mode) == MODE_CC
3411 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3413 enum insn_code insn_code;
3414 enum machine_mode tmode = VOIDmode;
3415 rtx x1 = x, y1 = y;
3417 if (mode != CCmode
3418 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3419 tmode = CCmode;
3420 else
3421 for (tmode = QImode; tmode != VOIDmode;
3422 tmode = GET_MODE_WIDER_MODE (tmode))
3423 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3424 break;
3426 if (tmode == VOIDmode)
3427 abort ();
3429 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3430 may call change_address which is not appropriate if we were
3431 called when a reload was in progress. We don't have to worry
3432 about changing the address since the size in bytes is supposed to
3433 be the same. Copy the MEM to change the mode and move any
3434 substitutions from the old MEM to the new one. */
3436 if (reload_in_progress)
3438 x = gen_lowpart_common (tmode, x1);
3439 if (x == 0 && GET_CODE (x1) == MEM)
3441 x = adjust_address_nv (x1, tmode, 0);
3442 copy_replacements (x1, x);
3445 y = gen_lowpart_common (tmode, y1);
3446 if (y == 0 && GET_CODE (y1) == MEM)
3448 y = adjust_address_nv (y1, tmode, 0);
3449 copy_replacements (y1, y);
3452 else
3454 x = gen_lowpart (tmode, x);
3455 y = gen_lowpart (tmode, y);
3458 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3459 return emit_insn (GEN_FCN (insn_code) (x, y));
3462 /* This will handle any multi-word or full-word mode that lacks a move_insn
3463 pattern. However, you will get better code if you define such patterns,
3464 even if they must turn into multiple assembler instructions. */
3465 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3467 rtx last_insn = 0;
3468 rtx seq, inner;
3469 int need_clobber;
3470 int i;
3472 #ifdef PUSH_ROUNDING
3474 /* If X is a push on the stack, do the push now and replace
3475 X with a reference to the stack pointer. */
3476 if (push_operand (x, GET_MODE (x)))
3478 rtx temp;
3479 enum rtx_code code;
3481 /* Do not use anti_adjust_stack, since we don't want to update
3482 stack_pointer_delta. */
3483 temp = expand_binop (Pmode,
3484 #ifdef STACK_GROWS_DOWNWARD
3485 sub_optab,
3486 #else
3487 add_optab,
3488 #endif
3489 stack_pointer_rtx,
3490 GEN_INT
3491 (PUSH_ROUNDING
3492 (GET_MODE_SIZE (GET_MODE (x)))),
3493 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3495 if (temp != stack_pointer_rtx)
3496 emit_move_insn (stack_pointer_rtx, temp);
3498 code = GET_CODE (XEXP (x, 0));
3500 /* Just hope that small offsets off SP are OK. */
3501 if (code == POST_INC)
3502 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3503 GEN_INT (-((HOST_WIDE_INT)
3504 GET_MODE_SIZE (GET_MODE (x)))));
3505 else if (code == POST_DEC)
3506 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3507 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3508 else
3509 temp = stack_pointer_rtx;
3511 x = change_address (x, VOIDmode, temp);
3513 #endif
3515 /* If we are in reload, see if either operand is a MEM whose address
3516 is scheduled for replacement. */
3517 if (reload_in_progress && GET_CODE (x) == MEM
3518 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3519 x = replace_equiv_address_nv (x, inner);
3520 if (reload_in_progress && GET_CODE (y) == MEM
3521 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3522 y = replace_equiv_address_nv (y, inner);
3524 start_sequence ();
3526 need_clobber = 0;
3527 for (i = 0;
3528 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3529 i++)
3531 rtx xpart = operand_subword (x, i, 1, mode);
3532 rtx ypart = operand_subword (y, i, 1, mode);
3534 /* If we can't get a part of Y, put Y into memory if it is a
3535 constant. Otherwise, force it into a register. If we still
3536 can't get a part of Y, abort. */
3537 if (ypart == 0 && CONSTANT_P (y))
3539 y = force_const_mem (mode, y);
3540 ypart = operand_subword (y, i, 1, mode);
3542 else if (ypart == 0)
3543 ypart = operand_subword_force (y, i, mode);
3545 if (xpart == 0 || ypart == 0)
3546 abort ();
3548 need_clobber |= (GET_CODE (xpart) == SUBREG);
3550 last_insn = emit_move_insn (xpart, ypart);
3553 seq = get_insns ();
3554 end_sequence ();
3556 /* Show the output dies here. This is necessary for SUBREGs
3557 of pseudos since we cannot track their lifetimes correctly;
3558 hard regs shouldn't appear here except as return values.
3559 We never want to emit such a clobber after reload. */
3560 if (x != y
3561 && ! (reload_in_progress || reload_completed)
3562 && need_clobber != 0)
3563 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3565 emit_insn (seq);
3567 return last_insn;
3569 else
3570 abort ();
3573 /* If Y is representable exactly in a narrower mode, and the target can
3574 perform the extension directly from constant or memory, then emit the
3575 move as an extension. */
3577 static rtx
3578 compress_float_constant (x, y)
3579 rtx x, y;
3581 enum machine_mode dstmode = GET_MODE (x);
3582 enum machine_mode orig_srcmode = GET_MODE (y);
3583 enum machine_mode srcmode;
3584 REAL_VALUE_TYPE r;
3586 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3588 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3589 srcmode != orig_srcmode;
3590 srcmode = GET_MODE_WIDER_MODE (srcmode))
3592 enum insn_code ic;
3593 rtx trunc_y, last_insn;
3595 /* Skip if the target can't extend this way. */
3596 ic = can_extend_p (dstmode, srcmode, 0);
3597 if (ic == CODE_FOR_nothing)
3598 continue;
3600 /* Skip if the narrowed value isn't exact. */
3601 if (! exact_real_truncate (srcmode, &r))
3602 continue;
3604 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3606 if (LEGITIMATE_CONSTANT_P (trunc_y))
3608 /* Skip if the target needs extra instructions to perform
3609 the extension. */
3610 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3611 continue;
3613 else if (float_extend_from_mem[dstmode][srcmode])
3614 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3615 else
3616 continue;
3618 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3619 last_insn = get_last_insn ();
3621 if (GET_CODE (x) == REG)
3622 REG_NOTES (last_insn)
3623 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3625 return last_insn;
3628 return NULL_RTX;
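/* Worked example (illustrative only): for a target that can float_extend
   SFmode to DFmode directly from a constant or from memory, moving the
   DFmode constant 0.5 goes through the loop above as follows: 0.5 truncates
   to SFmode exactly, so TRUNC_Y becomes an SFmode constant (forced into the
   constant pool if needed) and the move is emitted as a single extension,
   halving the size of the pool entry.  */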
3631 /* Pushing data onto the stack. */
3633 /* Push a block of length SIZE (perhaps variable)
3634 and return an rtx to address the beginning of the block.
3635 Note that it is not possible for the value returned to be a QUEUED.
3636 The value may be virtual_outgoing_args_rtx.
3638 EXTRA is the number of bytes of padding to push in addition to SIZE.
3639 BELOW nonzero means this padding comes at low addresses;
3640 otherwise, the padding comes at high addresses. */
3643 push_block (size, extra, below)
3644 rtx size;
3645 int extra, below;
3647 rtx temp;
3649 size = convert_modes (Pmode, ptr_mode, size, 1);
3650 if (CONSTANT_P (size))
3651 anti_adjust_stack (plus_constant (size, extra));
3652 else if (GET_CODE (size) == REG && extra == 0)
3653 anti_adjust_stack (size);
3654 else
3656 temp = copy_to_mode_reg (Pmode, size);
3657 if (extra != 0)
3658 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3659 temp, 0, OPTAB_LIB_WIDEN);
3660 anti_adjust_stack (temp);
3663 #ifndef STACK_GROWS_DOWNWARD
3664 if (0)
3665 #else
3666 if (1)
3667 #endif
3669 temp = virtual_outgoing_args_rtx;
3670 if (extra != 0 && below)
3671 temp = plus_constant (temp, extra);
3673 else
3675 if (GET_CODE (size) == CONST_INT)
3676 temp = plus_constant (virtual_outgoing_args_rtx,
3677 -INTVAL (size) - (below ? 0 : extra));
3678 else if (extra != 0 && !below)
3679 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3680 negate_rtx (Pmode, plus_constant (size, extra)));
3681 else
3682 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3683 negate_rtx (Pmode, size));
3686 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
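/* Illustrative usage sketch only (not part of this file): reserving 32 bytes
   of stack with push_block and forming a BLKmode MEM over it, much as
   emit_push_insn does below when no argument block has been preallocated.  */
#if 0
static void
example_push_block ()
{
  rtx addr = push_block (GEN_INT (32), 0, 0);
  rtx target = gen_rtx_MEM (BLKmode, addr);

  /* TARGET can now be filled with emit_block_move or a similar helper.  */
  (void) target;
}
#endif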
3689 #ifdef PUSH_ROUNDING
3691 /* Emit single push insn. */
3693 static void
3694 emit_single_push_insn (mode, x, type)
3695 rtx x;
3696 enum machine_mode mode;
3697 tree type;
3699 rtx dest_addr;
3700 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3701 rtx dest;
3702 enum insn_code icode;
3703 insn_operand_predicate_fn pred;
3705 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3706 /* If there is a push pattern, use it. Otherwise fall back to the old way
3707 of handing a MEM that represents the push operation to the move expander. */
3708 icode = push_optab->handlers[(int) mode].insn_code;
3709 if (icode != CODE_FOR_nothing)
3711 if (((pred = insn_data[(int) icode].operand[0].predicate)
3712 && !((*pred) (x, mode))))
3713 x = force_reg (mode, x);
3714 emit_insn (GEN_FCN (icode) (x));
3715 return;
3717 if (GET_MODE_SIZE (mode) == rounded_size)
3718 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3719 else
3721 #ifdef STACK_GROWS_DOWNWARD
3722 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3723 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3724 #else
3725 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3726 GEN_INT (rounded_size));
3727 #endif
3728 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3731 dest = gen_rtx_MEM (mode, dest_addr);
3733 if (type != 0)
3735 set_mem_attributes (dest, type, 1);
3737 if (flag_optimize_sibling_calls)
3738 /* Function incoming arguments may overlap with sibling call
3739 outgoing arguments and we cannot allow reordering of reads
3740 from function arguments with stores to outgoing arguments
3741 of sibling calls. */
3742 set_mem_alias_set (dest, 0);
3744 emit_move_insn (dest, x);
3746 #endif
3748 /* Generate code to push X onto the stack, assuming it has mode MODE and
3749 type TYPE.
3750 MODE is redundant except when X is a CONST_INT (since they don't
3751 carry mode info).
3752 SIZE is an rtx for the size of data to be copied (in bytes),
3753 needed only if X is BLKmode.
3755 ALIGN (in bits) is maximum alignment we can assume.
3757 If PARTIAL and REG are both nonzero, then copy that many of the first
3758 words of X into registers starting with REG, and push the rest of X.
3759 The amount of space pushed is decreased by PARTIAL words,
3760 rounded *down* to a multiple of PARM_BOUNDARY.
3761 REG must be a hard register in this case.
3762 If REG is zero but PARTIAL is not, take all other actions for an
3763 argument partially in registers, but do not actually load any
3764 registers.
3766 EXTRA is the amount in bytes of extra space to leave next to this arg.
3767 This is ignored if an argument block has already been allocated.
3769 On a machine that lacks real push insns, ARGS_ADDR is the address of
3770 the bottom of the argument block for this call. We use indexing off there
3771 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3772 argument block has not been preallocated.
3774 ARGS_SO_FAR is the size of args previously pushed for this call.
3776 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3777 for arguments passed in registers. If nonzero, it will be the number
3778 of bytes required. */
3780 void
3781 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3782 args_addr, args_so_far, reg_parm_stack_space,
3783 alignment_pad)
3784 rtx x;
3785 enum machine_mode mode;
3786 tree type;
3787 rtx size;
3788 unsigned int align;
3789 int partial;
3790 rtx reg;
3791 int extra;
3792 rtx args_addr;
3793 rtx args_so_far;
3794 int reg_parm_stack_space;
3795 rtx alignment_pad;
3797 rtx xinner;
3798 enum direction stack_direction
3799 #ifdef STACK_GROWS_DOWNWARD
3800 = downward;
3801 #else
3802 = upward;
3803 #endif
3805 /* Decide where to pad the argument: `downward' for below,
3806 `upward' for above, or `none' for don't pad it.
3807 Default is below for small data on big-endian machines; else above. */
3808 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3810 /* Invert direction if stack is post-decrement.
3811 FIXME: why? */
3812 if (STACK_PUSH_CODE == POST_DEC)
3813 if (where_pad != none)
3814 where_pad = (where_pad == downward ? upward : downward);
3816 xinner = x = protect_from_queue (x, 0);
3818 if (mode == BLKmode)
3820 /* Copy a block into the stack, entirely or partially. */
3822 rtx temp;
3823 int used = partial * UNITS_PER_WORD;
3824 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3825 int skip;
3827 if (size == 0)
3828 abort ();
3830 used -= offset;
3832 /* USED is now the # of bytes we need not copy to the stack
3833 because registers will take care of them. */
3835 if (partial != 0)
3836 xinner = adjust_address (xinner, BLKmode, used);
3838 /* If the partial register-part of the arg counts in its stack size,
3839 skip the part of stack space corresponding to the registers.
3840 Otherwise, start copying to the beginning of the stack space,
3841 by setting SKIP to 0. */
3842 skip = (reg_parm_stack_space == 0) ? 0 : used;
3844 #ifdef PUSH_ROUNDING
3845 /* Do it with several push insns if that doesn't take lots of insns
3846 and if there is no difficulty with push insns that skip bytes
3847 on the stack for alignment purposes. */
3848 if (args_addr == 0
3849 && PUSH_ARGS
3850 && GET_CODE (size) == CONST_INT
3851 && skip == 0
3852 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3853 /* Here we avoid the case of a structure whose weak alignment
3854 forces many pushes of a small amount of data,
3855 and such small pushes do rounding that causes trouble. */
3856 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3857 || align >= BIGGEST_ALIGNMENT
3858 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3859 == (align / BITS_PER_UNIT)))
3860 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3862 /* Push padding now if padding above and stack grows down,
3863 or if padding below and stack grows up.
3864 But if space already allocated, this has already been done. */
3865 if (extra && args_addr == 0
3866 && where_pad != none && where_pad != stack_direction)
3867 anti_adjust_stack (GEN_INT (extra));
3869 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3871 else
3872 #endif /* PUSH_ROUNDING */
3874 rtx target;
3876 /* Otherwise make space on the stack and copy the data
3877 to the address of that space. */
3879 /* Deduct words put into registers from the size we must copy. */
3880 if (partial != 0)
3882 if (GET_CODE (size) == CONST_INT)
3883 size = GEN_INT (INTVAL (size) - used);
3884 else
3885 size = expand_binop (GET_MODE (size), sub_optab, size,
3886 GEN_INT (used), NULL_RTX, 0,
3887 OPTAB_LIB_WIDEN);
3890 /* Get the address of the stack space.
3891 In this case, we do not deal with EXTRA separately.
3892 A single stack adjust will do. */
3893 if (! args_addr)
3895 temp = push_block (size, extra, where_pad == downward);
3896 extra = 0;
3898 else if (GET_CODE (args_so_far) == CONST_INT)
3899 temp = memory_address (BLKmode,
3900 plus_constant (args_addr,
3901 skip + INTVAL (args_so_far)));
3902 else
3903 temp = memory_address (BLKmode,
3904 plus_constant (gen_rtx_PLUS (Pmode,
3905 args_addr,
3906 args_so_far),
3907 skip));
3909 if (!ACCUMULATE_OUTGOING_ARGS)
3911 /* If the source is referenced relative to the stack pointer,
3912 copy it to another register to stabilize it. We do not need
3913 to do this if we know that we won't be changing sp. */
3915 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3916 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3917 temp = copy_to_reg (temp);
3920 target = gen_rtx_MEM (BLKmode, temp);
3922 if (type != 0)
3924 set_mem_attributes (target, type, 1);
3925 /* Function incoming arguments may overlap with sibling call
3926 outgoing arguments and we cannot allow reordering of reads
3927 from function arguments with stores to outgoing arguments
3928 of sibling calls. */
3929 set_mem_alias_set (target, 0);
3932 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3933 PARM_BOUNDARY. Assume the caller isn't lying. */
3934 set_mem_align (target, align);
3936 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3939 else if (partial > 0)
3941 /* Scalar partly in registers. */
3943 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3944 int i;
3945 int not_stack;
3946 /* # words of start of argument
3947 that we must make space for but need not store. */
3948 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3949 int args_offset = INTVAL (args_so_far);
3950 int skip;
3952 /* Push padding now if padding above and stack grows down,
3953 or if padding below and stack grows up.
3954 But if space already allocated, this has already been done. */
3955 if (extra && args_addr == 0
3956 && where_pad != none && where_pad != stack_direction)
3957 anti_adjust_stack (GEN_INT (extra));
3959 /* If we make space by pushing it, we might as well push
3960 the real data. Otherwise, we can leave OFFSET nonzero
3961 and leave the space uninitialized. */
3962 if (args_addr == 0)
3963 offset = 0;
3965 /* Now NOT_STACK gets the number of words that we don't need to
3966 allocate on the stack. */
3967 not_stack = partial - offset;
3969 /* If the partial register-part of the arg counts in its stack size,
3970 skip the part of stack space corresponding to the registers.
3971 Otherwise, start copying to the beginning of the stack space,
3972 by setting SKIP to 0. */
3973 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3975 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3976 x = validize_mem (force_const_mem (mode, x));
3978 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3979 SUBREGs of such registers are not allowed. */
3980 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3981 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3982 x = copy_to_reg (x);
3984 /* Loop over all the words allocated on the stack for this arg. */
3985 /* We can do it by words, because any scalar bigger than a word
3986 has a size a multiple of a word. */
3987 #ifndef PUSH_ARGS_REVERSED
3988 for (i = not_stack; i < size; i++)
3989 #else
3990 for (i = size - 1; i >= not_stack; i--)
3991 #endif
3992 if (i >= not_stack + offset)
3993 emit_push_insn (operand_subword_force (x, i, mode),
3994 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3995 0, args_addr,
3996 GEN_INT (args_offset + ((i - not_stack + skip)
3997 * UNITS_PER_WORD)),
3998 reg_parm_stack_space, alignment_pad);
4000 else
4002 rtx addr;
4003 rtx dest;
4005 /* Push padding now if padding above and stack grows down,
4006 or if padding below and stack grows up.
4007 But if space already allocated, this has already been done. */
4008 if (extra && args_addr == 0
4009 && where_pad != none && where_pad != stack_direction)
4010 anti_adjust_stack (GEN_INT (extra));
4012 #ifdef PUSH_ROUNDING
4013 if (args_addr == 0 && PUSH_ARGS)
4014 emit_single_push_insn (mode, x, type);
4015 else
4016 #endif
4018 if (GET_CODE (args_so_far) == CONST_INT)
4019 addr
4020 = memory_address (mode,
4021 plus_constant (args_addr,
4022 INTVAL (args_so_far)));
4023 else
4024 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4025 args_so_far));
4026 dest = gen_rtx_MEM (mode, addr);
4027 if (type != 0)
4029 set_mem_attributes (dest, type, 1);
4030 /* Function incoming arguments may overlap with sibling call
4031 outgoing arguments and we cannot allow reordering of reads
4032 from function arguments with stores to outgoing arguments
4033 of sibling calls. */
4034 set_mem_alias_set (dest, 0);
4037 emit_move_insn (dest, x);
4041 /* If part should go in registers, copy that part
4042 into the appropriate registers. Do this now, at the end,
4043 since mem-to-mem copies above may do function calls. */
4044 if (partial > 0 && reg != 0)
4046 /* Handle calls that pass values in multiple non-contiguous locations.
4047 The Irix 6 ABI has examples of this. */
4048 if (GET_CODE (reg) == PARALLEL)
4049 emit_group_load (reg, x, -1); /* ??? size? */
4050 else
4051 move_block_to_reg (REGNO (reg), x, partial, mode);
4054 if (extra && args_addr == 0 && where_pad == stack_direction)
4055 anti_adjust_stack (GEN_INT (extra));
4057 if (alignment_pad && args_addr == 0)
4058 anti_adjust_stack (alignment_pad);
4061 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4062 operations. */
4064 static rtx
4065 get_subtarget (x)
4066 rtx x;
4068 return ((x == 0
4069 /* Only registers can be subtargets. */
4070 || GET_CODE (x) != REG
4071 /* If the register is readonly, it can't be set more than once. */
4072 || RTX_UNCHANGING_P (x)
4073 /* Don't use hard regs to avoid extending their life. */
4074 || REGNO (x) < FIRST_PSEUDO_REGISTER
4075 /* Avoid subtargets inside loops,
4076 since they hide some invariant expressions. */
4077 || preserve_subexpressions_p ())
4078 ? 0 : x);
4081 /* Expand an assignment that stores the value of FROM into TO.
4082 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4083 (This may contain a QUEUED rtx;
4084 if the value is constant, this rtx is a constant.)
4085 Otherwise, the returned value is NULL_RTX.
4087 SUGGEST_REG is no longer actually used.
4088 It used to mean, copy the value through a register
4089 and return that register, if that is possible.
4090 We now use WANT_VALUE to decide whether to do this. */
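/* Illustrative example: for a C assignment such as "s.f = x", TO is the
   COMPONENT_REF "s.f" and FROM is "x"; the COMPONENT_REF path below uses
   get_inner_reference to locate the field and then stores the value with
   store_field.  */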
4093 expand_assignment (to, from, want_value, suggest_reg)
4094 tree to, from;
4095 int want_value;
4096 int suggest_reg ATTRIBUTE_UNUSED;
4098 rtx to_rtx = 0;
4099 rtx result;
4101 /* Don't crash if the lhs of the assignment was erroneous. */
4103 if (TREE_CODE (to) == ERROR_MARK)
4105 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4106 return want_value ? result : NULL_RTX;
4109 /* Assignment of a structure component needs special treatment
4110 if the structure component's rtx is not simply a MEM.
4111 Assignment of an array element at a constant index, and assignment of
4112 an array element in an unaligned packed structure field, has the same
4113 problem. */
4115 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4116 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4117 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4119 enum machine_mode mode1;
4120 HOST_WIDE_INT bitsize, bitpos;
4121 rtx orig_to_rtx;
4122 tree offset;
4123 int unsignedp;
4124 int volatilep = 0;
4125 tree tem;
4127 push_temp_slots ();
4128 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4129 &unsignedp, &volatilep);
4131 /* If we are going to use store_bit_field and extract_bit_field,
4132 make sure to_rtx will be safe for multiple use. */
4134 if (mode1 == VOIDmode && want_value)
4135 tem = stabilize_reference (tem);
4137 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4139 if (offset != 0)
4141 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4143 if (GET_CODE (to_rtx) != MEM)
4144 abort ();
4146 #ifdef POINTERS_EXTEND_UNSIGNED
4147 if (GET_MODE (offset_rtx) != Pmode)
4148 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4149 #else
4150 if (GET_MODE (offset_rtx) != ptr_mode)
4151 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4152 #endif
4154 /* A constant address in TO_RTX can have VOIDmode; we must not try
4155 to call force_reg for that case. Avoid that case. */
4156 if (GET_CODE (to_rtx) == MEM
4157 && GET_MODE (to_rtx) == BLKmode
4158 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4159 && bitsize > 0
4160 && (bitpos % bitsize) == 0
4161 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4162 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4164 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4165 bitpos = 0;
4168 to_rtx = offset_address (to_rtx, offset_rtx,
4169 highest_pow2_factor_for_type (TREE_TYPE (to),
4170 offset));
4173 if (GET_CODE (to_rtx) == MEM)
4175 /* If the field is at offset zero, we could have been given the
4176 DECL_RTX of the parent struct. Don't munge it. */
4177 to_rtx = shallow_copy_rtx (to_rtx);
4179 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4182 /* Deal with volatile and readonly fields. The former is only done
4183 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4184 if (volatilep && GET_CODE (to_rtx) == MEM)
4186 if (to_rtx == orig_to_rtx)
4187 to_rtx = copy_rtx (to_rtx);
4188 MEM_VOLATILE_P (to_rtx) = 1;
4191 if (TREE_CODE (to) == COMPONENT_REF
4192 && TREE_READONLY (TREE_OPERAND (to, 1)))
4194 if (to_rtx == orig_to_rtx)
4195 to_rtx = copy_rtx (to_rtx);
4196 RTX_UNCHANGING_P (to_rtx) = 1;
4199 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4201 if (to_rtx == orig_to_rtx)
4202 to_rtx = copy_rtx (to_rtx);
4203 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4206 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4207 (want_value
4208 /* Spurious cast for HPUX compiler. */
4209 ? ((enum machine_mode)
4210 TYPE_MODE (TREE_TYPE (to)))
4211 : VOIDmode),
4212 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4214 preserve_temp_slots (result);
4215 free_temp_slots ();
4216 pop_temp_slots ();
4218 /* If the value is meaningful, convert RESULT to the proper mode.
4219 Otherwise, return nothing. */
4220 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4221 TYPE_MODE (TREE_TYPE (from)),
4222 result,
4223 TREE_UNSIGNED (TREE_TYPE (to)))
4224 : NULL_RTX);
4227 /* If the rhs is a function call and its value is not an aggregate,
4228 call the function before we start to compute the lhs.
4229 This is needed for correct code for cases such as
4230 val = setjmp (buf) on machines where reference to val
4231 requires loading up part of an address in a separate insn.
4233 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4234 since it might be a promoted variable where the zero- or sign- extension
4235 needs to be done. Handling this in the normal way is safe because no
4236 computation is done before the call. */
4237 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4238 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4239 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4240 && GET_CODE (DECL_RTL (to)) == REG))
4242 rtx value;
4244 push_temp_slots ();
4245 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4246 if (to_rtx == 0)
4247 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4249 /* Handle calls that return values in multiple non-contiguous locations.
4250 The Irix 6 ABI has examples of this. */
4251 if (GET_CODE (to_rtx) == PARALLEL)
4252 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4253 else if (GET_MODE (to_rtx) == BLKmode)
4254 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4255 else
4257 #ifdef POINTERS_EXTEND_UNSIGNED
4258 if (POINTER_TYPE_P (TREE_TYPE (to))
4259 && GET_MODE (to_rtx) != GET_MODE (value))
4260 value = convert_memory_address (GET_MODE (to_rtx), value);
4261 #endif
4262 emit_move_insn (to_rtx, value);
4264 preserve_temp_slots (to_rtx);
4265 free_temp_slots ();
4266 pop_temp_slots ();
4267 return want_value ? to_rtx : NULL_RTX;
4270 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4271 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4273 if (to_rtx == 0)
4274 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4276 /* Don't move directly into a return register. */
4277 if (TREE_CODE (to) == RESULT_DECL
4278 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4280 rtx temp;
4282 push_temp_slots ();
4283 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4285 if (GET_CODE (to_rtx) == PARALLEL)
4286 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4287 else
4288 emit_move_insn (to_rtx, temp);
4290 preserve_temp_slots (to_rtx);
4291 free_temp_slots ();
4292 pop_temp_slots ();
4293 return want_value ? to_rtx : NULL_RTX;
4296 /* In case we are returning the contents of an object which overlaps
4297 the place the value is being stored, use a safe function when copying
4298 a value through a pointer into a structure value return block. */
4299 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4300 && current_function_returns_struct
4301 && !current_function_returns_pcc_struct)
4303 rtx from_rtx, size;
4305 push_temp_slots ();
4306 size = expr_size (from);
4307 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4309 if (TARGET_MEM_FUNCTIONS)
4310 emit_library_call (memmove_libfunc, LCT_NORMAL,
4311 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4312 XEXP (from_rtx, 0), Pmode,
4313 convert_to_mode (TYPE_MODE (sizetype),
4314 size, TREE_UNSIGNED (sizetype)),
4315 TYPE_MODE (sizetype));
4316 else
4317 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4318 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4319 XEXP (to_rtx, 0), Pmode,
4320 convert_to_mode (TYPE_MODE (integer_type_node),
4321 size,
4322 TREE_UNSIGNED (integer_type_node)),
4323 TYPE_MODE (integer_type_node));
4325 preserve_temp_slots (to_rtx);
4326 free_temp_slots ();
4327 pop_temp_slots ();
4328 return want_value ? to_rtx : NULL_RTX;
4331 /* Compute FROM and store the value in the rtx we got. */
4333 push_temp_slots ();
4334 result = store_expr (from, to_rtx, want_value);
4335 preserve_temp_slots (result);
4336 free_temp_slots ();
4337 pop_temp_slots ();
4338 return want_value ? result : NULL_RTX;
4341 /* Generate code for computing expression EXP,
4342 and storing the value into TARGET.
4343 TARGET may contain a QUEUED rtx.
4345 If WANT_VALUE & 1 is nonzero, return a copy of the value
4346 not in TARGET, so that we can be sure to use the proper
4347 value in a containing expression even if TARGET has something
4348 else stored in it. If possible, we copy the value through a pseudo
4349 and return that pseudo. Or, if the value is constant, we try to
4350 return the constant. In some cases, we return a pseudo
4351 copied *from* TARGET.
4353 If the mode is BLKmode then we may return TARGET itself.
4354 It turns out that in BLKmode it doesn't cause a problem,
4355 because C has no operators that could combine two different
4356 assignments into the same BLKmode object with different values
4357 with no sequence point. Will other languages need this to
4358 be more thorough?
4360 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4361 to catch quickly any cases where the caller uses the value
4362 and fails to set WANT_VALUE.
4364 If WANT_VALUE & 2 is set, this is a store into a call param on the
4365 stack, and block moves may need to be treated specially. */
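/* So WANT_VALUE is effectively a bit mask: bit 0 asks for an rtx for the
   stored value, bit 1 marks a store into a call parameter on the stack.
   The code below tests "want_value & 1" and "want_value & 2"
   accordingly.  */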
4368 store_expr (exp, target, want_value)
4369 tree exp;
4370 rtx target;
4371 int want_value;
4373 rtx temp;
4374 int dont_return_target = 0;
4375 int dont_store_target = 0;
4377 if (VOID_TYPE_P (TREE_TYPE (exp)))
4379 /* C++ can generate ?: expressions with a throw expression in one
4380 branch and an rvalue in the other. Here, we resolve attempts to
4381 store the throw expression's nonexistent result. */
4382 if (want_value)
4383 abort ();
4384 expand_expr (exp, const0_rtx, VOIDmode, 0);
4385 return NULL_RTX;
4387 if (TREE_CODE (exp) == COMPOUND_EXPR)
4389 /* Perform first part of compound expression, then assign from second
4390 part. */
4391 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4392 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4393 emit_queue ();
4394 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4396 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4398 /* For conditional expression, get safe form of the target. Then
4399 test the condition, doing the appropriate assignment on either
4400 side. This avoids the creation of unnecessary temporaries.
4401 For non-BLKmode, it is more efficient not to do this. */
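/* Sketch of the code emitted below:

     if (!cond) goto lab1;
     <store operand 1 into TARGET>
     goto lab2;
   lab1:
     <store operand 2 into TARGET>
   lab2:  */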
4403 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4405 emit_queue ();
4406 target = protect_from_queue (target, 1);
4408 do_pending_stack_adjust ();
4409 NO_DEFER_POP;
4410 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4411 start_cleanup_deferral ();
4412 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4413 end_cleanup_deferral ();
4414 emit_queue ();
4415 emit_jump_insn (gen_jump (lab2));
4416 emit_barrier ();
4417 emit_label (lab1);
4418 start_cleanup_deferral ();
4419 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4420 end_cleanup_deferral ();
4421 emit_queue ();
4422 emit_label (lab2);
4423 OK_DEFER_POP;
4425 return want_value & 1 ? target : NULL_RTX;
4427 else if (queued_subexp_p (target))
4428 /* If target contains a postincrement, let's not risk
4429 using it as the place to generate the rhs. */
4431 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4433 /* Expand EXP into a new pseudo. */
4434 temp = gen_reg_rtx (GET_MODE (target));
4435 temp = expand_expr (exp, temp, GET_MODE (target),
4436 (want_value & 2
4437 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4439 else
4440 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4441 (want_value & 2
4442 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4444 /* If target is volatile, ANSI requires accessing the value
4445 *from* the target, if it is accessed. So make that happen.
4446 In no case return the target itself. */
4447 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4448 dont_return_target = 1;
4450 else if ((want_value & 1) != 0
4451 && GET_CODE (target) == MEM
4452 && ! MEM_VOLATILE_P (target)
4453 && GET_MODE (target) != BLKmode)
4454 /* If target is in memory and caller wants value in a register instead,
4455 arrange that. Pass TARGET as target for expand_expr so that,
4456 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4457 We know expand_expr will not use the target in that case.
4458 Don't do this if TARGET is volatile because we are supposed
4459 to write it and then read it. */
4461 temp = expand_expr (exp, target, GET_MODE (target),
4462 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4463 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4465 /* If TEMP is already in the desired TARGET, only copy it from
4466 memory and don't store it there again. */
4467 if (temp == target
4468 || (rtx_equal_p (temp, target)
4469 && ! side_effects_p (temp) && ! side_effects_p (target)))
4470 dont_store_target = 1;
4471 temp = copy_to_reg (temp);
4473 dont_return_target = 1;
4475 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4476 /* If this is a scalar in a register that is stored in a wider mode
4477 than the declared mode, compute the result into its declared mode
4478 and then convert to the wider mode. Our value is the computed
4479 expression. */
4481 rtx inner_target = 0;
4483 /* If we don't want a value, we can do the conversion inside EXP,
4484 which will often result in some optimizations. Do the conversion
4485 in two steps: first change the signedness, if needed, then
4486 the extend. But don't do this if the type of EXP is a subtype
4487 of something else since then the conversion might involve
4488 more than just converting modes. */
4489 if ((want_value & 1) == 0
4490 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4491 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4493 if (TREE_UNSIGNED (TREE_TYPE (exp))
4494 != SUBREG_PROMOTED_UNSIGNED_P (target))
4495 exp = convert
4496 ((*lang_hooks.types.signed_or_unsigned_type)
4497 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4499 exp = convert ((*lang_hooks.types.type_for_mode)
4500 (GET_MODE (SUBREG_REG (target)),
4501 SUBREG_PROMOTED_UNSIGNED_P (target)),
4502 exp);
4504 inner_target = SUBREG_REG (target);
4507 temp = expand_expr (exp, inner_target, VOIDmode,
4508 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4510 /* If TEMP is a MEM and we want a result value, make the access
4511 now so it gets done only once. Strictly speaking, this is
4512 only necessary if the MEM is volatile, or if the address
4513 overlaps TARGET. But not performing the load twice also
4514 reduces the amount of rtl we generate and then have to CSE. */
4515 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4516 temp = copy_to_reg (temp);
4518 /* If TEMP is a VOIDmode constant, use convert_modes to make
4519 sure that we properly convert it. */
4520 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4522 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4523 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4524 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4525 GET_MODE (target), temp,
4526 SUBREG_PROMOTED_UNSIGNED_P (target));
4529 convert_move (SUBREG_REG (target), temp,
4530 SUBREG_PROMOTED_UNSIGNED_P (target));
4532 /* If we promoted a constant, change the mode back down to match
4533 target. Otherwise, the caller might get confused by a result whose
4534 mode is larger than expected. */
4536 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4538 if (GET_MODE (temp) != VOIDmode)
4540 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4541 SUBREG_PROMOTED_VAR_P (temp) = 1;
4542 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4543 SUBREG_PROMOTED_UNSIGNED_P (target));
4545 else
4546 temp = convert_modes (GET_MODE (target),
4547 GET_MODE (SUBREG_REG (target)),
4548 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4551 return want_value & 1 ? temp : NULL_RTX;
4553 else
4555 temp = expand_expr (exp, target, GET_MODE (target),
4556 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4557 /* Return TARGET if it's a specified hardware register.
4558 If TARGET is a volatile mem ref, either return TARGET
4559 or return a reg copied *from* TARGET; ANSI requires this.
4561 Otherwise, if TEMP is not TARGET, return TEMP
4562 if it is constant (for efficiency),
4563 or if we really want the correct value. */
4564 if (!(target && GET_CODE (target) == REG
4565 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4566 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4567 && ! rtx_equal_p (temp, target)
4568 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4569 dont_return_target = 1;
4572 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4573 the same as that of TARGET, adjust the constant. This is needed, for
4574 example, in case it is a CONST_DOUBLE and we want only a word-sized
4575 value. */
4576 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4577 && TREE_CODE (exp) != ERROR_MARK
4578 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4579 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4580 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4582 /* If value was not generated in the target, store it there.
4583 Convert the value to TARGET's type first if necessary.
4584 If TEMP and TARGET compare equal according to rtx_equal_p, but
4585 one or both of them are volatile memory refs, we have to distinguish
4586 two cases:
4587 - expand_expr has used TARGET. In this case, we must not generate
4588 another copy. This can be detected by TARGET being equal according
4589 to == .
4590 - expand_expr has not used TARGET - that means that the source just
4591 happens to have the same RTX form. Since temp will have been created
4592 by expand_expr, it will compare unequal according to == .
4593 We must generate a copy in this case, to reach the correct number
4594 of volatile memory references. */
4596 if ((! rtx_equal_p (temp, target)
4597 || (temp != target && (side_effects_p (temp)
4598 || side_effects_p (target))))
4599 && TREE_CODE (exp) != ERROR_MARK
4600 && ! dont_store_target
4601 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4602 but TARGET is not valid memory reference, TEMP will differ
4603 from TARGET although it is really the same location. */
4604 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4605 || target != DECL_RTL_IF_SET (exp))
4606 /* If there's nothing to copy, don't bother. Don't call expr_size
4607 unless necessary, because some front ends (e.g. C++) have an expr_size
4608 hook that aborts on objects that are not supposed to be bit-copied or
4609 bit-initialized. */
4610 && expr_size (exp) != const0_rtx)
4612 target = protect_from_queue (target, 1);
4613 if (GET_MODE (temp) != GET_MODE (target)
4614 && GET_MODE (temp) != VOIDmode)
4616 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4617 if (dont_return_target)
4619 /* In this case, we will return TEMP,
4620 so make sure it has the proper mode.
4621 But don't forget to store the value into TARGET. */
4622 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4623 emit_move_insn (target, temp);
4625 else
4626 convert_move (target, temp, unsignedp);
4629 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4631 /* Handle copying a string constant into an array. The string
4632 constant may be shorter than the array. So copy just the string's
4633 actual length, and clear the rest. First get the size of the data
4634 type of the string, which is actually the size of the target. */
4635 rtx size = expr_size (exp);
4637 if (GET_CODE (size) == CONST_INT
4638 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4639 emit_block_move (target, temp, size,
4640 (want_value & 2
4641 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4642 else
4644 /* Compute the size of the data to copy from the string. */
4645 tree copy_size
4646 = size_binop (MIN_EXPR,
4647 make_tree (sizetype, size),
4648 size_int (TREE_STRING_LENGTH (exp)));
4649 rtx copy_size_rtx
4650 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4651 (want_value & 2
4652 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4653 rtx label = 0;
4655 /* Copy that much. */
4656 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4657 TREE_UNSIGNED (sizetype));
4658 emit_block_move (target, temp, copy_size_rtx,
4659 (want_value & 2
4660 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4662 /* Figure out how much is left in TARGET that we have to clear.
4663 Do all calculations in ptr_mode. */
4664 if (GET_CODE (copy_size_rtx) == CONST_INT)
4666 size = plus_constant (size, -INTVAL (copy_size_rtx));
4667 target = adjust_address (target, BLKmode,
4668 INTVAL (copy_size_rtx));
4670 else
4672 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4673 copy_size_rtx, NULL_RTX, 0,
4674 OPTAB_LIB_WIDEN);
4676 #ifdef POINTERS_EXTEND_UNSIGNED
4677 if (GET_MODE (copy_size_rtx) != Pmode)
4678 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4679 TREE_UNSIGNED (sizetype));
4680 #endif
4682 target = offset_address (target, copy_size_rtx,
4683 highest_pow2_factor (copy_size));
4684 label = gen_label_rtx ();
4685 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4686 GET_MODE (size), 0, label);
4689 if (size != const0_rtx)
4690 clear_storage (target, size);
4692 if (label)
4693 emit_label (label);
4696 /* Handle calls that return values in multiple non-contiguous locations.
4697 The Irix 6 ABI has examples of this. */
4698 else if (GET_CODE (target) == PARALLEL)
4699 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4700 else if (GET_MODE (temp) == BLKmode)
4701 emit_block_move (target, temp, expr_size (exp),
4702 (want_value & 2
4703 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4704 else
4705 emit_move_insn (target, temp);
4708 /* If we don't want a value, return NULL_RTX. */
4709 if ((want_value & 1) == 0)
4710 return NULL_RTX;
4712 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4713 ??? The latter test doesn't seem to make sense. */
4714 else if (dont_return_target && GET_CODE (temp) != MEM)
4715 return temp;
4717 /* Otherwise copy TARGET into a pseudo, but return TARGET itself if it is a hard register or BLKmode. */
4718 else if ((want_value & 1) != 0
4719 && GET_MODE (target) != BLKmode
4720 && ! (GET_CODE (target) == REG
4721 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4722 return copy_to_reg (target);
4724 else
4725 return target;
4728 /* Return 1 if EXP just contains zeros. */
4730 static int
4731 is_zeros_p (exp)
4732 tree exp;
4734 tree elt;
4736 switch (TREE_CODE (exp))
4738 case CONVERT_EXPR:
4739 case NOP_EXPR:
4740 case NON_LVALUE_EXPR:
4741 case VIEW_CONVERT_EXPR:
4742 return is_zeros_p (TREE_OPERAND (exp, 0));
4744 case INTEGER_CST:
4745 return integer_zerop (exp);
4747 case COMPLEX_CST:
4748 return
4749 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4751 case REAL_CST:
4752 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4754 case VECTOR_CST:
4755 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4756 elt = TREE_CHAIN (elt))
4757 if (!is_zeros_p (TREE_VALUE (elt)))
4758 return 0;
4760 return 1;
4762 case CONSTRUCTOR:
4763 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4764 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4765 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4766 if (! is_zeros_p (TREE_VALUE (elt)))
4767 return 0;
4769 return 1;
4771 default:
4772 return 0;
4776 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
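/* For example, the initializer { 0, 0, 0, 5 } has 3 zero elements out
   of 4, so 4 * 3 >= 3 * 4 holds and the predicate returns 1.  */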
4778 static int
4779 mostly_zeros_p (exp)
4780 tree exp;
4782 if (TREE_CODE (exp) == CONSTRUCTOR)
4784 int elts = 0, zeros = 0;
4785 tree elt = CONSTRUCTOR_ELTS (exp);
4786 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4788 /* If there are no ranges of true bits, it is all zero. */
4789 return elt == NULL_TREE;
4791 for (; elt; elt = TREE_CHAIN (elt))
4793 /* We do not handle the case where the index is a RANGE_EXPR,
4794 so the statistic will be somewhat inaccurate.
4795 We do make a more accurate count in store_constructor itself,
4796 and since this function is only used for nested array elements,
4797 this should be close enough. */
4798 if (mostly_zeros_p (TREE_VALUE (elt)))
4799 zeros++;
4800 elts++;
4803 return 4 * zeros >= 3 * elts;
4806 return is_zeros_p (exp);
4809 /* Helper function for store_constructor.
4810 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4811 TYPE is the type of the CONSTRUCTOR, not the element type.
4812 CLEARED is as for store_constructor.
4813 ALIAS_SET is the alias set to use for any stores.
4815 This provides a recursive shortcut back to store_constructor when it isn't
4816 necessary to go through store_field. This is so that we can pass through
4817 the cleared field to let store_constructor know that we may not have to
4818 clear a substructure if the outer structure has already been cleared. */
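/* In short: when EXP is itself a CONSTRUCTOR that starts on a byte
   boundary (and, for register targets, at bit position 0), recurse into
   store_constructor directly; otherwise fall back to store_field.  */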
4820 static void
4821 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4822 alias_set)
4823 rtx target;
4824 unsigned HOST_WIDE_INT bitsize;
4825 HOST_WIDE_INT bitpos;
4826 enum machine_mode mode;
4827 tree exp, type;
4828 int cleared;
4829 int alias_set;
4831 if (TREE_CODE (exp) == CONSTRUCTOR
4832 && bitpos % BITS_PER_UNIT == 0
4833 /* If we have a nonzero bitpos for a register target, then we just
4834 let store_field do the bitfield handling. This is unlikely to
4835 generate unnecessary clear instructions anyway. */
4836 && (bitpos == 0 || GET_CODE (target) == MEM))
4838 if (GET_CODE (target) == MEM)
4839 target
4840 = adjust_address (target,
4841 GET_MODE (target) == BLKmode
4842 || 0 != (bitpos
4843 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4844 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4847 /* Update the alias set, if required. */
4848 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4849 && MEM_ALIAS_SET (target) != 0)
4851 target = copy_rtx (target);
4852 set_mem_alias_set (target, alias_set);
4855 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4857 else
4858 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4859 alias_set);
4862 /* Store the value of constructor EXP into the rtx TARGET.
4863 TARGET is either a REG or a MEM; we know it cannot conflict, since
4864 safe_from_p has been called.
4865 CLEARED is true if TARGET is known to have been zeroed.
4866 SIZE is the number of bytes of TARGET we are allowed to modify: this
4867 may not be the same as the size of EXP if we are assigning to a field
4868 which has been packed to exclude padding bits. */
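/* Illustrative example: for "struct { int a, b, c; } x = { 1 };" the
   constructor has fewer elements than the type has fields, so the code
   below clears the whole object first and then stores only field "a".  */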
4870 static void
4871 store_constructor (exp, target, cleared, size)
4872 tree exp;
4873 rtx target;
4874 int cleared;
4875 HOST_WIDE_INT size;
4877 tree type = TREE_TYPE (exp);
4878 #ifdef WORD_REGISTER_OPERATIONS
4879 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4880 #endif
4882 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4883 || TREE_CODE (type) == QUAL_UNION_TYPE)
4885 tree elt;
4887 /* We either clear the aggregate or indicate the value is dead. */
4888 if ((TREE_CODE (type) == UNION_TYPE
4889 || TREE_CODE (type) == QUAL_UNION_TYPE)
4890 && ! cleared
4891 && ! CONSTRUCTOR_ELTS (exp))
4892 /* If the constructor is empty, clear the union. */
4894 clear_storage (target, expr_size (exp));
4895 cleared = 1;
4898 /* If we are building a static constructor into a register,
4899 set the initial value as zero so we can fold the value into
4900 a constant. But if more than one register is involved,
4901 this probably loses. */
4902 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4903 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4905 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4906 cleared = 1;
4909 /* If the constructor has fewer fields than the structure
4910 or if we are initializing the structure to mostly zeros,
4911 clear the whole structure first. Don't do this if TARGET is a
4912 register whose mode size isn't equal to SIZE since clear_storage
4913 can't handle this case. */
4914 else if (! cleared && size > 0
4915 && ((list_length (CONSTRUCTOR_ELTS (exp))
4916 != fields_length (type))
4917 || mostly_zeros_p (exp))
4918 && (GET_CODE (target) != REG
4919 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4920 == size)))
4922 rtx xtarget = target;
4924 if (readonly_fields_p (type))
4926 xtarget = copy_rtx (xtarget);
4927 RTX_UNCHANGING_P (xtarget) = 1;
4930 clear_storage (xtarget, GEN_INT (size));
4931 cleared = 1;
4934 if (! cleared)
4935 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4937 /* Store each element of the constructor into
4938 the corresponding field of TARGET. */
4940 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4942 tree field = TREE_PURPOSE (elt);
4943 tree value = TREE_VALUE (elt);
4944 enum machine_mode mode;
4945 HOST_WIDE_INT bitsize;
4946 HOST_WIDE_INT bitpos = 0;
4947 tree offset;
4948 rtx to_rtx = target;
4950 /* Just ignore missing fields.
4951 We cleared the whole structure, above,
4952 if any fields are missing. */
4953 if (field == 0)
4954 continue;
4956 if (cleared && is_zeros_p (value))
4957 continue;
4959 if (host_integerp (DECL_SIZE (field), 1))
4960 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4961 else
4962 bitsize = -1;
4964 mode = DECL_MODE (field);
4965 if (DECL_BIT_FIELD (field))
4966 mode = VOIDmode;
4968 offset = DECL_FIELD_OFFSET (field);
4969 if (host_integerp (offset, 0)
4970 && host_integerp (bit_position (field), 0))
4972 bitpos = int_bit_position (field);
4973 offset = 0;
4975 else
4976 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4978 if (offset)
4980 rtx offset_rtx;
4982 if (contains_placeholder_p (offset))
4983 offset = build (WITH_RECORD_EXPR, sizetype,
4984 offset, make_tree (TREE_TYPE (exp), target));
4986 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4987 if (GET_CODE (to_rtx) != MEM)
4988 abort ();
4990 #ifdef POINTERS_EXTEND_UNSIGNED
4991 if (GET_MODE (offset_rtx) != Pmode)
4992 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4993 #else
4994 if (GET_MODE (offset_rtx) != ptr_mode)
4995 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4996 #endif
4998 to_rtx = offset_address (to_rtx, offset_rtx,
4999 highest_pow2_factor (offset));
5002 if (TREE_READONLY (field))
5004 if (GET_CODE (to_rtx) == MEM)
5005 to_rtx = copy_rtx (to_rtx);
5007 RTX_UNCHANGING_P (to_rtx) = 1;
5010 #ifdef WORD_REGISTER_OPERATIONS
5011 /* If this initializes a field that is smaller than a word, at the
5012 start of a word, try to widen it to a full word.
5013 This special case allows us to output C++ member function
5014 initializations in a form that the optimizers can understand. */
5015 if (GET_CODE (target) == REG
5016 && bitsize < BITS_PER_WORD
5017 && bitpos % BITS_PER_WORD == 0
5018 && GET_MODE_CLASS (mode) == MODE_INT
5019 && TREE_CODE (value) == INTEGER_CST
5020 && exp_size >= 0
5021 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5023 tree type = TREE_TYPE (value);
5025 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5027 type = (*lang_hooks.types.type_for_size)
5028 (BITS_PER_WORD, TREE_UNSIGNED (type));
5029 value = convert (type, value);
5032 if (BYTES_BIG_ENDIAN)
5033 value
5034 = fold (build (LSHIFT_EXPR, type, value,
5035 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5036 bitsize = BITS_PER_WORD;
5037 mode = word_mode;
5039 #endif
5041 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5042 && DECL_NONADDRESSABLE_P (field))
5044 to_rtx = copy_rtx (to_rtx);
5045 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5048 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5049 value, type, cleared,
5050 get_alias_set (TREE_TYPE (field)));
5053 else if (TREE_CODE (type) == ARRAY_TYPE
5054 || TREE_CODE (type) == VECTOR_TYPE)
5056 tree elt;
5057 int i;
5058 int need_to_clear;
5059 tree domain = TYPE_DOMAIN (type);
5060 tree elttype = TREE_TYPE (type);
5061 int const_bounds_p;
5062 HOST_WIDE_INT minelt = 0;
5063 HOST_WIDE_INT maxelt = 0;
5065 /* Vectors are like arrays, but the domain is stored via an array
5066 type indirectly. */
5067 if (TREE_CODE (type) == VECTOR_TYPE)
5069 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5070 the same field as TYPE_DOMAIN, we are not guaranteed that
5071 it always will. */
5072 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5073 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5076 const_bounds_p = (TYPE_MIN_VALUE (domain)
5077 && TYPE_MAX_VALUE (domain)
5078 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5079 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5081 /* If we have constant bounds for the range of the type, get them. */
5082 if (const_bounds_p)
5084 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5085 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5088 /* If the constructor has fewer elements than the array,
5089 clear the whole array first. Similarly if this is
5090 a static constructor of a non-BLKmode object. */
5091 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5092 need_to_clear = 1;
5093 else
5095 HOST_WIDE_INT count = 0, zero_count = 0;
5096 need_to_clear = ! const_bounds_p;
5098 /* This loop is a more accurate version of the loop in
5099 mostly_zeros_p (it handles RANGE_EXPR in an index).
5100 It is also needed to check for missing elements. */
5101 for (elt = CONSTRUCTOR_ELTS (exp);
5102 elt != NULL_TREE && ! need_to_clear;
5103 elt = TREE_CHAIN (elt))
5105 tree index = TREE_PURPOSE (elt);
5106 HOST_WIDE_INT this_node_count;
5108 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5110 tree lo_index = TREE_OPERAND (index, 0);
5111 tree hi_index = TREE_OPERAND (index, 1);
5113 if (! host_integerp (lo_index, 1)
5114 || ! host_integerp (hi_index, 1))
5116 need_to_clear = 1;
5117 break;
5120 this_node_count = (tree_low_cst (hi_index, 1)
5121 - tree_low_cst (lo_index, 1) + 1);
5123 else
5124 this_node_count = 1;
5126 count += this_node_count;
5127 if (mostly_zeros_p (TREE_VALUE (elt)))
5128 zero_count += this_node_count;
5131 /* Clear the entire array first if there are any missing elements,
5132 or if the incidence of zero elements is >= 75%. */
5133 if (! need_to_clear
5134 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5135 need_to_clear = 1;
5138 if (need_to_clear && size > 0)
5140 if (! cleared)
5142 if (REG_P (target))
5143 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5144 else
5145 clear_storage (target, GEN_INT (size));
5147 cleared = 1;
5149 else if (REG_P (target))
5150 /* Inform later passes that the old value is dead. */
5151 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5153 /* Store each element of the constructor into
5154 the corresponding element of TARGET, determined
5155 by counting the elements. */
5156 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5157 elt;
5158 elt = TREE_CHAIN (elt), i++)
5160 enum machine_mode mode;
5161 HOST_WIDE_INT bitsize;
5162 HOST_WIDE_INT bitpos;
5163 int unsignedp;
5164 tree value = TREE_VALUE (elt);
5165 tree index = TREE_PURPOSE (elt);
5166 rtx xtarget = target;
5168 if (cleared && is_zeros_p (value))
5169 continue;
5171 unsignedp = TREE_UNSIGNED (elttype);
5172 mode = TYPE_MODE (elttype);
5173 if (mode == BLKmode)
5174 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5175 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5176 : -1);
5177 else
5178 bitsize = GET_MODE_BITSIZE (mode);
5180 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5182 tree lo_index = TREE_OPERAND (index, 0);
5183 tree hi_index = TREE_OPERAND (index, 1);
5184 rtx index_r, pos_rtx, loop_end;
5185 struct nesting *loop;
5186 HOST_WIDE_INT lo, hi, count;
5187 tree position;
5189 /* If the range is constant and "small", unroll the loop. */
5190 if (const_bounds_p
5191 && host_integerp (lo_index, 0)
5192 && host_integerp (hi_index, 0)
5193 && (lo = tree_low_cst (lo_index, 0),
5194 hi = tree_low_cst (hi_index, 0),
5195 count = hi - lo + 1,
5196 (GET_CODE (target) != MEM
5197 || count <= 2
5198 || (host_integerp (TYPE_SIZE (elttype), 1)
5199 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5200 <= 40 * 8)))))
5202 lo -= minelt; hi -= minelt;
5203 for (; lo <= hi; lo++)
5205 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5207 if (GET_CODE (target) == MEM
5208 && !MEM_KEEP_ALIAS_SET_P (target)
5209 && TREE_CODE (type) == ARRAY_TYPE
5210 && TYPE_NONALIASED_COMPONENT (type))
5212 target = copy_rtx (target);
5213 MEM_KEEP_ALIAS_SET_P (target) = 1;
5216 store_constructor_field
5217 (target, bitsize, bitpos, mode, value, type, cleared,
5218 get_alias_set (elttype));
5221 else
5223 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5224 loop_end = gen_label_rtx ();
5226 unsignedp = TREE_UNSIGNED (domain);
5228 index = build_decl (VAR_DECL, NULL_TREE, domain);
5230 index_r
5231 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5232 &unsignedp, 0));
5233 SET_DECL_RTL (index, index_r);
5234 if (TREE_CODE (value) == SAVE_EXPR
5235 && SAVE_EXPR_RTL (value) == 0)
5237 /* Make sure value gets expanded once before the
5238 loop. */
5239 expand_expr (value, const0_rtx, VOIDmode, 0);
5240 emit_queue ();
5242 store_expr (lo_index, index_r, 0);
5243 loop = expand_start_loop (0);
5245 /* Assign value to element index. */
5246 position
5247 = convert (ssizetype,
5248 fold (build (MINUS_EXPR, TREE_TYPE (index),
5249 index, TYPE_MIN_VALUE (domain))));
5250 position = size_binop (MULT_EXPR, position,
5251 convert (ssizetype,
5252 TYPE_SIZE_UNIT (elttype)));
5254 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5255 xtarget = offset_address (target, pos_rtx,
5256 highest_pow2_factor (position));
5257 xtarget = adjust_address (xtarget, mode, 0);
5258 if (TREE_CODE (value) == CONSTRUCTOR)
5259 store_constructor (value, xtarget, cleared,
5260 bitsize / BITS_PER_UNIT);
5261 else
5262 store_expr (value, xtarget, 0);
5264 expand_exit_loop_if_false (loop,
5265 build (LT_EXPR, integer_type_node,
5266 index, hi_index));
5268 expand_increment (build (PREINCREMENT_EXPR,
5269 TREE_TYPE (index),
5270 index, integer_one_node), 0, 0);
5271 expand_end_loop ();
5272 emit_label (loop_end);
5275 else if ((index != 0 && ! host_integerp (index, 0))
5276 || ! host_integerp (TYPE_SIZE (elttype), 1))
5278 tree position;
5280 if (index == 0)
5281 index = ssize_int (1);
5283 if (minelt)
5284 index = convert (ssizetype,
5285 fold (build (MINUS_EXPR, index,
5286 TYPE_MIN_VALUE (domain))));
5288 position = size_binop (MULT_EXPR, index,
5289 convert (ssizetype,
5290 TYPE_SIZE_UNIT (elttype)));
5291 xtarget = offset_address (target,
5292 expand_expr (position, 0, VOIDmode, 0),
5293 highest_pow2_factor (position));
5294 xtarget = adjust_address (xtarget, mode, 0);
5295 store_expr (value, xtarget, 0);
5297 else
5299 if (index != 0)
5300 bitpos = ((tree_low_cst (index, 0) - minelt)
5301 * tree_low_cst (TYPE_SIZE (elttype), 1));
5302 else
5303 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5305 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5306 && TREE_CODE (type) == ARRAY_TYPE
5307 && TYPE_NONALIASED_COMPONENT (type))
5309 target = copy_rtx (target);
5310 MEM_KEEP_ALIAS_SET_P (target) = 1;
5313 store_constructor_field (target, bitsize, bitpos, mode, value,
5314 type, cleared, get_alias_set (elttype));
5320 /* Set constructor assignments. */
5321 else if (TREE_CODE (type) == SET_TYPE)
5323 tree elt = CONSTRUCTOR_ELTS (exp);
5324 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5325 tree domain = TYPE_DOMAIN (type);
5326 tree domain_min, domain_max, bitlength;
5328 /* The default implementation strategy is to extract the constant
5329 parts of the constructor, use that to initialize the target,
5330 and then "or" in whatever non-constant ranges we need in addition.
5332 If a large set is all zero or all ones, it is
5333 probably better to set it using memset (if available) or bzero.
5334 Also, if a large set has just a single range, it may also be
5335 better to first clear the whole set (using
5336 bzero/memset) and then set only the bits we want. */
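/* Illustrative example: a set constructor whose elements are all
   compile-time constants, e.g. [0..2, 7] in a Pascal-family front end,
   is built by the word-at-a-time loop below, while a range with
   non-constant bounds is "or"ed in at run time via setbits_libfunc.  */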
5338 /* Check for all zeros. */
5339 if (elt == NULL_TREE && size > 0)
5341 if (!cleared)
5342 clear_storage (target, GEN_INT (size));
5343 return;
5346 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5347 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5348 bitlength = size_binop (PLUS_EXPR,
5349 size_diffop (domain_max, domain_min),
5350 ssize_int (1));
5352 nbits = tree_low_cst (bitlength, 1);
5354 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5355 are "complicated" (more than one range), initialize (the
5356 constant parts) by copying from a constant. */
5357 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5358 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5360 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5361 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5362 char *bit_buffer = (char *) alloca (nbits);
5363 HOST_WIDE_INT word = 0;
5364 unsigned int bit_pos = 0;
5365 unsigned int ibit = 0;
5366 unsigned int offset = 0; /* In bytes from beginning of set. */
5368 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5369 for (;;)
5371 if (bit_buffer[ibit])
5373 if (BYTES_BIG_ENDIAN)
5374 word |= (1 << (set_word_size - 1 - bit_pos));
5375 else
5376 word |= 1 << bit_pos;
5379 bit_pos++; ibit++;
5380 if (bit_pos >= set_word_size || ibit == nbits)
5382 if (word != 0 || ! cleared)
5384 rtx datum = GEN_INT (word);
5385 rtx to_rtx;
5387 /* The assumption here is that it is safe to use
5388 XEXP if the set is multi-word, but not if
5389 it's single-word. */
5390 if (GET_CODE (target) == MEM)
5391 to_rtx = adjust_address (target, mode, offset);
5392 else if (offset == 0)
5393 to_rtx = target;
5394 else
5395 abort ();
5396 emit_move_insn (to_rtx, datum);
5399 if (ibit == nbits)
5400 break;
5401 word = 0;
5402 bit_pos = 0;
5403 offset += set_word_size / BITS_PER_UNIT;
5407 else if (!cleared)
5408 /* Don't bother clearing storage if the set is all ones. */
5409 if (TREE_CHAIN (elt) != NULL_TREE
5410 || (TREE_PURPOSE (elt) == NULL_TREE
5411 ? nbits != 1
5412 : ( ! host_integerp (TREE_VALUE (elt), 0)
5413 || ! host_integerp (TREE_PURPOSE (elt), 0)
5414 || (tree_low_cst (TREE_VALUE (elt), 0)
5415 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5416 != (HOST_WIDE_INT) nbits))))
5417 clear_storage (target, expr_size (exp));
5419 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5421 /* Start of range of element or NULL. */
5422 tree startbit = TREE_PURPOSE (elt);
5423 /* End of range of element, or element value. */
5424 tree endbit = TREE_VALUE (elt);
5425 HOST_WIDE_INT startb, endb;
5426 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5428 bitlength_rtx = expand_expr (bitlength,
5429 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5431 /* Handle non-range tuple element like [ expr ]. */
5432 if (startbit == NULL_TREE)
5434 startbit = save_expr (endbit);
5435 endbit = startbit;
5438 startbit = convert (sizetype, startbit);
5439 endbit = convert (sizetype, endbit);
5440 if (! integer_zerop (domain_min))
5442 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5443 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5445 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5446 EXPAND_CONST_ADDRESS);
5447 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5448 EXPAND_CONST_ADDRESS);
5450 if (REG_P (target))
5452 targetx
5453 = assign_temp
5454 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5455 (GET_MODE (target), 0),
5456 TYPE_QUAL_CONST)),
5457 0, 1, 1);
5458 emit_move_insn (targetx, target);
5461 else if (GET_CODE (target) == MEM)
5462 targetx = target;
5463 else
5464 abort ();
5466 /* Optimization: If startbit and endbit are constants divisible
5467 by BITS_PER_UNIT, call memset instead. */
5468 if (TARGET_MEM_FUNCTIONS
5469 && TREE_CODE (startbit) == INTEGER_CST
5470 && TREE_CODE (endbit) == INTEGER_CST
5471 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5472 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5474 emit_library_call (memset_libfunc, LCT_NORMAL,
5475 VOIDmode, 3,
5476 plus_constant (XEXP (targetx, 0),
5477 startb / BITS_PER_UNIT),
5478 Pmode,
5479 constm1_rtx, TYPE_MODE (integer_type_node),
5480 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5481 TYPE_MODE (sizetype));
5483 else
5484 emit_library_call (setbits_libfunc, LCT_NORMAL,
5485 VOIDmode, 4, XEXP (targetx, 0),
5486 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5487 startbit_rtx, TYPE_MODE (sizetype),
5488 endbit_rtx, TYPE_MODE (sizetype));
5490 if (REG_P (target))
5491 emit_move_insn (target, targetx);
5495 else
5496 abort ();
5499 /* Store the value of EXP (an expression tree)
5500 into a subfield of TARGET which has mode MODE and occupies
5501 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5502 If MODE is VOIDmode, it means that we are storing into a bit-field.
5504 If VALUE_MODE is VOIDmode, return nothing in particular.
5505 UNSIGNEDP is not used in this case.
5507 Otherwise, return an rtx for the value stored. This rtx
5508 has mode VALUE_MODE if that is convenient to do.
5509 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5511 TYPE is the type of the underlying object,
5513 ALIAS_SET is the alias set for the destination. This value will
5514 (in general) be different from that for TARGET, since TARGET is a
5515 reference to the containing structure. */
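/* Illustrative example: storing into a C bit-field member declared as
   "int b : 3" reaches this function with MODE == VOIDmode, so the value
   is written with store_bit_field rather than through an ordinary memory
   reference.  */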
5517 static rtx
5518 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5519 alias_set)
5520 rtx target;
5521 HOST_WIDE_INT bitsize;
5522 HOST_WIDE_INT bitpos;
5523 enum machine_mode mode;
5524 tree exp;
5525 enum machine_mode value_mode;
5526 int unsignedp;
5527 tree type;
5528 int alias_set;
5530 HOST_WIDE_INT width_mask = 0;
5532 if (TREE_CODE (exp) == ERROR_MARK)
5533 return const0_rtx;
5535 /* If we have nothing to store, do nothing unless the expression has
5536 side-effects. */
5537 if (bitsize == 0)
5538 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5539 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5540 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5542 /* If we are storing into an unaligned field of an aligned union that is
5543 in a register, we may have the mode of TARGET being an integer mode but
5544 MODE == BLKmode. In that case, get an aligned object whose size and
5545 alignment are the same as TARGET and store TARGET into it (we can avoid
5546 the store if the field being stored is the entire width of TARGET). Then
5547 call ourselves recursively to store the field into a BLKmode version of
5548 that object. Finally, load from the object into TARGET. This is not
5549 very efficient in general, but should only be slightly more expensive
5550 than the otherwise-required unaligned accesses. Perhaps this can be
5551 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5552 twice, once with emit_move_insn and once via store_field. */
5554 if (mode == BLKmode
5555 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5557 rtx object = assign_temp (type, 0, 1, 1);
5558 rtx blk_object = adjust_address (object, BLKmode, 0);
5560 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5561 emit_move_insn (object, target);
5563 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5564 alias_set);
5566 emit_move_insn (target, object);
5568 /* We want to return the BLKmode version of the data. */
5569 return blk_object;
5572 if (GET_CODE (target) == CONCAT)
5574 /* We're storing into a struct containing a single __complex. */
5576 if (bitpos != 0)
5577 abort ();
5578 return store_expr (exp, target, 0);
5581 /* If the structure is in a register or if the component
5582 is a bit field, we cannot use addressing to access it.
5583 Use bit-field techniques or SUBREG to store in it. */
5585 if (mode == VOIDmode
5586 || (mode != BLKmode && ! direct_store[(int) mode]
5587 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5588 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5589 || GET_CODE (target) == REG
5590 || GET_CODE (target) == SUBREG
5591 /* If the field isn't aligned enough to store as an ordinary memref,
5592 store it as a bit field. */
5593 || (mode != BLKmode
5594 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5595 || bitpos % GET_MODE_ALIGNMENT (mode))
5596 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5597 || (bitpos % BITS_PER_UNIT != 0)))
5598 /* If the RHS and field are a constant size and the size of the
5599 RHS isn't the same size as the bitfield, we must use bitfield
5600 operations. */
5601 || (bitsize >= 0
5602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5603 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5605 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5607 /* If BITSIZE is narrower than the size of the type of EXP
5608 we will be narrowing TEMP. Normally, what's wanted are the
5609 low-order bits. However, if EXP's type is a record and this is
5610 a big-endian machine, we want the upper BITSIZE bits.  */
5611 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5612 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5613 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5614 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5615 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5616 - bitsize),
5617 NULL_RTX, 1);
5619 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5620 MODE. */
5621 if (mode != VOIDmode && mode != BLKmode
5622 && mode != TYPE_MODE (TREE_TYPE (exp)))
5623 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5625 /* If the modes of TARGET and TEMP are both BLKmode, both
5626 must be in memory and BITPOS must be aligned on a byte
5627 boundary. If so, we simply do a block copy. */
5628 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5630 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5631 || bitpos % BITS_PER_UNIT != 0)
5632 abort ();
5634 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5635 emit_block_move (target, temp,
5636 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5637 / BITS_PER_UNIT),
5638 BLOCK_OP_NORMAL);
5640 return value_mode == VOIDmode ? const0_rtx : target;
5643 /* Store the value in the bitfield. */
5644 store_bit_field (target, bitsize, bitpos, mode, temp,
5645 int_size_in_bytes (type));
5647 if (value_mode != VOIDmode)
5649 /* The caller wants an rtx for the value.
5650 If possible, avoid refetching from the bitfield itself. */
5651 if (width_mask != 0
5652 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5654 tree count;
5655 enum machine_mode tmode;
5657 tmode = GET_MODE (temp);
5658 if (tmode == VOIDmode)
5659 tmode = value_mode;
5661 if (unsignedp)
5662 return expand_and (tmode, temp,
5663 gen_int_mode (width_mask, tmode),
5664 NULL_RTX);
5666 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5667 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5668 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5671 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5672 NULL_RTX, value_mode, VOIDmode,
5673 int_size_in_bytes (type));
5675 return const0_rtx;
5677 else
5679 rtx addr = XEXP (target, 0);
5680 rtx to_rtx = target;
5682 /* If a value is wanted, it must be the lhs;
5683 so make the address stable for multiple use. */
5685 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5686 && ! CONSTANT_ADDRESS_P (addr)
5687 /* A frame-pointer reference is already stable. */
5688 && ! (GET_CODE (addr) == PLUS
5689 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5690 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5691 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5692 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5694 /* Now build a reference to just the desired component. */
5696 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5698 if (to_rtx == target)
5699 to_rtx = copy_rtx (to_rtx);
5701 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5702 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5703 set_mem_alias_set (to_rtx, alias_set);
5705 return store_expr (exp, to_rtx, value_mode != VOIDmode);
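/* Illustrative sketch only, not part of GCC: the narrowing performed
   above when the caller wants the stored value back, shown on plain
   integers.  For an unsigned field we mask with WIDTH_MASK; for a
   signed field the LSHIFT_EXPR/RSHIFT_EXPR pair amounts to the
   sign-extension below (assuming BITSIZE is smaller than the word
   width).  The function name is hypothetical.  */
#if 0
static HOST_WIDE_INT
sketch_narrow_field (word, bitsize, unsignedp)
     HOST_WIDE_INT word;
     int bitsize, unsignedp;
{
  HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
  HOST_WIDE_INT low = word & mask;

  /* Unsigned fields, or signed fields whose sign bit is clear, are
     just the masked low-order bits.  */
  if (unsignedp || (low & ((HOST_WIDE_INT) 1 << (bitsize - 1))) == 0)
    return low;

  /* Otherwise propagate the field's sign bit into the upper bits.  */
  return low | ~mask;
}
#endif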
5709 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5710 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5711 codes and find the ultimate containing object, which we return.
5713 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5714 bit position, and *PUNSIGNEDP to the signedness of the field.
5715 If the position of the field is variable, we store a tree
5716 giving the variable offset (in units) in *POFFSET.
5717 This offset is in addition to the bit position.
5718 If the position is not variable, we store 0 in *POFFSET.
5720 If any of the extraction expressions is volatile,
5721 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5723 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5724 is a mode that can be used to access the field. In that case, *PBITSIZE
5725 is redundant.
5727 If the field describes a variable-sized object, *PMODE is set to
5728 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5729 this case, but the address of the object can be found. */
5731 tree
5732 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5733 punsignedp, pvolatilep)
5734 tree exp;
5735 HOST_WIDE_INT *pbitsize;
5736 HOST_WIDE_INT *pbitpos;
5737 tree *poffset;
5738 enum machine_mode *pmode;
5739 int *punsignedp;
5740 int *pvolatilep;
5742 tree size_tree = 0;
5743 enum machine_mode mode = VOIDmode;
5744 tree offset = size_zero_node;
5745 tree bit_offset = bitsize_zero_node;
5746 tree placeholder_ptr = 0;
5747 tree tem;
5749 /* First get the mode, signedness, and size. We do this from just the
5750 outermost expression. */
5751 if (TREE_CODE (exp) == COMPONENT_REF)
5753 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5754 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5755 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5757 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5759 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5761 size_tree = TREE_OPERAND (exp, 1);
5762 *punsignedp = TREE_UNSIGNED (exp);
5764 else
5766 mode = TYPE_MODE (TREE_TYPE (exp));
5767 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5769 if (mode == BLKmode)
5770 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5771 else
5772 *pbitsize = GET_MODE_BITSIZE (mode);
5775 if (size_tree != 0)
5777 if (! host_integerp (size_tree, 1))
5778 mode = BLKmode, *pbitsize = -1;
5779 else
5780 *pbitsize = tree_low_cst (size_tree, 1);
5783 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5784 and find the ultimate containing object. */
5785 while (1)
5787 if (TREE_CODE (exp) == BIT_FIELD_REF)
5788 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5789 else if (TREE_CODE (exp) == COMPONENT_REF)
5791 tree field = TREE_OPERAND (exp, 1);
5792 tree this_offset = DECL_FIELD_OFFSET (field);
5794 /* If this field hasn't been filled in yet, don't go
5795 past it. This should only happen when folding expressions
5796 made during type construction. */
5797 if (this_offset == 0)
5798 break;
5799 else if (! TREE_CONSTANT (this_offset)
5800 && contains_placeholder_p (this_offset))
5801 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5803 offset = size_binop (PLUS_EXPR, offset, this_offset);
5804 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5805 DECL_FIELD_BIT_OFFSET (field));
5807 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5810 else if (TREE_CODE (exp) == ARRAY_REF
5811 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5813 tree index = TREE_OPERAND (exp, 1);
5814 tree array = TREE_OPERAND (exp, 0);
5815 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5816 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5817 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5819 /* We assume all arrays have sizes that are a multiple of a byte.
5820 First subtract the lower bound, if any, in the type of the
5821 index, then convert to sizetype and multiply by the size of the
5822 array element. */
5823 if (low_bound != 0 && ! integer_zerop (low_bound))
5824 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5825 index, low_bound));
5827 /* If the index has a self-referential type, pass it to a
5828 WITH_RECORD_EXPR; if the component size is self-referential,
5829 pass our component to one. */
5830 if (! TREE_CONSTANT (index)
5831 && contains_placeholder_p (index))
5832 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5833 if (! TREE_CONSTANT (unit_size)
5834 && contains_placeholder_p (unit_size))
5835 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5837 offset = size_binop (PLUS_EXPR, offset,
5838 size_binop (MULT_EXPR,
5839 convert (sizetype, index),
5840 unit_size));
5843 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5845 tree new = find_placeholder (exp, &placeholder_ptr);
5847 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5848 We might have been called from tree optimization where we
5849 haven't set up an object yet. */
5850 if (new == 0)
5851 break;
5852 else
5853 exp = new;
5855 continue;
5858 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5859 conversions that don't change the mode, and all view conversions
5860 except those that need to "step up" the alignment. */
5861 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5862 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5863 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5864 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5865 && STRICT_ALIGNMENT
5866 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5867 < BIGGEST_ALIGNMENT)
5868 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5869 || TYPE_ALIGN_OK (TREE_TYPE
5870 (TREE_OPERAND (exp, 0))))))
5871 && ! ((TREE_CODE (exp) == NOP_EXPR
5872 || TREE_CODE (exp) == CONVERT_EXPR)
5873 && (TYPE_MODE (TREE_TYPE (exp))
5874 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5875 break;
5877 /* If any reference in the chain is volatile, the effect is volatile. */
5878 if (TREE_THIS_VOLATILE (exp))
5879 *pvolatilep = 1;
5881 exp = TREE_OPERAND (exp, 0);
5884 /* If OFFSET is constant, see if we can return the whole thing as a
5885 constant bit position. Otherwise, split it up. */
5886 if (host_integerp (offset, 0)
5887 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5888 bitsize_unit_node))
5889 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5890 && host_integerp (tem, 0))
5891 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5892 else
5893 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5895 *pmode = mode;
5896 return exp;
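/* Illustrative sketch only, not part of GCC: a hypothetical caller
   decomposing a reference such as a.b[i].c with get_inner_reference.
   The variable names are made up; the call mirrors the way expand_expr
   uses this function further below.  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree inner = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);

  /* INNER is the ultimate containing object (e.g. the VAR_DECL for `a');
     the field starts OFFSET units plus BITPOS bits into it and is
     BITSIZE bits wide.  OFFSET is zero when the position is constant.  */
#endif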
5899 /* Return 1 if T is an expression that get_inner_reference handles. */
5902 handled_component_p (t)
5903 tree t;
5905 switch (TREE_CODE (t))
5907 case BIT_FIELD_REF:
5908 case COMPONENT_REF:
5909 case ARRAY_REF:
5910 case ARRAY_RANGE_REF:
5911 case NON_LVALUE_EXPR:
5912 case VIEW_CONVERT_EXPR:
5913 return 1;
5915 case NOP_EXPR:
5916 case CONVERT_EXPR:
5917 return (TYPE_MODE (TREE_TYPE (t))
5918 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5920 default:
5921 return 0;
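/* Illustrative sketch only, not part of GCC: handled_component_p is
   the usual guard when walking from an outer reference down to the
   object that get_inner_reference would return.  A hypothetical caller
   might strip the reference chain like this.  */
#if 0
  tree t = ref;
  while (handled_component_p (t))
    t = TREE_OPERAND (t, 0);
  /* T is now the underlying object (a decl, an INDIRECT_REF, etc.).  */
#endif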
5925 /* Given an rtx VALUE that may contain additions and multiplications, return
5926 an equivalent value that just refers to a register, memory, or constant.
5927 This is done by generating instructions to perform the arithmetic and
5928 returning a pseudo-register containing the value.
5930 The returned value may be a REG, SUBREG, MEM or constant. */
5933 force_operand (value, target)
5934 rtx value, target;
5936 rtx op1, op2;
5937 /* Use subtarget as the target for operand 0 of a binary operation. */
5938 rtx subtarget = get_subtarget (target);
5939 enum rtx_code code = GET_CODE (value);
5941 /* Check for a PIC address load. */
5942 if ((code == PLUS || code == MINUS)
5943 && XEXP (value, 0) == pic_offset_table_rtx
5944 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5945 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5946 || GET_CODE (XEXP (value, 1)) == CONST))
5948 if (!subtarget)
5949 subtarget = gen_reg_rtx (GET_MODE (value));
5950 emit_move_insn (subtarget, value);
5951 return subtarget;
5954 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5956 if (!target)
5957 target = gen_reg_rtx (GET_MODE (value));
5958 convert_move (target, force_operand (XEXP (value, 0), NULL),
5959 code == ZERO_EXTEND);
5960 return target;
5963 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5965 op2 = XEXP (value, 1);
5966 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5967 subtarget = 0;
5968 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5970 code = PLUS;
5971 op2 = negate_rtx (GET_MODE (value), op2);
5974 /* Check for an addition with OP2 a constant integer and our first
5975 operand a PLUS of a virtual register and something else. In that
5976 case, we want to emit the sum of the virtual register and the
5977 constant first and then add the other value. This allows virtual
5978 register instantiation to simply modify the constant rather than
5979 creating another one around this addition. */
5980 if (code == PLUS && GET_CODE (op2) == CONST_INT
5981 && GET_CODE (XEXP (value, 0)) == PLUS
5982 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5983 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5984 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5986 rtx temp = expand_simple_binop (GET_MODE (value), code,
5987 XEXP (XEXP (value, 0), 0), op2,
5988 subtarget, 0, OPTAB_LIB_WIDEN);
5989 return expand_simple_binop (GET_MODE (value), code, temp,
5990 force_operand (XEXP (XEXP (value,
5991 0), 1), 0),
5992 target, 0, OPTAB_LIB_WIDEN);
5995 op1 = force_operand (XEXP (value, 0), subtarget);
5996 op2 = force_operand (op2, NULL_RTX);
5997 switch (code)
5999 case MULT:
6000 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6001 case DIV:
6002 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6003 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6004 target, 1, OPTAB_LIB_WIDEN);
6005 else
6006 return expand_divmod (0,
6007 FLOAT_MODE_P (GET_MODE (value))
6008 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6009 GET_MODE (value), op1, op2, target, 0);
6010 break;
6011 case MOD:
6012 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6013 target, 0);
6014 break;
6015 case UDIV:
6016 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6017 target, 1);
6018 break;
6019 case UMOD:
6020 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6021 target, 1);
6022 break;
6023 case ASHIFTRT:
6024 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6025 target, 0, OPTAB_LIB_WIDEN);
6026 break;
6027 default:
6028 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6029 target, 1, OPTAB_LIB_WIDEN);
6032 if (GET_RTX_CLASS (code) == '1')
6034 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6035 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6038 #ifdef INSN_SCHEDULING
6039 /* On machines that have insn scheduling, we want all memory references to be
6040 explicit, so we need to deal with such paradoxical SUBREGs. */
6041 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6042 && (GET_MODE_SIZE (GET_MODE (value))
6043 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6044 value
6045 = simplify_gen_subreg (GET_MODE (value),
6046 force_reg (GET_MODE (SUBREG_REG (value)),
6047 force_operand (SUBREG_REG (value),
6048 NULL_RTX)),
6049 GET_MODE (SUBREG_REG (value)),
6050 SUBREG_BYTE (value));
6051 #endif
6053 return value;
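/* Illustrative sketch only, not part of GCC: a hypothetical caller
   using force_operand to turn a compound address computation into
   something usable as a memory address.  The multiply/add below is
   emitted as insns and a pseudo (or a simple sum) comes back.  */
#if 0
  rtx idx, addr, mem;

  idx = gen_reg_rtx (Pmode);
  addr = force_operand (gen_rtx_PLUS (Pmode,
				      gen_rtx_MULT (Pmode, idx, GEN_INT (4)),
				      GEN_INT (8)),
			NULL_RTX);
  mem = gen_rtx_MEM (SImode, memory_address (SImode, addr));
#endif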
6056 /* Subroutine of expand_expr: return nonzero iff there is no way that
6057 EXP can reference X, which is being modified. TOP_P is nonzero if this
6058 call is going to be used to determine whether we need a temporary
6059 for EXP, as opposed to a recursive call to this function.
6061 It is always safe for this routine to return zero since it merely
6062 searches for optimization opportunities. */
6065 safe_from_p (x, exp, top_p)
6066 rtx x;
6067 tree exp;
6068 int top_p;
6070 rtx exp_rtl = 0;
6071 int i, nops;
6072 static tree save_expr_list;
6074 if (x == 0
6075 /* If EXP has varying size, we MUST use a target since we currently
6076 have no way of allocating temporaries of variable size
6077 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6078 So we assume here that something at a higher level has prevented a
6079 clash. This is somewhat bogus, but the best we can do. Only
6080 do this when X is BLKmode and when we are at the top level. */
6081 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6082 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6083 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6084 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6085 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6086 != INTEGER_CST)
6087 && GET_MODE (x) == BLKmode)
6088 /* If X is in the outgoing argument area, it is always safe. */
6089 || (GET_CODE (x) == MEM
6090 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6091 || (GET_CODE (XEXP (x, 0)) == PLUS
6092 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6093 return 1;
6095 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6096 find the underlying pseudo. */
6097 if (GET_CODE (x) == SUBREG)
6099 x = SUBREG_REG (x);
6100 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6101 return 0;
6104 /* A SAVE_EXPR might appear many times in the expression passed to the
6105 top-level safe_from_p call, and if it has a complex subexpression,
6106 examining it multiple times could result in a combinatorial explosion.
6107 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6108 with optimization took about 28 minutes to compile -- even though it was
6109 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6110 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6111 we have processed. Note that the only test of top_p was above. */
6113 if (top_p)
6115 int rtn;
6116 tree t;
6118 save_expr_list = 0;
6120 rtn = safe_from_p (x, exp, 0);
6122 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6123 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6125 return rtn;
6128 /* Now look at our tree code and possibly recurse. */
6129 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6131 case 'd':
6132 exp_rtl = DECL_RTL_IF_SET (exp);
6133 break;
6135 case 'c':
6136 return 1;
6138 case 'x':
6139 if (TREE_CODE (exp) == TREE_LIST)
6141 while (1)
6143 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6144 return 0;
6145 exp = TREE_CHAIN (exp);
6146 if (!exp)
6147 return 1;
6148 if (TREE_CODE (exp) != TREE_LIST)
6149 return safe_from_p (x, exp, 0);
6152 else if (TREE_CODE (exp) == ERROR_MARK)
6153 return 1; /* An already-visited SAVE_EXPR? */
6154 else
6155 return 0;
6157 case '2':
6158 case '<':
6159 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6160 return 0;
6161 /* FALLTHRU */
6163 case '1':
6164 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6166 case 'e':
6167 case 'r':
6168 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6169 the expression. If it is set, we conflict iff we are that rtx or
6170 both are in memory. Otherwise, we check all operands of the
6171 expression recursively. */
6173 switch (TREE_CODE (exp))
6175 case ADDR_EXPR:
6176 /* If the operand is static or we are static, we can't conflict.
6177 Likewise if we don't conflict with the operand at all. */
6178 if (staticp (TREE_OPERAND (exp, 0))
6179 || TREE_STATIC (exp)
6180 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6181 return 1;
6183 /* Otherwise, the only way this can conflict is if we are taking
6184 the address of a DECL whose address is part of X, which is
6185 very rare. */
6186 exp = TREE_OPERAND (exp, 0);
6187 if (DECL_P (exp))
6189 if (!DECL_RTL_SET_P (exp)
6190 || GET_CODE (DECL_RTL (exp)) != MEM)
6191 return 0;
6192 else
6193 exp_rtl = XEXP (DECL_RTL (exp), 0);
6195 break;
6197 case INDIRECT_REF:
6198 if (GET_CODE (x) == MEM
6199 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6200 get_alias_set (exp)))
6201 return 0;
6202 break;
6204 case CALL_EXPR:
6205 /* Assume that the call will clobber all hard registers and
6206 all of memory. */
6207 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6208 || GET_CODE (x) == MEM)
6209 return 0;
6210 break;
6212 case RTL_EXPR:
6213 /* If a sequence exists, we would have to scan every instruction
6214 in the sequence to see if it was safe. This is probably not
6215 worthwhile. */
6216 if (RTL_EXPR_SEQUENCE (exp))
6217 return 0;
6219 exp_rtl = RTL_EXPR_RTL (exp);
6220 break;
6222 case WITH_CLEANUP_EXPR:
6223 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6224 break;
6226 case CLEANUP_POINT_EXPR:
6227 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6229 case SAVE_EXPR:
6230 exp_rtl = SAVE_EXPR_RTL (exp);
6231 if (exp_rtl)
6232 break;
6234 /* If we've already scanned this, don't do it again. Otherwise,
6235 show we've scanned it and record for clearing the flag if we're
6236 going on. */
6237 if (TREE_PRIVATE (exp))
6238 return 1;
6240 TREE_PRIVATE (exp) = 1;
6241 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6243 TREE_PRIVATE (exp) = 0;
6244 return 0;
6247 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6248 return 1;
6250 case BIND_EXPR:
6251 /* The only operand we look at is operand 1. The rest aren't
6252 part of the expression. */
6253 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6255 case METHOD_CALL_EXPR:
6256 /* This takes an rtx argument, but shouldn't appear here. */
6257 abort ();
6259 default:
6260 break;
6263 /* If we have an rtx, we do not need to scan our operands. */
6264 if (exp_rtl)
6265 break;
6267 nops = first_rtl_op (TREE_CODE (exp));
6268 for (i = 0; i < nops; i++)
6269 if (TREE_OPERAND (exp, i) != 0
6270 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6271 return 0;
6273 /* If this is a language-specific tree code, it may require
6274 special handling. */
6275 if ((unsigned int) TREE_CODE (exp)
6276 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6277 && !(*lang_hooks.safe_from_p) (x, exp))
6278 return 0;
6281 /* If we have an rtl, find any enclosed object. Then see if we conflict
6282 with it. */
6283 if (exp_rtl)
6285 if (GET_CODE (exp_rtl) == SUBREG)
6287 exp_rtl = SUBREG_REG (exp_rtl);
6288 if (GET_CODE (exp_rtl) == REG
6289 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6290 return 0;
6293 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6294 are memory and they conflict. */
6295 return ! (rtx_equal_p (x, exp_rtl)
6296 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6297 && true_dependence (exp_rtl, VOIDmode, x,
6298 rtx_addr_varies_p)));
6301 /* If we reach here, it is safe. */
6302 return 1;
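/* Illustrative sketch only, not part of GCC: the typical way callers
   in this file use safe_from_p.  If TARGET might be referenced while
   EXP is evaluated, expansion falls back to a fresh temporary rather
   than storing into TARGET piecewise.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = assign_temp (TREE_TYPE (exp), 0, 1, 1);
  /* Now EXP can safely be expanded directly into TARGET.  */
#endif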
6305 /* Subroutine of expand_expr: return rtx if EXP is a
6306 variable or parameter; else return 0. */
6308 static rtx
6309 var_rtx (exp)
6310 tree exp;
6312 STRIP_NOPS (exp);
6313 switch (TREE_CODE (exp))
6315 case PARM_DECL:
6316 case VAR_DECL:
6317 return DECL_RTL (exp);
6318 default:
6319 return 0;
6323 #ifdef MAX_INTEGER_COMPUTATION_MODE
6325 void
6326 check_max_integer_computation_mode (exp)
6327 tree exp;
6329 enum tree_code code;
6330 enum machine_mode mode;
6332 /* Strip any NOPs that don't change the mode. */
6333 STRIP_NOPS (exp);
6334 code = TREE_CODE (exp);
6336 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6337 if (code == NOP_EXPR
6338 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6339 return;
6341 /* First check the type of the overall operation. We need only look at
6342 unary, binary and relational operations. */
6343 if (TREE_CODE_CLASS (code) == '1'
6344 || TREE_CODE_CLASS (code) == '2'
6345 || TREE_CODE_CLASS (code) == '<')
6347 mode = TYPE_MODE (TREE_TYPE (exp));
6348 if (GET_MODE_CLASS (mode) == MODE_INT
6349 && mode > MAX_INTEGER_COMPUTATION_MODE)
6350 internal_error ("unsupported wide integer operation");
6353 /* Check operand of a unary op. */
6354 if (TREE_CODE_CLASS (code) == '1')
6356 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6357 if (GET_MODE_CLASS (mode) == MODE_INT
6358 && mode > MAX_INTEGER_COMPUTATION_MODE)
6359 internal_error ("unsupported wide integer operation");
6362 /* Check operands of a binary/comparison op. */
6363 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6365 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6366 if (GET_MODE_CLASS (mode) == MODE_INT
6367 && mode > MAX_INTEGER_COMPUTATION_MODE)
6368 internal_error ("unsupported wide integer operation");
6370 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6371 if (GET_MODE_CLASS (mode) == MODE_INT
6372 && mode > MAX_INTEGER_COMPUTATION_MODE)
6373 internal_error ("unsupported wide integer operation");
6376 #endif
6378 /* Return the highest power of two that EXP is known to be a multiple of.
6379 This is used in updating alignment of MEMs in array references. */
6381 static unsigned HOST_WIDE_INT
6382 highest_pow2_factor (exp)
6383 tree exp;
6385 unsigned HOST_WIDE_INT c0, c1;
6387 switch (TREE_CODE (exp))
6389 case INTEGER_CST:
6390 /* We can find the lowest bit that's a one. If the low
6391 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6392 We need to handle this case since we can find it in a COND_EXPR,
6393 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6394 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6395 later ICE. */
6396 if (TREE_CONSTANT_OVERFLOW (exp))
6397 return BIGGEST_ALIGNMENT;
6398 else
6400 /* Note: tree_low_cst is intentionally not used here,
6401 we don't care about the upper bits. */
6402 c0 = TREE_INT_CST_LOW (exp);
6403 c0 &= -c0;
6404 return c0 ? c0 : BIGGEST_ALIGNMENT;
6406 break;
6408 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6409 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6410 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6411 return MIN (c0, c1);
6413 case MULT_EXPR:
6414 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6415 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6416 return c0 * c1;
6418 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6419 case CEIL_DIV_EXPR:
6420 if (integer_pow2p (TREE_OPERAND (exp, 1))
6421 && host_integerp (TREE_OPERAND (exp, 1), 1))
6423 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6424 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6425 return MAX (1, c0 / c1);
6427 break;
6429 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6430 case SAVE_EXPR: case WITH_RECORD_EXPR:
6431 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6433 case COMPOUND_EXPR:
6434 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6436 case COND_EXPR:
6437 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6438 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6439 return MIN (c0, c1);
6441 default:
6442 break;
6445 return 1;
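/* Worked example, illustrative only: for the offset tree (i * 12) + 8,
   highest_pow2_factor gives 1 for `i', 4 for 12 (its lowest set bit),
   so the MULT_EXPR yields 1 * 4 = 4 and the PLUS_EXPR yields
   MIN (4, 8) = 4.  A MEM addressed by that expression can therefore be
   assumed to be 4-byte aligned relative to the containing object.  */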
6448 /* Similar, except that it is known that the expression must be a multiple
6449 of the alignment of TYPE. */
6451 static unsigned HOST_WIDE_INT
6452 highest_pow2_factor_for_type (type, exp)
6453 tree type;
6454 tree exp;
6456 unsigned HOST_WIDE_INT type_align, factor;
6458 factor = highest_pow2_factor (exp);
6459 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6460 return MAX (factor, type_align);
6463 /* Return an object on the placeholder list that matches EXP, a
6464 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6465 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6466 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6467 is a location which initially points to a starting location in the
6468 placeholder list (zero means start of the list) and where a pointer into
6469 the placeholder list at which the object is found is placed. */
6471 tree
6472 find_placeholder (exp, plist)
6473 tree exp;
6474 tree *plist;
6476 tree type = TREE_TYPE (exp);
6477 tree placeholder_expr;
6479 for (placeholder_expr
6480 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6481 placeholder_expr != 0;
6482 placeholder_expr = TREE_CHAIN (placeholder_expr))
6484 tree need_type = TYPE_MAIN_VARIANT (type);
6485 tree elt;
6487 /* Find the outermost reference that is of the type we want. If none,
6488 see if any object has a type that is a pointer to the type we
6489 want. */
6490 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6491 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6492 || TREE_CODE (elt) == COND_EXPR)
6493 ? TREE_OPERAND (elt, 1)
6494 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6495 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6496 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6497 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6498 ? TREE_OPERAND (elt, 0) : 0))
6499 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6501 if (plist)
6502 *plist = placeholder_expr;
6503 return elt;
6506 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6507 elt
6508 = ((TREE_CODE (elt) == COMPOUND_EXPR
6509 || TREE_CODE (elt) == COND_EXPR)
6510 ? TREE_OPERAND (elt, 1)
6511 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6512 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6513 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6514 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6515 ? TREE_OPERAND (elt, 0) : 0))
6516 if (POINTER_TYPE_P (TREE_TYPE (elt))
6517 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6518 == need_type))
6520 if (plist)
6521 *plist = placeholder_expr;
6522 return build1 (INDIRECT_REF, need_type, elt);
6526 return 0;
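/* Illustrative sketch only, not part of GCC: how a self-referential
   size ends up going through the placeholder machinery.  SIZE_TREE and
   OBJ are hypothetical.  Wrapping the size in a WITH_RECORD_EXPR names
   the object; when that is expanded, expand_expr pushes OBJ onto
   placeholder_list so that find_placeholder can substitute it for the
   PLACEHOLDER_EXPRs inside SIZE_TREE.  */
#if 0
  if (contains_placeholder_p (size_tree))
    size_tree = build (WITH_RECORD_EXPR, sizetype, size_tree, obj);
  size_rtx = expand_expr (size_tree, NULL_RTX, VOIDmode, 0);
#endif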
6529 /* expand_expr: generate code for computing expression EXP.
6530 An rtx for the computed value is returned. The value is never null.
6531 In the case of a void EXP, const0_rtx is returned.
6533 The value may be stored in TARGET if TARGET is nonzero.
6534 TARGET is just a suggestion; callers must assume that
6535 the rtx returned may not be the same as TARGET.
6537 If TARGET is CONST0_RTX, it means that the value will be ignored.
6539 If TMODE is not VOIDmode, it suggests generating the
6540 result in mode TMODE. But this is done only when convenient.
6541 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6542 TMODE is just a suggestion; callers must assume that
6543 the rtx returned may not have mode TMODE.
6545 Note that TARGET may have neither TMODE nor MODE. In that case, it
6546 probably will not be used.
6548 If MODIFIER is EXPAND_SUM then when EXP is an addition
6549 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6550 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6551 products as above, or REG or MEM, or constant.
6552 Ordinarily in such cases we would output mul or add instructions
6553 and then return a pseudo reg containing the sum.
6555 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6556 it also marks a label as absolutely required (it can't be dead).
6557 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6558 This is used for outputting expressions used in initializers.
6560 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6561 with a constant address even if that address is not normally legitimate.
6562 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6564 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6565 a call parameter. Such targets require special care as we haven't yet
6566 marked TARGET so that it's safe from being trashed by libcalls. We
6567 don't want to use TARGET for anything but the final result;
6568 Intermediate values must go elsewhere. Additionally, calls to
6569 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
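/* Illustrative sketch only, not part of GCC: since TARGET and TMODE
   are only suggestions, a hypothetical caller must always use the
   returned rtx, copying it explicitly (assuming matching modes) when a
   particular location is required.  */
#if 0
  rtx op = expand_expr (exp, target, mode, EXPAND_NORMAL);
  if (op != target)
    emit_move_insn (target, op);
#endif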
6572 expand_expr (exp, target, tmode, modifier)
6573 tree exp;
6574 rtx target;
6575 enum machine_mode tmode;
6576 enum expand_modifier modifier;
6578 rtx op0, op1, temp;
6579 tree type = TREE_TYPE (exp);
6580 int unsignedp = TREE_UNSIGNED (type);
6581 enum machine_mode mode;
6582 enum tree_code code = TREE_CODE (exp);
6583 optab this_optab;
6584 rtx subtarget, original_target;
6585 int ignore;
6586 tree context;
6588 /* Handle ERROR_MARK before anybody tries to access its type. */
6589 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6591 op0 = CONST0_RTX (tmode);
6592 if (op0 != 0)
6593 return op0;
6594 return const0_rtx;
6597 mode = TYPE_MODE (type);
6598 /* Use subtarget as the target for operand 0 of a binary operation. */
6599 subtarget = get_subtarget (target);
6600 original_target = target;
6601 ignore = (target == const0_rtx
6602 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6603 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6604 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6605 && TREE_CODE (type) == VOID_TYPE));
6607 /* If we are going to ignore this result, we need only do something
6608 if there is a side-effect somewhere in the expression. If there
6609 is, short-circuit the most common cases here. Note that we must
6610 not call expand_expr with anything but const0_rtx in case this
6611 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6613 if (ignore)
6615 if (! TREE_SIDE_EFFECTS (exp))
6616 return const0_rtx;
6618 /* Ensure we reference a volatile object even if value is ignored, but
6619 don't do this if all we are doing is taking its address. */
6620 if (TREE_THIS_VOLATILE (exp)
6621 && TREE_CODE (exp) != FUNCTION_DECL
6622 && mode != VOIDmode && mode != BLKmode
6623 && modifier != EXPAND_CONST_ADDRESS)
6625 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6626 if (GET_CODE (temp) == MEM)
6627 temp = copy_to_reg (temp);
6628 return const0_rtx;
6631 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6632 || code == INDIRECT_REF || code == BUFFER_REF)
6633 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6634 modifier);
6636 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6637 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6639 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6640 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6641 return const0_rtx;
6643 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6644 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6645 /* If the second operand has no side effects, just evaluate
6646 the first. */
6647 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6648 modifier);
6649 else if (code == BIT_FIELD_REF)
6651 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6652 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6653 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6654 return const0_rtx;
6657 target = 0;
6660 #ifdef MAX_INTEGER_COMPUTATION_MODE
6661 /* Only check stuff here if the mode we want is different from the mode
6662 of the expression; if it's the same, check_max_integer_computation_mode
6663 will handle it. Do we really need to check this stuff at all? */
6665 if (target
6666 && GET_MODE (target) != mode
6667 && TREE_CODE (exp) != INTEGER_CST
6668 && TREE_CODE (exp) != PARM_DECL
6669 && TREE_CODE (exp) != ARRAY_REF
6670 && TREE_CODE (exp) != ARRAY_RANGE_REF
6671 && TREE_CODE (exp) != COMPONENT_REF
6672 && TREE_CODE (exp) != BIT_FIELD_REF
6673 && TREE_CODE (exp) != INDIRECT_REF
6674 && TREE_CODE (exp) != CALL_EXPR
6675 && TREE_CODE (exp) != VAR_DECL
6676 && TREE_CODE (exp) != RTL_EXPR)
6678 enum machine_mode mode = GET_MODE (target);
6680 if (GET_MODE_CLASS (mode) == MODE_INT
6681 && mode > MAX_INTEGER_COMPUTATION_MODE)
6682 internal_error ("unsupported wide integer operation");
6685 if (tmode != mode
6686 && TREE_CODE (exp) != INTEGER_CST
6687 && TREE_CODE (exp) != PARM_DECL
6688 && TREE_CODE (exp) != ARRAY_REF
6689 && TREE_CODE (exp) != ARRAY_RANGE_REF
6690 && TREE_CODE (exp) != COMPONENT_REF
6691 && TREE_CODE (exp) != BIT_FIELD_REF
6692 && TREE_CODE (exp) != INDIRECT_REF
6693 && TREE_CODE (exp) != VAR_DECL
6694 && TREE_CODE (exp) != CALL_EXPR
6695 && TREE_CODE (exp) != RTL_EXPR
6696 && GET_MODE_CLASS (tmode) == MODE_INT
6697 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6698 internal_error ("unsupported wide integer operation");
6700 check_max_integer_computation_mode (exp);
6701 #endif
6703 /* If we will do cse, generate all results into pseudo registers
6704 since 1) that allows cse to find more things
6705 and 2) otherwise cse could produce an insn the machine
6706 cannot support. An exception is a CONSTRUCTOR into a multi-word
6707 MEM: that's much more likely to be most efficient into the MEM.
6708 Another is a CALL_EXPR which must return in memory. */
6710 if (! cse_not_expected && mode != BLKmode && target
6711 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6712 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6713 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6714 target = 0;
6716 switch (code)
6718 case LABEL_DECL:
6720 tree function = decl_function_context (exp);
6721 /* Labels in containing functions, or labels used from initializers,
6722 must be forced. */
6723 if (modifier == EXPAND_INITIALIZER
6724 || (function != current_function_decl
6725 && function != inline_function_decl
6726 && function != 0))
6727 temp = force_label_rtx (exp);
6728 else
6729 temp = label_rtx (exp);
6731 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6732 if (function != current_function_decl
6733 && function != inline_function_decl && function != 0)
6734 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6735 return temp;
6738 case PARM_DECL:
6739 if (!DECL_RTL_SET_P (exp))
6741 error_with_decl (exp, "prior parameter's size depends on `%s'");
6742 return CONST0_RTX (mode);
6745 /* ... fall through ... */
6747 case VAR_DECL:
6748 /* If a static var's type was incomplete when the decl was written,
6749 but the type is complete now, lay out the decl now. */
6750 if (DECL_SIZE (exp) == 0
6751 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6752 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6753 layout_decl (exp, 0);
6755 /* ... fall through ... */
6757 case FUNCTION_DECL:
6758 case RESULT_DECL:
6759 if (DECL_RTL (exp) == 0)
6760 abort ();
6762 /* Ensure the variable is marked as used even if it doesn't go
6763 through a parser. If it hasn't been used yet, write out an
6764 external definition. */
6765 if (! TREE_USED (exp))
6767 assemble_external (exp);
6768 TREE_USED (exp) = 1;
6771 /* Show we haven't gotten RTL for this yet. */
6772 temp = 0;
6774 /* Handle variables inherited from containing functions. */
6775 context = decl_function_context (exp);
6777 /* We treat inline_function_decl as an alias for the current function
6778 because that is the inline function whose vars, types, etc.
6779 are being merged into the current function.
6780 See expand_inline_function. */
6782 if (context != 0 && context != current_function_decl
6783 && context != inline_function_decl
6784 /* If var is static, we don't need a static chain to access it. */
6785 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6786 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6788 rtx addr;
6790 /* Mark as non-local and addressable. */
6791 DECL_NONLOCAL (exp) = 1;
6792 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6793 abort ();
6794 (*lang_hooks.mark_addressable) (exp);
6795 if (GET_CODE (DECL_RTL (exp)) != MEM)
6796 abort ();
6797 addr = XEXP (DECL_RTL (exp), 0);
6798 if (GET_CODE (addr) == MEM)
6799 addr
6800 = replace_equiv_address (addr,
6801 fix_lexical_addr (XEXP (addr, 0), exp));
6802 else
6803 addr = fix_lexical_addr (addr, exp);
6805 temp = replace_equiv_address (DECL_RTL (exp), addr);
6808 /* This is the case of an array whose size is to be determined
6809 from its initializer, while the initializer is still being parsed.
6810 See expand_decl. */
6812 else if (GET_CODE (DECL_RTL (exp)) == MEM
6813 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6814 temp = validize_mem (DECL_RTL (exp));
6816 /* If DECL_RTL is memory, we are in the normal case and either
6817 the address is not valid or it is not a register and -fforce-addr
6818 is specified, get the address into a register. */
6820 else if (GET_CODE (DECL_RTL (exp)) == MEM
6821 && modifier != EXPAND_CONST_ADDRESS
6822 && modifier != EXPAND_SUM
6823 && modifier != EXPAND_INITIALIZER
6824 && (! memory_address_p (DECL_MODE (exp),
6825 XEXP (DECL_RTL (exp), 0))
6826 || (flag_force_addr
6827 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6828 temp = replace_equiv_address (DECL_RTL (exp),
6829 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6831 /* If we got something, return it. But first, set the alignment
6832 if the address is a register. */
6833 if (temp != 0)
6835 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6836 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6838 return temp;
6841 /* If the mode of DECL_RTL does not match that of the decl, it
6842 must be a promoted value. We return a SUBREG of the wanted mode,
6843 but mark it so that we know that it was already extended. */
6845 if (GET_CODE (DECL_RTL (exp)) == REG
6846 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6848 /* Get the signedness used for this variable. Ensure we get the
6849 same mode we got when the variable was declared. */
6850 if (GET_MODE (DECL_RTL (exp))
6851 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6852 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6853 abort ();
6855 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6856 SUBREG_PROMOTED_VAR_P (temp) = 1;
6857 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6858 return temp;
6861 return DECL_RTL (exp);
6863 case INTEGER_CST:
6864 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6865 TREE_INT_CST_HIGH (exp), mode);
6867 /* ??? If overflow is set, fold will have done an incomplete job,
6868 which can result in (plus xx (const_int 0)), which can get
6869 simplified by validate_replace_rtx during virtual register
6870 instantiation, which can result in unrecognizable insns.
6871 Avoid this by forcing all overflows into registers. */
6872 if (TREE_CONSTANT_OVERFLOW (exp)
6873 && modifier != EXPAND_INITIALIZER)
6874 temp = force_reg (mode, temp);
6876 return temp;
6878 case VECTOR_CST:
6879 return const_vector_from_tree (exp);
6881 case CONST_DECL:
6882 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6884 case REAL_CST:
6885 /* If optimized, generate immediate CONST_DOUBLE
6886 which will be turned into memory by reload if necessary.
6888 We used to force a register so that loop.c could see it. But
6889 this does not allow gen_* patterns to perform optimizations with
6890 the constants. It also produces two insns in cases like "x = 1.0;".
6891 On most machines, floating-point constants are not permitted in
6892 many insns, so we'd end up copying it to a register in any case.
6894 Now, we do the copying in expand_binop, if appropriate. */
6895 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6896 TYPE_MODE (TREE_TYPE (exp)));
6898 case COMPLEX_CST:
6899 case STRING_CST:
6900 temp = output_constant_def (exp, 1);
6902 /* temp contains a constant address.
6903 On RISC machines where a constant address isn't valid,
6904 make some insns to get that address into a register. */
6905 if (modifier != EXPAND_CONST_ADDRESS
6906 && modifier != EXPAND_INITIALIZER
6907 && modifier != EXPAND_SUM
6908 && (! memory_address_p (mode, XEXP (temp, 0))
6909 || flag_force_addr))
6910 return replace_equiv_address (temp,
6911 copy_rtx (XEXP (temp, 0)));
6912 return temp;
6914 case EXPR_WITH_FILE_LOCATION:
6916 rtx to_return;
6917 location_t saved_loc = input_location;
6918 input_filename = EXPR_WFL_FILENAME (exp);
6919 input_line = EXPR_WFL_LINENO (exp);
6920 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6921 emit_line_note (input_filename, input_line);
6922 /* Possibly avoid switching back and forth here. */
6923 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6924 input_location = saved_loc;
6925 return to_return;
6928 case SAVE_EXPR:
6929 context = decl_function_context (exp);
6931 /* If this SAVE_EXPR was at global context, assume we are an
6932 initialization function and move it into our context. */
6933 if (context == 0)
6934 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6936 /* We treat inline_function_decl as an alias for the current function
6937 because that is the inline function whose vars, types, etc.
6938 are being merged into the current function.
6939 See expand_inline_function. */
6940 if (context == current_function_decl || context == inline_function_decl)
6941 context = 0;
6943 /* If this is non-local, handle it. */
6944 if (context)
6946 /* The following call just exists to abort if the context is
6947 not of a containing function. */
6948 find_function_data (context);
6950 temp = SAVE_EXPR_RTL (exp);
6951 if (temp && GET_CODE (temp) == REG)
6953 put_var_into_stack (exp, /*rescan=*/true);
6954 temp = SAVE_EXPR_RTL (exp);
6956 if (temp == 0 || GET_CODE (temp) != MEM)
6957 abort ();
6958 return
6959 replace_equiv_address (temp,
6960 fix_lexical_addr (XEXP (temp, 0), exp));
6962 if (SAVE_EXPR_RTL (exp) == 0)
6964 if (mode == VOIDmode)
6965 temp = const0_rtx;
6966 else
6967 temp = assign_temp (build_qualified_type (type,
6968 (TYPE_QUALS (type)
6969 | TYPE_QUAL_CONST)),
6970 3, 0, 0);
6972 SAVE_EXPR_RTL (exp) = temp;
6973 if (!optimize && GET_CODE (temp) == REG)
6974 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6975 save_expr_regs);
6977 /* If the mode of TEMP does not match that of the expression, it
6978 must be a promoted value. We pass store_expr a SUBREG of the
6979 wanted mode but mark it so that we know that it was already
6980 extended. */
6982 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6984 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6985 promote_mode (type, mode, &unsignedp, 0);
6986 SUBREG_PROMOTED_VAR_P (temp) = 1;
6987 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6990 if (temp == const0_rtx)
6991 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6992 else
6993 store_expr (TREE_OPERAND (exp, 0), temp,
6994 modifier == EXPAND_STACK_PARM ? 2 : 0);
6996 TREE_USED (exp) = 1;
6999 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7000 must be a promoted value. We return a SUBREG of the wanted mode,
7001 but mark it so that we know that it was already extended. */
7003 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7004 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7006 /* Compute the signedness and make the proper SUBREG. */
7007 promote_mode (type, mode, &unsignedp, 0);
7008 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
7009 SUBREG_PROMOTED_VAR_P (temp) = 1;
7010 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7011 return temp;
7014 return SAVE_EXPR_RTL (exp);
7016 case UNSAVE_EXPR:
7018 rtx temp;
7019 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7020 TREE_OPERAND (exp, 0)
7021 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7022 return temp;
7025 case PLACEHOLDER_EXPR:
7027 tree old_list = placeholder_list;
7028 tree placeholder_expr = 0;
7030 exp = find_placeholder (exp, &placeholder_expr);
7031 if (exp == 0)
7032 abort ();
7034 placeholder_list = TREE_CHAIN (placeholder_expr);
7035 temp = expand_expr (exp, original_target, tmode, modifier);
7036 placeholder_list = old_list;
7037 return temp;
7040 case WITH_RECORD_EXPR:
7041 /* Put the object on the placeholder list, expand our first operand,
7042 and pop the list. */
7043 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7044 placeholder_list);
7045 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7046 modifier);
7047 placeholder_list = TREE_CHAIN (placeholder_list);
7048 return target;
7050 case GOTO_EXPR:
7051 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7052 expand_goto (TREE_OPERAND (exp, 0));
7053 else
7054 expand_computed_goto (TREE_OPERAND (exp, 0));
7055 return const0_rtx;
7057 case EXIT_EXPR:
7058 expand_exit_loop_if_false (NULL,
7059 invert_truthvalue (TREE_OPERAND (exp, 0)));
7060 return const0_rtx;
7062 case LABELED_BLOCK_EXPR:
7063 if (LABELED_BLOCK_BODY (exp))
7064 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7065 /* Should perhaps use expand_label, but this is simpler and safer. */
7066 do_pending_stack_adjust ();
7067 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7068 return const0_rtx;
7070 case EXIT_BLOCK_EXPR:
7071 if (EXIT_BLOCK_RETURN (exp))
7072 sorry ("returned value in block_exit_expr");
7073 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7074 return const0_rtx;
7076 case LOOP_EXPR:
7077 push_temp_slots ();
7078 expand_start_loop (1);
7079 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7080 expand_end_loop ();
7081 pop_temp_slots ();
7083 return const0_rtx;
7085 case BIND_EXPR:
7087 tree vars = TREE_OPERAND (exp, 0);
7089 /* Need to open a binding contour here because
7090 if there are any cleanups they must be contained here. */
7091 expand_start_bindings (2);
7093 /* Mark the corresponding BLOCK for output in its proper place. */
7094 if (TREE_OPERAND (exp, 2) != 0
7095 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7096 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7098 /* If VARS have not yet been expanded, expand them now. */
7099 while (vars)
7101 if (!DECL_RTL_SET_P (vars))
7102 expand_decl (vars);
7103 expand_decl_init (vars);
7104 vars = TREE_CHAIN (vars);
7107 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7109 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7111 return temp;
7114 case RTL_EXPR:
7115 if (RTL_EXPR_SEQUENCE (exp))
7117 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7118 abort ();
7119 emit_insn (RTL_EXPR_SEQUENCE (exp));
7120 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7122 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7123 free_temps_for_rtl_expr (exp);
7124 return RTL_EXPR_RTL (exp);
7126 case CONSTRUCTOR:
7127 /* If we don't need the result, just ensure we evaluate any
7128 subexpressions. */
7129 if (ignore)
7131 tree elt;
7133 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7134 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7136 return const0_rtx;
7139 /* All elts simple constants => refer to a constant in memory. But
7140 if this is a non-BLKmode mode, let it store a field at a time
7141 since that should make a CONST_INT or CONST_DOUBLE when we
7142 fold. Likewise, if we have a target we can use, it is best to
7143 store directly into the target unless the type is large enough
7144 that memcpy will be used. If we are making an initializer and
7145 all operands are constant, put it in memory as well.
7147 FIXME: Avoid trying to fill vector constructors piece-meal.
7148 Output them with output_constant_def below unless we're sure
7149 they're zeros. This should go away when vector initializers
7150 are treated like VECTOR_CST instead of arrays.
7152 else if ((TREE_STATIC (exp)
7153 && ((mode == BLKmode
7154 && ! (target != 0 && safe_from_p (target, exp, 1)))
7155 || TREE_ADDRESSABLE (exp)
7156 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7157 && (! MOVE_BY_PIECES_P
7158 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7159 TYPE_ALIGN (type)))
7160 && ((TREE_CODE (type) == VECTOR_TYPE
7161 && !is_zeros_p (exp))
7162 || ! mostly_zeros_p (exp)))))
7163 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7165 rtx constructor = output_constant_def (exp, 1);
7167 if (modifier != EXPAND_CONST_ADDRESS
7168 && modifier != EXPAND_INITIALIZER
7169 && modifier != EXPAND_SUM)
7170 constructor = validize_mem (constructor);
7172 return constructor;
7174 else
7176 /* Handle calls that pass values in multiple non-contiguous
7177 locations. The Irix 6 ABI has examples of this. */
7178 if (target == 0 || ! safe_from_p (target, exp, 1)
7179 || GET_CODE (target) == PARALLEL
7180 || modifier == EXPAND_STACK_PARM)
7181 target
7182 = assign_temp (build_qualified_type (type,
7183 (TYPE_QUALS (type)
7184 | (TREE_READONLY (exp)
7185 * TYPE_QUAL_CONST))),
7186 0, TREE_ADDRESSABLE (exp), 1);
7188 store_constructor (exp, target, 0, int_expr_size (exp));
7189 return target;
7192 case INDIRECT_REF:
7194 tree exp1 = TREE_OPERAND (exp, 0);
7195 tree index;
7196 tree string = string_constant (exp1, &index);
7198 /* Try to optimize reads from const strings. */
7199 if (string
7200 && TREE_CODE (string) == STRING_CST
7201 && TREE_CODE (index) == INTEGER_CST
7202 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7203 && GET_MODE_CLASS (mode) == MODE_INT
7204 && GET_MODE_SIZE (mode) == 1
7205 && modifier != EXPAND_WRITE)
7206 return gen_int_mode (TREE_STRING_POINTER (string)
7207 [TREE_INT_CST_LOW (index)], mode);
7209 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7210 op0 = memory_address (mode, op0);
7211 temp = gen_rtx_MEM (mode, op0);
7212 set_mem_attributes (temp, exp, 0);
7214 /* If we are writing to this object and its type is a record with
7215 readonly fields, we must mark it as readonly so it will
7216 conflict with readonly references to those fields. */
7217 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7218 RTX_UNCHANGING_P (temp) = 1;
7220 return temp;
7223 case ARRAY_REF:
7224 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7225 abort ();
7228 tree array = TREE_OPERAND (exp, 0);
7229 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7230 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7231 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7232 HOST_WIDE_INT i;
7234 /* Optimize the special-case of a zero lower bound.
7236 We convert the low_bound to sizetype to avoid some problems
7237 with constant folding. (E.g. suppose the lower bound is 1,
7238 and its mode is QI. Without the conversion, (ARRAY
7239 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7240 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7242 if (! integer_zerop (low_bound))
7243 index = size_diffop (index, convert (sizetype, low_bound));
7245 /* Fold an expression like: "foo"[2].
7246 This is not done in fold so it won't happen inside &.
7247 Don't fold if this is for wide characters since it's too
7248 difficult to do correctly and this is a very rare case. */
7250 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7251 && TREE_CODE (array) == STRING_CST
7252 && TREE_CODE (index) == INTEGER_CST
7253 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7254 && GET_MODE_CLASS (mode) == MODE_INT
7255 && GET_MODE_SIZE (mode) == 1)
7256 return gen_int_mode (TREE_STRING_POINTER (array)
7257 [TREE_INT_CST_LOW (index)], mode);
7259 /* If this is a constant index into a constant array,
7260 just get the value from the array. Handle both the cases when
7261 we have an explicit constructor and when our operand is a variable
7262 that was declared const. */
7264 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7265 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7266 && TREE_CODE (index) == INTEGER_CST
7267 && 0 > compare_tree_int (index,
7268 list_length (CONSTRUCTOR_ELTS
7269 (TREE_OPERAND (exp, 0)))))
7271 tree elem;
7273 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7274 i = TREE_INT_CST_LOW (index);
7275 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7278 if (elem)
7279 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7280 modifier);
7283 else if (optimize >= 1
7284 && modifier != EXPAND_CONST_ADDRESS
7285 && modifier != EXPAND_INITIALIZER
7286 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7287 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7288 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7290 if (TREE_CODE (index) == INTEGER_CST)
7292 tree init = DECL_INITIAL (array);
7294 if (TREE_CODE (init) == CONSTRUCTOR)
7296 tree elem;
7298 for (elem = CONSTRUCTOR_ELTS (init);
7299 (elem
7300 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7301 elem = TREE_CHAIN (elem))
7304 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7305 return expand_expr (fold (TREE_VALUE (elem)), target,
7306 tmode, modifier);
7308 else if (TREE_CODE (init) == STRING_CST
7309 && 0 > compare_tree_int (index,
7310 TREE_STRING_LENGTH (init)))
7312 tree type = TREE_TYPE (TREE_TYPE (init));
7313 enum machine_mode mode = TYPE_MODE (type);
7315 if (GET_MODE_CLASS (mode) == MODE_INT
7316 && GET_MODE_SIZE (mode) == 1)
7317 return gen_int_mode (TREE_STRING_POINTER (init)
7318 [TREE_INT_CST_LOW (index)], mode);
7323 goto normal_inner_ref;
7325 case COMPONENT_REF:
7326 /* If the operand is a CONSTRUCTOR, we can just extract the
7327 appropriate field if it is present. */
7328 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7330 tree elt;
7332 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7333 elt = TREE_CHAIN (elt))
7334 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7335 /* We can normally use the value of the field in the
7336 CONSTRUCTOR. However, if this is a bitfield in
7337 an integral mode that we can fit in a HOST_WIDE_INT,
7338 we must mask only the number of bits in the bitfield,
7339 since this is done implicitly by the constructor. If
7340 the bitfield does not meet either of those conditions,
7341 we can't do this optimization. */
7342 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7343 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7344 == MODE_INT)
7345 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7346 <= HOST_BITS_PER_WIDE_INT))))
7348 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7349 && modifier == EXPAND_STACK_PARM)
7350 target = 0;
7351 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7352 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7354 HOST_WIDE_INT bitsize
7355 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7356 enum machine_mode imode
7357 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7359 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7361 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7362 op0 = expand_and (imode, op0, op1, target);
7364 else
7366 tree count
7367 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7368 0);
7370 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7371 target, 0);
7372 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7373 target, 0);
7377 return op0;
7380 goto normal_inner_ref;
7382 case BIT_FIELD_REF:
7383 case ARRAY_RANGE_REF:
7384 normal_inner_ref:
7386 enum machine_mode mode1;
7387 HOST_WIDE_INT bitsize, bitpos;
7388 tree offset;
7389 int volatilep = 0;
7390 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7391 &mode1, &unsignedp, &volatilep);
7392 rtx orig_op0;
7394 /* If we got back the original object, something is wrong. Perhaps
7395 we are evaluating an expression too early. In any event, don't
7396 infinitely recurse. */
7397 if (tem == exp)
7398 abort ();
7400 /* If TEM's type is a union of variable size, pass TARGET to the inner
7401 computation, since it will need a temporary and TARGET is known
7402 to suffice as one. This occurs in unchecked conversion in Ada. */
7404 orig_op0 = op0
7405 = expand_expr (tem,
7406 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7407 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7408 != INTEGER_CST)
7409 && modifier != EXPAND_STACK_PARM
7410 ? target : NULL_RTX),
7411 VOIDmode,
7412 (modifier == EXPAND_INITIALIZER
7413 || modifier == EXPAND_CONST_ADDRESS
7414 || modifier == EXPAND_STACK_PARM)
7415 ? modifier : EXPAND_NORMAL);
7417 /* If this is a constant, put it into a register if it is a
7418 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7419 if (CONSTANT_P (op0))
7421 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7422 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7423 && offset == 0)
7424 op0 = force_reg (mode, op0);
7425 else
7426 op0 = validize_mem (force_const_mem (mode, op0));
7429 if (offset != 0)
7431 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7432 EXPAND_SUM);
7434 /* If this object is in a register, put it into memory.
7435 This case can't occur in C, but can in Ada if we have
7436 unchecked conversion of an expression from a scalar type to
7437 an array or record type. */
7438 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7439 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7441 /* If the operand is a SAVE_EXPR, we can deal with this by
7442 forcing the SAVE_EXPR into memory. */
7443 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7445 put_var_into_stack (TREE_OPERAND (exp, 0),
7446 /*rescan=*/true);
7447 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7449 else
7451 tree nt
7452 = build_qualified_type (TREE_TYPE (tem),
7453 (TYPE_QUALS (TREE_TYPE (tem))
7454 | TYPE_QUAL_CONST));
7455 rtx memloc = assign_temp (nt, 1, 1, 1);
7457 emit_move_insn (memloc, op0);
7458 op0 = memloc;
7462 if (GET_CODE (op0) != MEM)
7463 abort ();
7465 #ifdef POINTERS_EXTEND_UNSIGNED
7466 if (GET_MODE (offset_rtx) != Pmode)
7467 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7468 #else
7469 if (GET_MODE (offset_rtx) != ptr_mode)
7470 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7471 #endif
7473 /* A constant address in OP0 can have VOIDmode; we must not try
7474 to call force_reg in that case, so avoid it. */
7475 if (GET_CODE (op0) == MEM
7476 && GET_MODE (op0) == BLKmode
7477 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7478 && bitsize != 0
7479 && (bitpos % bitsize) == 0
7480 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7481 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7483 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7484 bitpos = 0;
7487 op0 = offset_address (op0, offset_rtx,
7488 highest_pow2_factor (offset));
7491 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7492 record its alignment as BIGGEST_ALIGNMENT. */
7493 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7494 && is_aligning_offset (offset, tem))
7495 set_mem_align (op0, BIGGEST_ALIGNMENT);
7497 /* Don't forget about volatility even if this is a bitfield. */
7498 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7500 if (op0 == orig_op0)
7501 op0 = copy_rtx (op0);
7503 MEM_VOLATILE_P (op0) = 1;
7506 /* The following code doesn't handle CONCAT.
7507 Assume only bitpos == 0 can be used for CONCAT, due to
7508 one-element arrays having the same mode as their element. */
7509 if (GET_CODE (op0) == CONCAT)
7511 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7512 abort ();
7513 return op0;
7516 /* In cases where an aligned union has an unaligned object
7517 as a field, we might be extracting a BLKmode value from
7518 an integer-mode (e.g., SImode) object. Handle this case
7519 by doing the extract into an object as wide as the field
7520 (which we know to be the width of a basic mode), then
7521 storing into memory, and changing the mode to BLKmode. */
7522 if (mode1 == VOIDmode
7523 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7524 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7526 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7527 && modifier != EXPAND_CONST_ADDRESS
7528 && modifier != EXPAND_INITIALIZER)
7529 /* If the field isn't aligned enough to fetch as a memref,
7530 fetch it as a bit field. */
7531 || (mode1 != BLKmode
7532 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7533 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
7534 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7535 || (bitpos % BITS_PER_UNIT != 0)))
7536 /* If the type and the field have a constant size and the
7537 size of the type isn't the same as the size of the bitfield,
7538 we must use bitfield operations. */
7539 || (bitsize >= 0
7540 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7541 == INTEGER_CST)
7542 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7543 bitsize)))
7545 enum machine_mode ext_mode = mode;
7547 if (ext_mode == BLKmode
7548 && ! (target != 0 && GET_CODE (op0) == MEM
7549 && GET_CODE (target) == MEM
7550 && bitpos % BITS_PER_UNIT == 0))
7551 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7553 if (ext_mode == BLKmode)
7555 /* In this case, BITPOS must start at a byte boundary and
7556 TARGET, if specified, must be a MEM. */
7557 if (GET_CODE (op0) != MEM
7558 || (target != 0 && GET_CODE (target) != MEM)
7559 || bitpos % BITS_PER_UNIT != 0)
7560 abort ();
7562 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7563 if (target == 0)
7564 target = assign_temp (type, 0, 1, 1);
7566 emit_block_move (target, op0,
7567 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7568 / BITS_PER_UNIT),
7569 (modifier == EXPAND_STACK_PARM
7570 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7572 return target;
7575 op0 = validize_mem (op0);
7577 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7578 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7580 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7581 (modifier == EXPAND_STACK_PARM
7582 ? NULL_RTX : target),
7583 ext_mode, ext_mode,
7584 int_size_in_bytes (TREE_TYPE (tem)));
7586 /* If the result is a record type and BITSIZE is narrower than
7587 the mode of OP0, an integral mode, and this is a big endian
7588 machine, we must put the field into the high-order bits. */
7589 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7590 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7591 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7592 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7593 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7594 - bitsize),
7595 op0, 1);
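/* For instance, on a 32-bit big-endian target a 24-bit record field
   extracted into an SImode register is shifted left here by
   32 - 24 == 8 bits, so the field ends up in the high-order bytes as
   the comment above requires. */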
7597 if (mode == BLKmode)
7599 rtx new = assign_temp (build_qualified_type
7600 ((*lang_hooks.types.type_for_mode)
7601 (ext_mode, 0),
7602 TYPE_QUAL_CONST), 0, 1, 1);
7604 emit_move_insn (new, op0);
7605 op0 = copy_rtx (new);
7606 PUT_MODE (op0, BLKmode);
7607 set_mem_attributes (op0, exp, 1);
7610 return op0;
7613 /* If the result is BLKmode, use that to access the object
7614 now as well. */
7615 if (mode == BLKmode)
7616 mode1 = BLKmode;
7618 /* Get a reference to just this component. */
7619 if (modifier == EXPAND_CONST_ADDRESS
7620 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7621 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7622 else
7623 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7625 if (op0 == orig_op0)
7626 op0 = copy_rtx (op0);
7628 set_mem_attributes (op0, exp, 0);
7629 if (GET_CODE (XEXP (op0, 0)) == REG)
7630 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7632 MEM_VOLATILE_P (op0) |= volatilep;
7633 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7634 || modifier == EXPAND_CONST_ADDRESS
7635 || modifier == EXPAND_INITIALIZER)
7636 return op0;
7637 else if (target == 0)
7638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7640 convert_move (target, op0, unsignedp);
7641 return target;
7644 case VTABLE_REF:
7646 rtx insn, before = get_last_insn (), vtbl_ref;
7648 /* Evaluate the interior expression. */
7649 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7650 tmode, modifier);
7652 /* Get or create an instruction off which to hang a note. */
7653 if (REG_P (subtarget))
7655 target = subtarget;
7656 insn = get_last_insn ();
7657 if (insn == before)
7658 abort ();
7659 if (! INSN_P (insn))
7660 insn = prev_nonnote_insn (insn);
7662 else
7664 target = gen_reg_rtx (GET_MODE (subtarget));
7665 insn = emit_move_insn (target, subtarget);
7668 /* Collect the data for the note. */
7669 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7670 vtbl_ref = plus_constant (vtbl_ref,
7671 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7672 /* Discard the initial CONST that was added. */
7673 vtbl_ref = XEXP (vtbl_ref, 0);
7675 REG_NOTES (insn)
7676 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7678 return target;
7681 /* Intended for a reference to a buffer of a file-object in Pascal.
7682 But it's not certain that a special tree code will really be
7683 necessary for these. INDIRECT_REF might work for them. */
7684 case BUFFER_REF:
7685 abort ();
7687 case IN_EXPR:
7689 /* Pascal set IN expression.
7691 Algorithm:
7692 rlo = set_low - (set_low%bits_per_word);
7693 the_word = set [ (index - rlo)/bits_per_word ];
7694 bit_index = index % bits_per_word;
7695 bitmask = 1 << bit_index;
7696 return !!(the_word & bitmask); */
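/* As a concrete instance of the pseudo-code above (assuming 8-bit
   storage units): with set_low == 3 and index == 13, rlo is
   3 - 3 % 8 == 0, the_word is set[(13 - 0) / 8] == set[1],
   bit_index is 13 % 8 == 5, and bitmask is 1 << 5 == 0x20. */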
7698 tree set = TREE_OPERAND (exp, 0);
7699 tree index = TREE_OPERAND (exp, 1);
7700 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7701 tree set_type = TREE_TYPE (set);
7702 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7703 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7704 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7705 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7706 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7707 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7708 rtx setaddr = XEXP (setval, 0);
7709 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7710 rtx rlow;
7711 rtx diff, quo, rem, addr, bit, result;
7713 /* If domain is empty, answer is no. Likewise if index is constant
7714 and out of bounds. */
7715 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7716 && TREE_CODE (set_low_bound) == INTEGER_CST
7717 && tree_int_cst_lt (set_high_bound, set_low_bound))
7718 || (TREE_CODE (index) == INTEGER_CST
7719 && TREE_CODE (set_low_bound) == INTEGER_CST
7720 && tree_int_cst_lt (index, set_low_bound))
7721 || (TREE_CODE (set_high_bound) == INTEGER_CST
7722 && TREE_CODE (index) == INTEGER_CST
7723 && tree_int_cst_lt (set_high_bound, index))))
7724 return const0_rtx;
7726 if (target == 0)
7727 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7729 /* If we get here, we have to generate the code for both cases
7730 (in range and out of range). */
7732 op0 = gen_label_rtx ();
7733 op1 = gen_label_rtx ();
7735 if (! (GET_CODE (index_val) == CONST_INT
7736 && GET_CODE (lo_r) == CONST_INT))
7737 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7738 GET_MODE (index_val), iunsignedp, op1);
7740 if (! (GET_CODE (index_val) == CONST_INT
7741 && GET_CODE (hi_r) == CONST_INT))
7742 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7743 GET_MODE (index_val), iunsignedp, op1);
7745 /* Calculate the element number of bit zero in the first word
7746 of the set. */
7747 if (GET_CODE (lo_r) == CONST_INT)
7748 rlow = GEN_INT (INTVAL (lo_r)
7749 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7750 else
7751 rlow = expand_binop (index_mode, and_optab, lo_r,
7752 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7753 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7755 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7756 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7758 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7759 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7760 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7761 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7763 addr = memory_address (byte_mode,
7764 expand_binop (index_mode, add_optab, diff,
7765 setaddr, NULL_RTX, iunsignedp,
7766 OPTAB_LIB_WIDEN));
7768 /* Extract the bit we want to examine. */
7769 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7770 gen_rtx_MEM (byte_mode, addr),
7771 make_tree (TREE_TYPE (index), rem),
7772 NULL_RTX, 1);
7773 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7774 GET_MODE (target) == byte_mode ? target : 0,
7775 1, OPTAB_LIB_WIDEN);
7777 if (result != target)
7778 convert_move (target, result, 1);
7780 /* Output the code to handle the out-of-range case. */
7781 emit_jump (op0);
7782 emit_label (op1);
7783 emit_move_insn (target, const0_rtx);
7784 emit_label (op0);
7785 return target;
7788 case WITH_CLEANUP_EXPR:
7789 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7791 WITH_CLEANUP_EXPR_RTL (exp)
7792 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7793 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7794 CLEANUP_EH_ONLY (exp));
7796 /* That's it for this cleanup. */
7797 TREE_OPERAND (exp, 1) = 0;
7799 return WITH_CLEANUP_EXPR_RTL (exp);
7801 case CLEANUP_POINT_EXPR:
7803 /* Start a new binding layer that will keep track of all cleanup
7804 actions to be performed. */
7805 expand_start_bindings (2);
7807 target_temp_slot_level = temp_slot_level;
7809 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7810 /* If we're going to use this value, load it up now. */
7811 if (! ignore)
7812 op0 = force_not_mem (op0);
7813 preserve_temp_slots (op0);
7814 expand_end_bindings (NULL_TREE, 0, 0);
7816 return op0;
7818 case CALL_EXPR:
7819 /* Check for a built-in function. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7821 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7822 == FUNCTION_DECL)
7823 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7825 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7826 == BUILT_IN_FRONTEND)
7827 return (*lang_hooks.expand_expr) (exp, original_target,
7828 tmode, modifier);
7829 else
7830 return expand_builtin (exp, target, subtarget, tmode, ignore);
7833 return expand_call (exp, target, ignore);
7835 case NON_LVALUE_EXPR:
7836 case NOP_EXPR:
7837 case CONVERT_EXPR:
7838 case REFERENCE_EXPR:
7839 if (TREE_OPERAND (exp, 0) == error_mark_node)
7840 return const0_rtx;
7842 if (TREE_CODE (type) == UNION_TYPE)
7844 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7846 /* If both input and output are BLKmode, this conversion isn't doing
7847 anything except possibly changing memory attributes. */
7848 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7850 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7851 modifier);
7853 result = copy_rtx (result);
7854 set_mem_attributes (result, exp, 0);
7855 return result;
7858 if (target == 0)
7859 target = assign_temp (type, 0, 1, 1);
7861 if (GET_CODE (target) == MEM)
7862 /* Store data into beginning of memory target. */
7863 store_expr (TREE_OPERAND (exp, 0),
7864 adjust_address (target, TYPE_MODE (valtype), 0),
7865 modifier == EXPAND_STACK_PARM ? 2 : 0);
7867 else if (GET_CODE (target) == REG)
7868 /* Store this field into a union of the proper type. */
7869 store_field (target,
7870 MIN ((int_size_in_bytes (TREE_TYPE
7871 (TREE_OPERAND (exp, 0)))
7872 * BITS_PER_UNIT),
7873 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7874 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7875 VOIDmode, 0, type, 0);
7876 else
7877 abort ();
7879 /* Return the entire union. */
7880 return target;
7883 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7885 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7886 modifier);
7888 /* If the signedness of the conversion differs and OP0 is
7889 a promoted SUBREG, clear that indication since we now
7890 have to do the proper extension. */
7891 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7892 && GET_CODE (op0) == SUBREG)
7893 SUBREG_PROMOTED_VAR_P (op0) = 0;
7895 return op0;
7898 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7899 if (GET_MODE (op0) == mode)
7900 return op0;
7902 /* If OP0 is a constant, just convert it into the proper mode. */
7903 if (CONSTANT_P (op0))
7905 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7906 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7908 if (modifier == EXPAND_INITIALIZER)
7909 return simplify_gen_subreg (mode, op0, inner_mode,
7910 subreg_lowpart_offset (mode,
7911 inner_mode));
7912 else
7913 return convert_modes (mode, inner_mode, op0,
7914 TREE_UNSIGNED (inner_type));
7917 if (modifier == EXPAND_INITIALIZER)
7918 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7920 if (target == 0)
7921 return
7922 convert_to_mode (mode, op0,
7923 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7924 else
7925 convert_move (target, op0,
7926 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7927 return target;
7929 case VIEW_CONVERT_EXPR:
7930 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7932 /* If the input and output modes are both the same, we are done.
7933 Otherwise, if neither mode is BLKmode and both are integral and within
7934 a word, we can use gen_lowpart. If neither is true, make sure the
7935 operand is in memory and convert the MEM to the new mode. */
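/* For example, a VIEW_CONVERT_EXPR between a single-field struct whose
   mode is SImode and an unsigned int can stay in a register via
   gen_lowpart, whereas a conversion involving a BLKmode aggregate has
   to go through memory as described above. */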
7936 if (TYPE_MODE (type) == GET_MODE (op0))
7938 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7939 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7940 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7941 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7942 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7943 op0 = gen_lowpart (TYPE_MODE (type), op0);
7944 else if (GET_CODE (op0) != MEM)
7946 /* If the operand is not a MEM, force it into memory. Since we
7947 are going to be changing the mode of the MEM, don't call
7948 force_const_mem for constants because we don't allow pool
7949 constants to change mode. */
7950 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7952 if (TREE_ADDRESSABLE (exp))
7953 abort ();
7955 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7956 target
7957 = assign_stack_temp_for_type
7958 (TYPE_MODE (inner_type),
7959 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7961 emit_move_insn (target, op0);
7962 op0 = target;
7965 /* At this point, OP0 is in the correct mode. If the output type is such
7966 that the operand is known to be aligned, indicate that it is.
7967 Otherwise, we need only be concerned about alignment for non-BLKmode
7968 results. */
7969 if (GET_CODE (op0) == MEM)
7971 op0 = copy_rtx (op0);
7973 if (TYPE_ALIGN_OK (type))
7974 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7975 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7976 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7978 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7979 HOST_WIDE_INT temp_size
7980 = MAX (int_size_in_bytes (inner_type),
7981 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7982 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7983 temp_size, 0, type);
7984 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7986 if (TREE_ADDRESSABLE (exp))
7987 abort ();
7989 if (GET_MODE (op0) == BLKmode)
7990 emit_block_move (new_with_op0_mode, op0,
7991 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7992 (modifier == EXPAND_STACK_PARM
7993 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7994 else
7995 emit_move_insn (new_with_op0_mode, op0);
7997 op0 = new;
8000 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8003 return op0;
8005 case PLUS_EXPR:
8006 this_optab = ! unsignedp && flag_trapv
8007 && (GET_MODE_CLASS (mode) == MODE_INT)
8008 ? addv_optab : add_optab;
8010 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
8011 something else, make sure we add the register to the constant and
8012 then to the other thing. This case can occur during strength
8013 reduction and doing it this way will produce better code if the
8014 frame pointer or argument pointer is eliminated.
8016 fold-const.c will ensure that the constant is always in the inner
8017 PLUS_EXPR, so the only case we need to do anything about is if
8018 sp, ap, or fp is our second argument, in which case we must swap
8019 the innermost first argument and our second argument. */
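/* E.g. (X + 4) + FP is rewritten here as (FP + 4) + X, so the constant
   can later fold with whatever FP is eliminated to. */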
8021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8022 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8023 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
8024 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8025 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8026 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8028 tree t = TREE_OPERAND (exp, 1);
8030 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8031 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8034 /* If the result is to be ptr_mode and we are adding an integer to
8035 something, we might be forming a constant. So try to use
8036 plus_constant. If it produces a sum and we can't accept it,
8037 use force_operand. This allows P = &ARR[const] to generate
8038 efficient code on machines where a SYMBOL_REF is not a valid
8039 address.
8041 If this is an EXPAND_SUM call, always return the sum. */
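/* For instance, &ARR[10] with 4-byte elements amounts to
   SYMBOL_REF plus 40; plus_constant can then fold the addition into a
   single CONST instead of emitting an add instruction. */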
8042 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8043 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8045 if (modifier == EXPAND_STACK_PARM)
8046 target = 0;
8047 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8048 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8049 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8051 rtx constant_part;
8053 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8054 EXPAND_SUM);
8055 /* Use immed_double_const to ensure that the constant is
8056 truncated according to the mode of OP1, then sign extended
8057 to a HOST_WIDE_INT. Using the constant directly can result
8058 in non-canonical RTL in a 64x32 cross compile. */
8059 constant_part
8060 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8061 (HOST_WIDE_INT) 0,
8062 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
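/* For instance, on a 64-bit host generating code for a 32-bit target,
   an SImode constant with its high bit set must become a CONST_INT
   sign-extended to the full host word; immed_double_const performs
   exactly that truncation and extension. */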
8063 op1 = plus_constant (op1, INTVAL (constant_part));
8064 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8065 op1 = force_operand (op1, target);
8066 return op1;
8069 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8070 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8071 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8073 rtx constant_part;
8075 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8076 (modifier == EXPAND_INITIALIZER
8077 ? EXPAND_INITIALIZER : EXPAND_SUM));
8078 if (! CONSTANT_P (op0))
8080 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8081 VOIDmode, modifier);
8082 /* Don't go to both_summands if modifier
8083 says it's not right to return a PLUS. */
8084 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8085 goto binop2;
8086 goto both_summands;
8088 /* Use immed_double_const to ensure that the constant is
8089 truncated according to the mode of OP1, then sign extended
8090 to a HOST_WIDE_INT. Using the constant directly can result
8091 in non-canonical RTL in a 64x32 cross compile. */
8092 constant_part
8093 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8094 (HOST_WIDE_INT) 0,
8095 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8096 op0 = plus_constant (op0, INTVAL (constant_part));
8097 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8098 op0 = force_operand (op0, target);
8099 return op0;
8103 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8104 subtarget = 0;
8106 /* No sense saving up arithmetic to be done
8107 if it's all in the wrong mode to form part of an address.
8108 And force_operand won't know whether to sign-extend or
8109 zero-extend. */
8110 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8111 || mode != ptr_mode)
8113 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8114 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8115 if (op0 == const0_rtx)
8116 return op1;
8117 if (op1 == const0_rtx)
8118 return op0;
8119 goto binop2;
8122 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8123 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8125 /* We come here from MINUS_EXPR when the second operand is a
8126 constant. */
8127 both_summands:
8128 /* Make sure any term that's a sum with a constant comes last. */
8129 if (GET_CODE (op0) == PLUS
8130 && CONSTANT_P (XEXP (op0, 1)))
8132 temp = op0;
8133 op0 = op1;
8134 op1 = temp;
8136 /* If adding to a sum including a constant,
8137 associate it to put the constant outside. */
8138 if (GET_CODE (op1) == PLUS
8139 && CONSTANT_P (XEXP (op1, 1)))
8141 rtx constant_term = const0_rtx;
8143 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8144 if (temp != 0)
8145 op0 = temp;
8146 /* Ensure that MULT comes first if there is one. */
8147 else if (GET_CODE (op0) == MULT)
8148 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8149 else
8150 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8152 /* Let's also eliminate constants from op0 if possible. */
8153 op0 = eliminate_constant_term (op0, &constant_term);
8155 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8156 their sum should be a constant. Form it into OP1, since the
8157 result we want will then be OP0 + OP1. */
8159 temp = simplify_binary_operation (PLUS, mode, constant_term,
8160 XEXP (op1, 1));
8161 if (temp != 0)
8162 op1 = temp;
8163 else
8164 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
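/* Tracing the code above: (REG + 4) + (SYM + 8) is first swapped so the
   term whose addend is constant comes last, then reassociated and
   folded into (REG + SYM) + 12. */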
8167 /* Put a constant term last and put a multiplication first. */
8168 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8169 temp = op1, op1 = op0, op0 = temp;
8171 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8172 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8174 case MINUS_EXPR:
8175 /* For initializers, we are allowed to return a MINUS of two
8176 symbolic constants. Here we handle all cases when both operands
8177 are constant. */
8178 /* Handle difference of two symbolic constants,
8179 for the sake of an initializer. */
8180 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8181 && really_constant_p (TREE_OPERAND (exp, 0))
8182 && really_constant_p (TREE_OPERAND (exp, 1)))
8184 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8185 modifier);
8186 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8187 modifier);
8189 /* If the last operand is a CONST_INT, use plus_constant of
8190 the negated constant. Else make the MINUS. */
8191 if (GET_CODE (op1) == CONST_INT)
8192 return plus_constant (op0, - INTVAL (op1));
8193 else
8194 return gen_rtx_MINUS (mode, op0, op1);
8197 this_optab = ! unsignedp && flag_trapv
8198 && (GET_MODE_CLASS(mode) == MODE_INT)
8199 ? subv_optab : sub_optab;
8201 /* No sense saving up arithmetic to be done
8202 if it's all in the wrong mode to form part of an address.
8203 And force_operand won't know whether to sign-extend or
8204 zero-extend. */
8205 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8206 || mode != ptr_mode)
8207 goto binop;
8209 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8210 subtarget = 0;
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8215 /* Convert A - const to A + (-const). */
8216 if (GET_CODE (op1) == CONST_INT)
8218 op1 = negate_rtx (mode, op1);
8219 goto both_summands;
8222 goto binop2;
8224 case MULT_EXPR:
8225 /* If first operand is constant, swap them.
8226 Thus the following special case checks need only
8227 check the second operand. */
8228 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8230 tree t1 = TREE_OPERAND (exp, 0);
8231 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8232 TREE_OPERAND (exp, 1) = t1;
8235 /* Attempt to return something suitable for generating an
8236 indexed address, for machines that support that. */
8238 if (modifier == EXPAND_SUM && mode == ptr_mode
8239 && host_integerp (TREE_OPERAND (exp, 1), 0))
8241 tree exp1 = TREE_OPERAND (exp, 1);
8243 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8244 EXPAND_SUM);
8246 /* If we knew for certain that this is arithmetic for an array
8247 reference, and we knew the bounds of the array, then we could
8248 apply the distributive law across (PLUS X C) for constant C.
8249 Without such knowledge, we risk overflowing the computation
8250 when both X and C are large, but X+C isn't. */
8251 /* ??? Could perhaps special-case EXP being unsigned and C being
8252 positive. In that case we are certain that X+C is no smaller
8253 than X and so the transformed expression will overflow iff the
8254 original would have. */
8256 if (GET_CODE (op0) != REG)
8257 op0 = force_operand (op0, NULL_RTX);
8258 if (GET_CODE (op0) != REG)
8259 op0 = copy_to_mode_reg (mode, op0);
8261 return gen_rtx_MULT (mode, op0,
8262 gen_int_mode (tree_low_cst (exp1, 0),
8263 TYPE_MODE (TREE_TYPE (exp1))));
8266 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8267 subtarget = 0;
8269 if (modifier == EXPAND_STACK_PARM)
8270 target = 0;
8272 /* Check for multiplying things that have been extended
8273 from a narrower type. If this machine supports multiplying
8274 in that narrower type with a result in the desired type,
8275 do it that way, and avoid the explicit type-conversion. */
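/* A typical case: (int) h1 * (int) h2 where h1 and h2 are HImode
   values. If the target provides a signed HImode-to-SImode widening
   multiply (smul_widen_optab), the product is formed directly in
   SImode without first extending the operands. */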
8276 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8277 && TREE_CODE (type) == INTEGER_TYPE
8278 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8279 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8280 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8281 && int_fits_type_p (TREE_OPERAND (exp, 1),
8282 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8283 /* Don't use a widening multiply if a shift will do. */
8284 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8285 > HOST_BITS_PER_WIDE_INT)
8286 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8288 || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8289 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8291 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8292 /* If both operands are extended, they must either both
8293 be zero-extended or both be sign-extended. */
8294 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8296 == TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))
8298 enum machine_mode innermode
8299 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8300 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8301 ? smul_widen_optab : umul_widen_optab);
8302 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8303 ? umul_widen_optab : smul_widen_optab);
8304 if (mode == GET_MODE_WIDER_MODE (innermode))
8306 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8308 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8309 NULL_RTX, VOIDmode, 0);
8310 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8311 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8312 VOIDmode, 0);
8313 else
8314 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8315 NULL_RTX, VOIDmode, 0);
8316 goto binop2;
8318 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8319 && innermode == word_mode)
8321 rtx htem;
8322 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8323 NULL_RTX, VOIDmode, 0);
8324 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8325 op1 = convert_modes (innermode, mode,
8326 expand_expr (TREE_OPERAND (exp, 1),
8327 NULL_RTX, VOIDmode, 0),
8328 unsignedp);
8329 else
8330 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8331 NULL_RTX, VOIDmode, 0);
8332 temp = expand_binop (mode, other_optab, op0, op1, target,
8333 unsignedp, OPTAB_LIB_WIDEN);
8334 htem = expand_mult_highpart_adjust (innermode,
8335 gen_highpart (innermode, temp),
8336 op0, op1,
8337 gen_highpart (innermode, temp),
8338 unsignedp);
8339 emit_move_insn (gen_highpart (innermode, temp), htem);
8340 return temp;
8344 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8345 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8346 return expand_mult (mode, op0, op1, target, unsignedp);
8348 case TRUNC_DIV_EXPR:
8349 case FLOOR_DIV_EXPR:
8350 case CEIL_DIV_EXPR:
8351 case ROUND_DIV_EXPR:
8352 case EXACT_DIV_EXPR:
8353 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8354 subtarget = 0;
8355 if (modifier == EXPAND_STACK_PARM)
8356 target = 0;
8357 /* Possible optimization: compute the dividend with EXPAND_SUM
8358 then, if the divisor is constant, optimize the case
8359 where some terms of the dividend have coefficients divisible by it. */
8360 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8361 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8362 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8364 case RDIV_EXPR:
8365 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8366 saving an expensive divide. If not, combine will rebuild the original
8367 computation. */
8368 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8369 && TREE_CODE (type) == REAL_TYPE
8370 && !real_onep (TREE_OPERAND (exp, 0)))
8371 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8372 build (RDIV_EXPR, type,
8373 build_real (type, dconst1),
8374 TREE_OPERAND (exp, 1))),
8375 target, tmode, modifier);
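/* With this rewrite, several divisions by the same B within one
   expression can share a single 1/B once CSE runs, which is the
   saving described above. */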
8376 this_optab = sdiv_optab;
8377 goto binop;
8379 case TRUNC_MOD_EXPR:
8380 case FLOOR_MOD_EXPR:
8381 case CEIL_MOD_EXPR:
8382 case ROUND_MOD_EXPR:
8383 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8384 subtarget = 0;
8385 if (modifier == EXPAND_STACK_PARM)
8386 target = 0;
8387 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8388 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8389 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8391 case FIX_ROUND_EXPR:
8392 case FIX_FLOOR_EXPR:
8393 case FIX_CEIL_EXPR:
8394 abort (); /* Not used for C. */
8396 case FIX_TRUNC_EXPR:
8397 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8398 if (target == 0 || modifier == EXPAND_STACK_PARM)
8399 target = gen_reg_rtx (mode);
8400 expand_fix (target, op0, unsignedp);
8401 return target;
8403 case FLOAT_EXPR:
8404 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8405 if (target == 0 || modifier == EXPAND_STACK_PARM)
8406 target = gen_reg_rtx (mode);
8407 /* expand_float can't figure out what to do if FROM has VOIDmode.
8408 So give it the correct mode. With -O, cse will optimize this. */
8409 if (GET_MODE (op0) == VOIDmode)
8410 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8411 op0);
8412 expand_float (target, op0,
8413 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8414 return target;
8416 case NEGATE_EXPR:
8417 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8418 if (modifier == EXPAND_STACK_PARM)
8419 target = 0;
8420 temp = expand_unop (mode,
8421 ! unsignedp && flag_trapv
8422 && (GET_MODE_CLASS(mode) == MODE_INT)
8423 ? negv_optab : neg_optab, op0, target, 0);
8424 if (temp == 0)
8425 abort ();
8426 return temp;
8428 case ABS_EXPR:
8429 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8430 if (modifier == EXPAND_STACK_PARM)
8431 target = 0;
8433 /* Handle complex values specially. */
8434 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8435 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8436 return expand_complex_abs (mode, op0, target, unsignedp);
8438 /* Unsigned abs is simply the operand. Testing here means we don't
8439 risk generating incorrect code below. */
8440 if (TREE_UNSIGNED (type))
8441 return op0;
8443 return expand_abs (mode, op0, target, unsignedp,
8444 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8446 case MAX_EXPR:
8447 case MIN_EXPR:
8448 target = original_target;
8449 if (target == 0
8450 || modifier == EXPAND_STACK_PARM
8451 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8452 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8453 || GET_MODE (target) != mode
8454 || (GET_CODE (target) == REG
8455 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8456 target = gen_reg_rtx (mode);
8457 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8458 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8460 /* First try to do it with a special MIN or MAX instruction.
8461 If that does not win, use a conditional jump to select the proper
8462 value. */
8463 this_optab = (TREE_UNSIGNED (type)
8464 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8465 : (code == MIN_EXPR ? smin_optab : smax_optab));
8467 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8468 OPTAB_WIDEN);
8469 if (temp != 0)
8470 return temp;
8472 /* At this point, a MEM target is no longer useful; we will get better
8473 code without it. */
8475 if (GET_CODE (target) == MEM)
8476 target = gen_reg_rtx (mode);
8478 if (target != op0)
8479 emit_move_insn (target, op0);
8481 op0 = gen_label_rtx ();
8483 /* If this mode is an integer too wide to compare properly,
8484 compare word by word. Rely on cse to optimize constant cases. */
8485 if (GET_MODE_CLASS (mode) == MODE_INT
8486 && ! can_compare_p (GE, mode, ccp_jump))
8488 if (code == MAX_EXPR)
8489 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8490 target, op1, NULL_RTX, op0);
8491 else
8492 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8493 op1, target, NULL_RTX, op0);
8495 else
8497 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8498 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8499 unsignedp, mode, NULL_RTX, NULL_RTX,
8500 op0);
8502 emit_move_insn (target, op1);
8503 emit_label (op0);
8504 return target;
8506 case BIT_NOT_EXPR:
8507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8508 if (modifier == EXPAND_STACK_PARM)
8509 target = 0;
8510 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8511 if (temp == 0)
8512 abort ();
8513 return temp;
8515 case FFS_EXPR:
8516 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8517 if (modifier == EXPAND_STACK_PARM)
8518 target = 0;
8519 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8520 if (temp == 0)
8521 abort ();
8522 return temp;
8524 case CLZ_EXPR:
8525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8526 temp = expand_unop (mode, clz_optab, op0, target, 1);
8527 if (temp == 0)
8528 abort ();
8529 return temp;
8531 case CTZ_EXPR:
8532 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8533 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8534 if (temp == 0)
8535 abort ();
8536 return temp;
8538 case POPCOUNT_EXPR:
8539 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8540 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8541 if (temp == 0)
8542 abort ();
8543 return temp;
8545 case PARITY_EXPR:
8546 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8547 temp = expand_unop (mode, parity_optab, op0, target, 1);
8548 if (temp == 0)
8549 abort ();
8550 return temp;
8552 /* ??? Can optimize bitwise operations with one arg constant.
8553 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8554 and (a bitwise1 b) bitwise2 b (etc)
8555 but that is probably not worthwhile. */
8557 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8558 boolean values when we want in all cases to compute both of them. In
8559 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8560 as actual zero-or-1 values and then bitwise anding. In cases where
8561 there cannot be any side effects, better code would be made by
8562 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8563 how to recognize those cases. */
8565 case TRUTH_AND_EXPR:
8566 case BIT_AND_EXPR:
8567 this_optab = and_optab;
8568 goto binop;
8570 case TRUTH_OR_EXPR:
8571 case BIT_IOR_EXPR:
8572 this_optab = ior_optab;
8573 goto binop;
8575 case TRUTH_XOR_EXPR:
8576 case BIT_XOR_EXPR:
8577 this_optab = xor_optab;
8578 goto binop;
8580 case LSHIFT_EXPR:
8581 case RSHIFT_EXPR:
8582 case LROTATE_EXPR:
8583 case RROTATE_EXPR:
8584 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8585 subtarget = 0;
8586 if (modifier == EXPAND_STACK_PARM)
8587 target = 0;
8588 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8589 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8590 unsignedp);
8592 /* Could determine the answer when only additive constants differ. Also,
8593 the addition of one can be handled by changing the condition. */
8594 case LT_EXPR:
8595 case LE_EXPR:
8596 case GT_EXPR:
8597 case GE_EXPR:
8598 case EQ_EXPR:
8599 case NE_EXPR:
8600 case UNORDERED_EXPR:
8601 case ORDERED_EXPR:
8602 case UNLT_EXPR:
8603 case UNLE_EXPR:
8604 case UNGT_EXPR:
8605 case UNGE_EXPR:
8606 case UNEQ_EXPR:
8607 temp = do_store_flag (exp,
8608 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8609 tmode != VOIDmode ? tmode : mode, 0);
8610 if (temp != 0)
8611 return temp;
8613 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8614 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8615 && original_target
8616 && GET_CODE (original_target) == REG
8617 && (GET_MODE (original_target)
8618 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8620 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8621 VOIDmode, 0);
8623 /* If temp is constant, we can just compute the result. */
8624 if (GET_CODE (temp) == CONST_INT)
8626 if (INTVAL (temp) != 0)
8627 emit_move_insn (target, const1_rtx);
8628 else
8629 emit_move_insn (target, const0_rtx);
8631 return target;
8634 if (temp != original_target)
8636 enum machine_mode mode1 = GET_MODE (temp);
8637 if (mode1 == VOIDmode)
8638 mode1 = tmode != VOIDmode ? tmode : mode;
8640 temp = copy_to_mode_reg (mode1, temp);
8643 op1 = gen_label_rtx ();
8644 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8645 GET_MODE (temp), unsignedp, op1);
8646 emit_move_insn (temp, const1_rtx);
8647 emit_label (op1);
8648 return temp;
8651 /* If no set-flag instruction, must generate a conditional
8652 store into a temporary variable. Drop through
8653 and handle this like && and ||. */
8655 case TRUTH_ANDIF_EXPR:
8656 case TRUTH_ORIF_EXPR:
8657 if (! ignore
8658 && (target == 0
8659 || modifier == EXPAND_STACK_PARM
8660 || ! safe_from_p (target, exp, 1)
8661 /* Make sure we don't have a hard reg (such as function's return
8662 value) live across basic blocks, if not optimizing. */
8663 || (!optimize && GET_CODE (target) == REG
8664 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8665 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8667 if (target)
8668 emit_clr_insn (target);
8670 op1 = gen_label_rtx ();
8671 jumpifnot (exp, op1);
8673 if (target)
8674 emit_0_to_1_insn (target);
8676 emit_label (op1);
8677 return ignore ? const0_rtx : target;
8679 case TRUTH_NOT_EXPR:
8680 if (modifier == EXPAND_STACK_PARM)
8681 target = 0;
8682 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8683 /* The parser is careful to generate TRUTH_NOT_EXPR
8684 only with operands that are always zero or one. */
8685 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8686 target, 1, OPTAB_LIB_WIDEN);
8687 if (temp == 0)
8688 abort ();
8689 return temp;
8691 case COMPOUND_EXPR:
8692 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8693 emit_queue ();
8694 return expand_expr (TREE_OPERAND (exp, 1),
8695 (ignore ? const0_rtx : target),
8696 VOIDmode, modifier);
8698 case COND_EXPR:
8699 /* If we would have a "singleton" (see below) were it not for a
8700 conversion in each arm, bring that conversion back out. */
8701 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8702 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8703 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8704 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8706 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8707 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8709 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8710 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8711 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8712 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8713 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8714 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8715 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8716 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8717 return expand_expr (build1 (NOP_EXPR, type,
8718 build (COND_EXPR, TREE_TYPE (iftrue),
8719 TREE_OPERAND (exp, 0),
8720 iftrue, iffalse)),
8721 target, tmode, modifier);
8725 /* Note that COND_EXPRs whose type is a structure or union
8726 are required to be constructed to contain assignments of
8727 a temporary variable, so that we can evaluate them here
8728 for side effect only. If type is void, we must do likewise. */
8730 /* If an arm of the branch requires a cleanup,
8731 only that cleanup is performed. */
8733 tree singleton = 0;
8734 tree binary_op = 0, unary_op = 0;
8736 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8737 convert it to our mode, if necessary. */
8738 if (integer_onep (TREE_OPERAND (exp, 1))
8739 && integer_zerop (TREE_OPERAND (exp, 2))
8740 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8742 if (ignore)
8744 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8745 modifier);
8746 return const0_rtx;
8749 if (modifier == EXPAND_STACK_PARM)
8750 target = 0;
8751 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8752 if (GET_MODE (op0) == mode)
8753 return op0;
8755 if (target == 0)
8756 target = gen_reg_rtx (mode);
8757 convert_move (target, op0, unsignedp);
8758 return target;
8761 /* Check for X ? A + B : A. If we have this, we can copy A to the
8762 output and conditionally add B. Similarly for unary operations.
8763 Don't do this if X has side-effects because those side effects
8764 might affect A or B and the "?" operation is a sequence point in
8765 ANSI. (operand_equal_p tests for side effects.) */
8767 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8768 && operand_equal_p (TREE_OPERAND (exp, 2),
8769 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8770 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8771 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8772 && operand_equal_p (TREE_OPERAND (exp, 1),
8773 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8774 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8775 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8776 && operand_equal_p (TREE_OPERAND (exp, 2),
8777 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8778 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8779 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8780 && operand_equal_p (TREE_OPERAND (exp, 1),
8781 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8782 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8784 /* If we are not to produce a result, we have no target. Otherwise,
8785 if a target was specified use it; it will not be used as an
8786 intermediate target unless it is safe. If no target, use a
8787 temporary. */
8789 if (ignore)
8790 temp = 0;
8791 else if (modifier == EXPAND_STACK_PARM)
8792 temp = assign_temp (type, 0, 0, 1);
8793 else if (original_target
8794 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8795 || (singleton && GET_CODE (original_target) == REG
8796 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8797 && original_target == var_rtx (singleton)))
8798 && GET_MODE (original_target) == mode
8799 #ifdef HAVE_conditional_move
8800 && (! can_conditionally_move_p (mode)
8801 || GET_CODE (original_target) == REG
8802 || TREE_ADDRESSABLE (type))
8803 #endif
8804 && (GET_CODE (original_target) != MEM
8805 || TREE_ADDRESSABLE (type)))
8806 temp = original_target;
8807 else if (TREE_ADDRESSABLE (type))
8808 abort ();
8809 else
8810 temp = assign_temp (type, 0, 0, 1);
8812 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8813 do the test of X as a store-flag operation, do this as
8814 A + ((X != 0) << log C). Similarly for other simple binary
8815 operators. Only do for C == 1 if BRANCH_COST is low. */
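/* Concretely: X ? A + 8 : A becomes A + ((X != 0) << 3), replacing a
   conditional branch with a store-flag operation and a shift. */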
8816 if (temp && singleton && binary_op
8817 && (TREE_CODE (binary_op) == PLUS_EXPR
8818 || TREE_CODE (binary_op) == MINUS_EXPR
8819 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8820 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8821 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8822 : integer_onep (TREE_OPERAND (binary_op, 1)))
8823 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8825 rtx result;
8826 tree cond;
8827 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8828 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8829 ? addv_optab : add_optab)
8830 : TREE_CODE (binary_op) == MINUS_EXPR
8831 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8832 ? subv_optab : sub_optab)
8833 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8834 : xor_optab);
8836 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8837 if (singleton == TREE_OPERAND (exp, 1))
8838 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8839 else
8840 cond = TREE_OPERAND (exp, 0);
8842 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8843 ? temp : NULL_RTX),
8844 mode, BRANCH_COST <= 1);
8846 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8847 result = expand_shift (LSHIFT_EXPR, mode, result,
8848 build_int_2 (tree_log2
8849 (TREE_OPERAND
8850 (binary_op, 1)), 0),
8852 (safe_from_p (temp, singleton, 1)
8853 ? temp : NULL_RTX), 0);
8855 if (result)
8857 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8858 return expand_binop (mode, boptab, op1, result, temp,
8859 unsignedp, OPTAB_LIB_WIDEN);
8863 do_pending_stack_adjust ();
8864 NO_DEFER_POP;
8865 op0 = gen_label_rtx ();
8867 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8869 if (temp != 0)
8871 /* If the target conflicts with the other operand of the
8872 binary op, we can't use it. Also, we can't use the target
8873 if it is a hard register, because evaluating the condition
8874 might clobber it. */
8875 if ((binary_op
8876 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8877 || (GET_CODE (temp) == REG
8878 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8879 temp = gen_reg_rtx (mode);
8880 store_expr (singleton, temp,
8881 modifier == EXPAND_STACK_PARM ? 2 : 0);
8883 else
8884 expand_expr (singleton,
8885 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8886 if (singleton == TREE_OPERAND (exp, 1))
8887 jumpif (TREE_OPERAND (exp, 0), op0);
8888 else
8889 jumpifnot (TREE_OPERAND (exp, 0), op0);
8891 start_cleanup_deferral ();
8892 if (binary_op && temp == 0)
8893 /* Just touch the other operand. */
8894 expand_expr (TREE_OPERAND (binary_op, 1),
8895 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8896 else if (binary_op)
8897 store_expr (build (TREE_CODE (binary_op), type,
8898 make_tree (type, temp),
8899 TREE_OPERAND (binary_op, 1)),
8900 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8901 else
8902 store_expr (build1 (TREE_CODE (unary_op), type,
8903 make_tree (type, temp)),
8904 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8905 op1 = op0;
8907 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8908 comparison operator. If we have one of these cases, set the
8909 output to A, branch on A (cse will merge these two references),
8910 then set the output to FOO. */
8911 else if (temp
8912 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8913 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8914 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8915 TREE_OPERAND (exp, 1), 0)
8916 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8917 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8918 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8920 if (GET_CODE (temp) == REG
8921 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8922 temp = gen_reg_rtx (mode);
8923 store_expr (TREE_OPERAND (exp, 1), temp,
8924 modifier == EXPAND_STACK_PARM ? 2 : 0);
8925 jumpif (TREE_OPERAND (exp, 0), op0);
8927 start_cleanup_deferral ();
8928 store_expr (TREE_OPERAND (exp, 2), temp,
8929 modifier == EXPAND_STACK_PARM ? 2 : 0);
8930 op1 = op0;
8932 else if (temp
8933 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8934 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8935 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8936 TREE_OPERAND (exp, 2), 0)
8937 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8938 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8939 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8941 if (GET_CODE (temp) == REG
8942 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8943 temp = gen_reg_rtx (mode);
8944 store_expr (TREE_OPERAND (exp, 2), temp,
8945 modifier == EXPAND_STACK_PARM ? 2 : 0);
8946 jumpifnot (TREE_OPERAND (exp, 0), op0);
8948 start_cleanup_deferral ();
8949 store_expr (TREE_OPERAND (exp, 1), temp,
8950 modifier == EXPAND_STACK_PARM ? 2 : 0);
8951 op1 = op0;
8953 else
8955 op1 = gen_label_rtx ();
8956 jumpifnot (TREE_OPERAND (exp, 0), op0);
8958 start_cleanup_deferral ();
8960 /* One branch of the cond can be void, if it never returns. For
8961 example A ? throw : E. */
8962 if (temp != 0
8963 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8964 store_expr (TREE_OPERAND (exp, 1), temp,
8965 modifier == EXPAND_STACK_PARM ? 2 : 0);
8966 else
8967 expand_expr (TREE_OPERAND (exp, 1),
8968 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8969 end_cleanup_deferral ();
8970 emit_queue ();
8971 emit_jump_insn (gen_jump (op1));
8972 emit_barrier ();
8973 emit_label (op0);
8974 start_cleanup_deferral ();
8975 if (temp != 0
8976 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8977 store_expr (TREE_OPERAND (exp, 2), temp,
8978 modifier == EXPAND_STACK_PARM ? 2 : 0);
8979 else
8980 expand_expr (TREE_OPERAND (exp, 2),
8981 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8984 end_cleanup_deferral ();
8986 emit_queue ();
8987 emit_label (op1);
8988 OK_DEFER_POP;
8990 return temp;
8993 case TARGET_EXPR:
8995 /* Something needs to be initialized, but we didn't know
8996 where that thing was when building the tree. For example,
8997 it could be the return value of a function, or a parameter
8998 to a function which is laid out on the stack, or a temporary
8999 variable which must be passed by reference.
9001 We guarantee that the expression will either be constructed
9002 or copied into our original target. */
9004 tree slot = TREE_OPERAND (exp, 0);
9005 tree cleanups = NULL_TREE;
9006 tree exp1;
9008 if (TREE_CODE (slot) != VAR_DECL)
9009 abort ();
9011 if (! ignore)
9012 target = original_target;
9014 /* Set this here so that if we get a target that refers to a
9015 register variable that's already been used, put_reg_into_stack
9016 knows that it should fix up those uses. */
9017 TREE_USED (slot) = 1;
9019 if (target == 0)
9021 if (DECL_RTL_SET_P (slot))
9023 target = DECL_RTL (slot);
9024 /* We have already expanded the slot, so don't do
9025 it again. (mrs) */
9026 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9027 return target;
9029 else
9031 target = assign_temp (type, 2, 0, 1);
9032 /* All temp slots at this level must not conflict. */
9033 preserve_temp_slots (target);
9034 SET_DECL_RTL (slot, target);
9035 if (TREE_ADDRESSABLE (slot))
9036 put_var_into_stack (slot, /*rescan=*/false);
9038 /* Since SLOT is not known to the called function
9039 to belong to its stack frame, we must build an explicit
9040 cleanup. This case occurs when we must build up a reference
9041 to pass the reference as an argument. In this case,
9042 it is very likely that such a reference need not be
9043 built here. */
9045 if (TREE_OPERAND (exp, 2) == 0)
9046 TREE_OPERAND (exp, 2)
9047 = (*lang_hooks.maybe_build_cleanup) (slot);
9048 cleanups = TREE_OPERAND (exp, 2);
9051 else
9053 /* This case does occur, when expanding a parameter which
9054 needs to be constructed on the stack. The target
9055 is the actual stack address that we want to initialize.
9056 The function we call will perform the cleanup in this case. */
9058 /* If we have already assigned it space, use that space,
9059 not the target that we were passed in, as our target
9060 parameter is only a hint. */
9061 if (DECL_RTL_SET_P (slot))
9063 target = DECL_RTL (slot);
9064 /* We have already expanded the slot, so don't do
9065 it again. (mrs) */
9066 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9067 return target;
9069 else
9071 SET_DECL_RTL (slot, target);
9072 /* If we must have an addressable slot, then make sure that
9073 the RTL that we just stored in slot is OK. */
9074 if (TREE_ADDRESSABLE (slot))
9075 put_var_into_stack (slot, /*rescan=*/true);
9079 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9080 /* Mark it as expanded. */
9081 TREE_OPERAND (exp, 1) = NULL_TREE;
9083 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9085 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9087 return target;
9090 case INIT_EXPR:
9092 tree lhs = TREE_OPERAND (exp, 0);
9093 tree rhs = TREE_OPERAND (exp, 1);
9095 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9096 return temp;
9099 case MODIFY_EXPR:
9101 /* If lhs is complex, expand calls in rhs before computing it.
9102 That's so we don't compute a pointer and save it over a
9103 call. If lhs is simple, compute it first so we can give it
9104 as a target if the rhs is just a call. This avoids an
9105 extra temp and copy, and prevents a partial subsumption
9106 which makes bad code. Actually we could treat
9107 component_ref's of vars like vars. */
9109 tree lhs = TREE_OPERAND (exp, 0);
9110 tree rhs = TREE_OPERAND (exp, 1);
9112 temp = 0;
9114 /* Check for |= or &= of a bitfield of size one into another bitfield
9115 of size 1. In this case, (unless we need the result of the
9116 assignment) we can do this more efficiently with a
9117 test followed by an assignment, if necessary.
9119 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9120 things change so we do, this code should be enhanced to
9121 support it. */
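/* For example, with one-bit fields and the result unused,
   "a.x |= b.y" is expanded as "if (b.y) a.x = 1;" and
   "a.x &= b.y" as "if (! b.y) a.x = 0;".  */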
9122 if (ignore
9123 && TREE_CODE (lhs) == COMPONENT_REF
9124 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9125 || TREE_CODE (rhs) == BIT_AND_EXPR)
9126 && TREE_OPERAND (rhs, 0) == lhs
9127 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9128 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9129 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9131 rtx label = gen_label_rtx ();
9133 do_jump (TREE_OPERAND (rhs, 1),
9134 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9135 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9136 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9137 (TREE_CODE (rhs) == BIT_IOR_EXPR
9138 ? integer_one_node
9139 : integer_zero_node)),
9140 0, 0);
9141 do_pending_stack_adjust ();
9142 emit_label (label);
9143 return const0_rtx;
9146 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9148 return temp;
9151 case RETURN_EXPR:
9152 if (!TREE_OPERAND (exp, 0))
9153 expand_null_return ();
9154 else
9155 expand_return (TREE_OPERAND (exp, 0));
9156 return const0_rtx;
9158 case PREINCREMENT_EXPR:
9159 case PREDECREMENT_EXPR:
9160 return expand_increment (exp, 0, ignore);
9162 case POSTINCREMENT_EXPR:
9163 case POSTDECREMENT_EXPR:
9164 /* Faster to treat as pre-increment if result is not used. */
9165 return expand_increment (exp, ! ignore, ignore);
9167 case ADDR_EXPR:
9168 if (modifier == EXPAND_STACK_PARM)
9169 target = 0;
9170 /* Are we taking the address of a nested function? */
9171 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9172 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9173 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9174 && ! TREE_STATIC (exp))
9176 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9177 op0 = force_operand (op0, target);
9179 /* If we are taking the address of something erroneous, just
9180 return a zero. */
9181 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9182 return const0_rtx;
9183 /* If we are taking the address of a constant and are at the
9184 top level, we have to use output_constant_def since we can't
9185 call force_const_mem at top level. */
9186 else if (cfun == 0
9187 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9188 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9189 == 'c')))
9190 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9191 else
9193 /* We make sure to pass const0_rtx down if we came in with
9194 ignore set, to avoid doing the cleanups twice. */
9195 op0 = expand_expr (TREE_OPERAND (exp, 0),
9196 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9197 (modifier == EXPAND_INITIALIZER
9198 ? modifier : EXPAND_CONST_ADDRESS));
9200 /* If we are going to ignore the result, OP0 will have been set
9201 to const0_rtx, so just return it. Don't get confused and
9202 think we are taking the address of the constant. */
9203 if (ignore)
9204 return op0;
9206 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9207 clever and return a REG when given a MEM. */
9208 op0 = protect_from_queue (op0, 1);
9210 /* We would like the object in memory. If it is a constant, we can
9211 have it be statically allocated into memory. For a non-constant,
9212 we need to allocate some memory and store the value into it. */
9214 if (CONSTANT_P (op0))
9215 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9216 op0);
9217 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9218 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9219 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9221 /* If the operand is a SAVE_EXPR, we can deal with this by
9222 forcing the SAVE_EXPR into memory. */
9223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9225 put_var_into_stack (TREE_OPERAND (exp, 0),
9226 /*rescan=*/true);
9227 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9229 else
9231 /* If this object is in a register, it can't be BLKmode. */
9232 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9233 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9235 if (GET_CODE (op0) == PARALLEL)
9236 /* Handle calls that pass values in multiple
9237 non-contiguous locations. The Irix 6 ABI has examples
9238 of this. */
9239 emit_group_store (memloc, op0,
9240 int_size_in_bytes (inner_type));
9241 else
9242 emit_move_insn (memloc, op0);
9244 op0 = memloc;
9248 if (GET_CODE (op0) != MEM)
9249 abort ();
9251 mark_temp_addr_taken (op0);
9252 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9254 op0 = XEXP (op0, 0);
9255 #ifdef POINTERS_EXTEND_UNSIGNED
9256 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9257 && mode == ptr_mode)
9258 op0 = convert_memory_address (ptr_mode, op0);
9259 #endif
9260 return op0;
9263 /* If OP0 is not aligned at least as much as the type requires, we
9264 need to make a temporary, copy OP0 to it, and take the address of
9265 the temporary. We want to use the alignment of the type, not of
9266 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9267 the test for BLKmode means that can't happen. The test for
9268 BLKmode is because we never make mis-aligned MEMs with
9269 non-BLKmode.
9271 We don't need to do this at all if the machine doesn't have
9272 strict alignment. */
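/* For example, a BLKmode field of a packed structure may be less
   aligned than its type requires; taking its address then means
   copying it to a suitably aligned stack temporary first, unless the
   type is TREE_ADDRESSABLE, in which case we error out below.  */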
9273 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9274 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9275 > MEM_ALIGN (op0))
9276 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9278 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9279 rtx new;
9281 if (TYPE_ALIGN_OK (inner_type))
9282 abort ();
9284 if (TREE_ADDRESSABLE (inner_type))
9286 /* We can't make a bitwise copy of this object, so fail. */
9287 error ("cannot take the address of an unaligned member");
9288 return const0_rtx;
9291 new = assign_stack_temp_for_type
9292 (TYPE_MODE (inner_type),
9293 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9294 : int_size_in_bytes (inner_type),
9295 1, build_qualified_type (inner_type,
9296 (TYPE_QUALS (inner_type)
9297 | TYPE_QUAL_CONST)));
9299 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9300 (modifier == EXPAND_STACK_PARM
9301 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9303 op0 = new;
9306 op0 = force_operand (XEXP (op0, 0), target);
9309 if (flag_force_addr
9310 && GET_CODE (op0) != REG
9311 && modifier != EXPAND_CONST_ADDRESS
9312 && modifier != EXPAND_INITIALIZER
9313 && modifier != EXPAND_SUM)
9314 op0 = force_reg (Pmode, op0);
9316 if (GET_CODE (op0) == REG
9317 && ! REG_USERVAR_P (op0))
9318 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9320 #ifdef POINTERS_EXTEND_UNSIGNED
9321 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9322 && mode == ptr_mode)
9323 op0 = convert_memory_address (ptr_mode, op0);
9324 #endif
9326 return op0;
9328 case ENTRY_VALUE_EXPR:
9329 abort ();
9331 /* COMPLEX type for Extended Pascal & Fortran */
9332 case COMPLEX_EXPR:
9334 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9335 rtx insns;
9337 /* Get the rtx code of the operands. */
9338 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9339 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9341 if (! target)
9342 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9344 start_sequence ();
9346 /* Move the real (op0) and imaginary (op1) parts to their location. */
9347 emit_move_insn (gen_realpart (mode, target), op0);
9348 emit_move_insn (gen_imagpart (mode, target), op1);
9350 insns = get_insns ();
9351 end_sequence ();
9353 /* Complex construction should appear as a single unit. */
9354 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9355 each with a separate pseudo as destination.
9356 It's not correct for flow to treat them as a unit. */
9357 if (GET_CODE (target) != CONCAT)
9358 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9359 else
9360 emit_insn (insns);
9362 return target;
9365 case REALPART_EXPR:
9366 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9367 return gen_realpart (mode, op0);
9369 case IMAGPART_EXPR:
9370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9371 return gen_imagpart (mode, op0);
9373 case CONJ_EXPR:
9375 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9376 rtx imag_t;
9377 rtx insns;
9379 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9381 if (! target)
9382 target = gen_reg_rtx (mode);
9384 start_sequence ();
9386 /* Store the realpart and the negated imagpart to target. */
9387 emit_move_insn (gen_realpart (partmode, target),
9388 gen_realpart (partmode, op0));
9390 imag_t = gen_imagpart (partmode, target);
9391 temp = expand_unop (partmode,
9392 ! unsignedp && flag_trapv
9393 && (GET_MODE_CLASS(partmode) == MODE_INT)
9394 ? negv_optab : neg_optab,
9395 gen_imagpart (partmode, op0), imag_t, 0);
9396 if (temp != imag_t)
9397 emit_move_insn (imag_t, temp);
9399 insns = get_insns ();
9400 end_sequence ();
9402 /* Conjugate should appear as a single unit.
9403 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9404 each with a separate pseudo as destination.
9405 It's not correct for flow to treat them as a unit. */
9406 if (GET_CODE (target) != CONCAT)
9407 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9408 else
9409 emit_insn (insns);
9411 return target;
9414 case TRY_CATCH_EXPR:
9416 tree handler = TREE_OPERAND (exp, 1);
9418 expand_eh_region_start ();
9420 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9422 expand_eh_region_end_cleanup (handler);
9424 return op0;
9427 case TRY_FINALLY_EXPR:
9429 tree try_block = TREE_OPERAND (exp, 0);
9430 tree finally_block = TREE_OPERAND (exp, 1);
9432 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9434 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9435 is not sufficient, so we cannot expand the block twice.
9436 So we play games with GOTO_SUBROUTINE_EXPR to let us
9437 expand the thing only once. */
9438 /* When not optimizing, we go ahead with this form since
9439 (1) user breakpoints operate more predictably without
9440 code duplication, and
9441 (2) we're not running any of the global optimizers
9442 that would explode in time/space with the highly
9443 connected CFG created by the indirect branching. */
9445 rtx finally_label = gen_label_rtx ();
9446 rtx done_label = gen_label_rtx ();
9447 rtx return_link = gen_reg_rtx (Pmode);
9448 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9449 (tree) finally_label, (tree) return_link);
9450 TREE_SIDE_EFFECTS (cleanup) = 1;
9452 /* Start a new binding layer that will keep track of all cleanup
9453 actions to be performed. */
9454 expand_start_bindings (2);
9455 target_temp_slot_level = temp_slot_level;
9457 expand_decl_cleanup (NULL_TREE, cleanup);
9458 op0 = expand_expr (try_block, target, tmode, modifier);
9460 preserve_temp_slots (op0);
9461 expand_end_bindings (NULL_TREE, 0, 0);
9462 emit_jump (done_label);
9463 emit_label (finally_label);
9464 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9465 emit_indirect_jump (return_link);
9466 emit_label (done_label);
9468 else
9470 expand_start_bindings (2);
9471 target_temp_slot_level = temp_slot_level;
9473 expand_decl_cleanup (NULL_TREE, finally_block);
9474 op0 = expand_expr (try_block, target, tmode, modifier);
9476 preserve_temp_slots (op0);
9477 expand_end_bindings (NULL_TREE, 0, 0);
9480 return op0;
9483 case GOTO_SUBROUTINE_EXPR:
9485 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9486 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9487 rtx return_address = gen_label_rtx ();
9488 emit_move_insn (return_link,
9489 gen_rtx_LABEL_REF (Pmode, return_address));
9490 emit_jump (subr);
9491 emit_label (return_address);
9492 return const0_rtx;
9495 case VA_ARG_EXPR:
9496 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9498 case EXC_PTR_EXPR:
9499 return get_exception_pointer (cfun);
9501 case FDESC_EXPR:
9502 /* Function descriptors are not valid except as
9503 initialization constants, and should not be expanded. */
9504 abort ();
9506 default:
9507 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9510 /* Here to do an ordinary binary operator, generating an instruction
9511 from the optab already placed in `this_optab'. */
9512 binop:
9513 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9514 subtarget = 0;
9515 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9516 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9517 binop2:
9518 if (modifier == EXPAND_STACK_PARM)
9519 target = 0;
9520 temp = expand_binop (mode, this_optab, op0, op1, target,
9521 unsignedp, OPTAB_LIB_WIDEN);
9522 if (temp == 0)
9523 abort ();
9524 return temp;
9527 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9528 when applied to the address of EXP produces an address known to be
9529 aligned more than BIGGEST_ALIGNMENT. */
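/* Such an offset has the form (- (ADDR_EXPR of EXP, or of a
   PLACEHOLDER_EXPR of EXP's type)) & (ALIGN - 1), i.e. the number of
   bytes needed to round the address up to an ALIGN-byte boundary,
   where ALIGN - 1 exceeds BIGGEST_ALIGNMENT.  */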
9531 static int
9532 is_aligning_offset (offset, exp)
9533 tree offset;
9534 tree exp;
9536 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9537 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9538 || TREE_CODE (offset) == NOP_EXPR
9539 || TREE_CODE (offset) == CONVERT_EXPR
9540 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9541 offset = TREE_OPERAND (offset, 0);
9543 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9544 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9545 if (TREE_CODE (offset) != BIT_AND_EXPR
9546 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9547 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9548 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9549 return 0;
9551 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9552 It must be NEGATE_EXPR. Then strip any more conversions. */
9553 offset = TREE_OPERAND (offset, 0);
9554 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9555 || TREE_CODE (offset) == NOP_EXPR
9556 || TREE_CODE (offset) == CONVERT_EXPR)
9557 offset = TREE_OPERAND (offset, 0);
9559 if (TREE_CODE (offset) != NEGATE_EXPR)
9560 return 0;
9562 offset = TREE_OPERAND (offset, 0);
9563 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9564 || TREE_CODE (offset) == NOP_EXPR
9565 || TREE_CODE (offset) == CONVERT_EXPR)
9566 offset = TREE_OPERAND (offset, 0);
9568 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9569 whose type is the same as EXP. */
9570 return (TREE_CODE (offset) == ADDR_EXPR
9571 && (TREE_OPERAND (offset, 0) == exp
9572 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9573 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9574 == TREE_TYPE (exp)))));
9577 /* Return the tree node if ARG corresponds to a string constant, or zero
9578 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9579 in bytes within the string that ARG is accessing. The type of the
9580 offset will be `sizetype'. */
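/* For example, if ARG is the address of the string "abc" plus 2,
   this returns the STRING_CST for "abc" and sets *PTR_OFFSET to 2.  */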
9582 tree
9583 string_constant (arg, ptr_offset)
9584 tree arg;
9585 tree *ptr_offset;
9587 STRIP_NOPS (arg);
9589 if (TREE_CODE (arg) == ADDR_EXPR
9590 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9592 *ptr_offset = size_zero_node;
9593 return TREE_OPERAND (arg, 0);
9595 else if (TREE_CODE (arg) == PLUS_EXPR)
9597 tree arg0 = TREE_OPERAND (arg, 0);
9598 tree arg1 = TREE_OPERAND (arg, 1);
9600 STRIP_NOPS (arg0);
9601 STRIP_NOPS (arg1);
9603 if (TREE_CODE (arg0) == ADDR_EXPR
9604 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9606 *ptr_offset = convert (sizetype, arg1);
9607 return TREE_OPERAND (arg0, 0);
9609 else if (TREE_CODE (arg1) == ADDR_EXPR
9610 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9612 *ptr_offset = convert (sizetype, arg0);
9613 return TREE_OPERAND (arg1, 0);
9617 return 0;
9620 /* Expand code for a post- or pre- increment or decrement
9621 and return the RTX for the result.
9622 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9624 static rtx
9625 expand_increment (exp, post, ignore)
9626 tree exp;
9627 int post, ignore;
9629 rtx op0, op1;
9630 rtx temp, value;
9631 tree incremented = TREE_OPERAND (exp, 0);
9632 optab this_optab = add_optab;
9633 int icode;
9634 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9635 int op0_is_copy = 0;
9636 int single_insn = 0;
9637 /* 1 means we can't store into OP0 directly,
9638 because it is a subreg narrower than a word,
9639 and we don't dare clobber the rest of the word. */
9640 int bad_subreg = 0;
9642 /* Stabilize any component ref that might need to be
9643 evaluated more than once below. */
9644 if (!post
9645 || TREE_CODE (incremented) == BIT_FIELD_REF
9646 || (TREE_CODE (incremented) == COMPONENT_REF
9647 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9648 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9649 incremented = stabilize_reference (incremented);
9650 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9651 ones into save exprs so that they don't accidentally get evaluated
9652 more than once by the code below. */
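/* For example, in "++ ++i" the inner "++i" must be wrapped so that
   it is expanded exactly once.  */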
9653 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9654 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9655 incremented = save_expr (incremented);
9657 /* Compute the operands as RTX.
9658 Note whether OP0 is the actual lvalue or a copy of it:
9659 I believe it is a copy iff it is a register or subreg
9660 and insns were generated in computing it. */
9662 temp = get_last_insn ();
9663 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9665 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9666 in place but instead must do sign- or zero-extension during assignment,
9667 so we copy it into a new register and let the code below use it as
9668 a copy.
9670 Note that we can safely modify this SUBREG since it is known not to be
9671 shared (it was made by the expand_expr call above). */
9673 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9675 if (post)
9676 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9677 else
9678 bad_subreg = 1;
9680 else if (GET_CODE (op0) == SUBREG
9681 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9683 /* We cannot increment this SUBREG in place. If we are
9684 post-incrementing, get a copy of the old value. Otherwise,
9685 just mark that we cannot increment in place. */
9686 if (post)
9687 op0 = copy_to_reg (op0);
9688 else
9689 bad_subreg = 1;
9692 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9693 && temp != get_last_insn ());
9694 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9696 /* Decide whether incrementing or decrementing. */
9697 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9698 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9699 this_optab = sub_optab;
9701 /* Convert decrement by a constant into a negative increment. */
9702 if (this_optab == sub_optab
9703 && GET_CODE (op1) == CONST_INT)
9705 op1 = GEN_INT (-INTVAL (op1));
9706 this_optab = add_optab;
9709 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9710 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9712 /* For a preincrement, see if we can do this with a single instruction. */
9713 if (!post)
9715 icode = (int) this_optab->handlers[(int) mode].insn_code;
9716 if (icode != (int) CODE_FOR_nothing
9717 /* Make sure that OP0 is valid for operands 0 and 1
9718 of the insn we want to queue. */
9719 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9720 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9721 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9722 single_insn = 1;
9725 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9726 then we cannot just increment OP0. We must therefore contrive to
9727 increment the original value. Then, for postincrement, we can return
9728 OP0 since it is a copy of the old value. For preincrement, expand here
9729 unless we can do it with a single insn.
9731 Likewise if storing directly into OP0 would clobber high bits
9732 we need to preserve (bad_subreg). */
9733 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9735 /* This is the easiest way to increment the value wherever it is.
9736 Problems with multiple evaluation of INCREMENTED are prevented
9737 because either (1) it is a component_ref or preincrement,
9738 in which case it was stabilized above, or (2) it is an array_ref
9739 with constant index in an array in a register, which is
9740 safe to reevaluate. */
9741 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9742 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9743 ? MINUS_EXPR : PLUS_EXPR),
9744 TREE_TYPE (exp),
9745 incremented,
9746 TREE_OPERAND (exp, 1));
9748 while (TREE_CODE (incremented) == NOP_EXPR
9749 || TREE_CODE (incremented) == CONVERT_EXPR)
9751 newexp = convert (TREE_TYPE (incremented), newexp);
9752 incremented = TREE_OPERAND (incremented, 0);
9755 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9756 return post ? op0 : temp;
9759 if (post)
9761 /* We have a true reference to the value in OP0.
9762 If there is an insn to add or subtract in this mode, queue it.
9763 Queueing the increment insn avoids the register shuffling
9764 that often results if we must increment now and first save
9765 the old value for subsequent use. */
9767 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9768 op0 = stabilize (op0);
9769 #endif
9771 icode = (int) this_optab->handlers[(int) mode].insn_code;
9772 if (icode != (int) CODE_FOR_nothing
9773 /* Make sure that OP0 is valid for operands 0 and 1
9774 of the insn we want to queue. */
9775 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9776 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9778 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9779 op1 = force_reg (mode, op1);
9781 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9783 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9785 rtx addr = (general_operand (XEXP (op0, 0), mode)
9786 ? force_reg (Pmode, XEXP (op0, 0))
9787 : copy_to_reg (XEXP (op0, 0)));
9788 rtx temp, result;
9790 op0 = replace_equiv_address (op0, addr);
9791 temp = force_reg (GET_MODE (op0), op0);
9792 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9793 op1 = force_reg (mode, op1);
9795 /* The increment queue is LIFO, thus we have to `queue'
9796 the instructions in reverse order. */
9797 enqueue_insn (op0, gen_move_insn (op0, temp));
9798 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9799 return result;
9803 /* Preincrement, or we can't increment with one simple insn. */
9804 if (post)
9805 /* Save a copy of the value before inc or dec, to return it later. */
9806 temp = value = copy_to_reg (op0);
9807 else
9808 /* Arrange to return the incremented value. */
9809 /* Copy the rtx because expand_binop will protect from the queue,
9810 and the results of that would be invalid for us to return
9811 if our caller does emit_queue before using our result. */
9812 temp = copy_rtx (value = op0);
9814 /* Increment however we can. */
9815 op1 = expand_binop (mode, this_optab, value, op1, op0,
9816 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9818 /* Make sure the value is stored into OP0. */
9819 if (op1 != op0)
9820 emit_move_insn (op0, op1);
9822 return temp;
9825 /* Generate code to calculate EXP using a store-flag instruction
9826 and return an rtx for the result. EXP is either a comparison
9827 or a TRUTH_NOT_EXPR whose operand is a comparison.
9829 If TARGET is nonzero, store the result there if convenient.
9831 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9832 cheap.
9834 Return zero if there is no suitable set-flag instruction
9835 available on this machine.
9837 Once expand_expr has been called on the arguments of the comparison,
9838 we are committed to doing the store flag, since it is not safe to
9839 re-evaluate the expression. We emit the store-flag insn by calling
9840 emit_store_flag, but only expand the arguments if we have a reason
9841 to believe that emit_store_flag will be successful. If we think that
9842 it will, but it isn't, we have to simulate the store-flag with a
9843 set/jump/set sequence. */
9845 static rtx
9846 do_store_flag (exp, target, mode, only_cheap)
9847 tree exp;
9848 rtx target;
9849 enum machine_mode mode;
9850 int only_cheap;
9852 enum rtx_code code;
9853 tree arg0, arg1, type;
9854 tree tem;
9855 enum machine_mode operand_mode;
9856 int invert = 0;
9857 int unsignedp;
9858 rtx op0, op1;
9859 enum insn_code icode;
9860 rtx subtarget = target;
9861 rtx result, label;
9863 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9864 result at the end. We can't simply invert the test since it would
9865 have already been inverted if it were valid. This case occurs for
9866 some floating-point comparisons. */
9868 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9869 invert = 1, exp = TREE_OPERAND (exp, 0);
9871 arg0 = TREE_OPERAND (exp, 0);
9872 arg1 = TREE_OPERAND (exp, 1);
9874 /* Don't crash if the comparison was erroneous. */
9875 if (arg0 == error_mark_node || arg1 == error_mark_node)
9876 return const0_rtx;
9878 type = TREE_TYPE (arg0);
9879 operand_mode = TYPE_MODE (type);
9880 unsignedp = TREE_UNSIGNED (type);
9882 /* We won't bother with BLKmode store-flag operations because it would mean
9883 passing a lot of information to emit_store_flag. */
9884 if (operand_mode == BLKmode)
9885 return 0;
9887 /* We won't bother with store-flag operations involving function pointers
9888 when function pointers must be canonicalized before comparisons. */
9889 #ifdef HAVE_canonicalize_funcptr_for_compare
9890 if (HAVE_canonicalize_funcptr_for_compare
9891 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9892 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9893 == FUNCTION_TYPE))
9894 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9895 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9896 == FUNCTION_TYPE))))
9897 return 0;
9898 #endif
9900 STRIP_NOPS (arg0);
9901 STRIP_NOPS (arg1);
9903 /* Get the rtx comparison code to use. We know that EXP is a comparison
9904 operation of some type. Some comparisons against 1 and -1 can be
9905 converted to comparisons with zero. Do so here so that the tests
9906 below will be aware that we have a comparison with zero. These
9907 tests will not catch constants in the first operand, but constants
9908 are rarely passed as the first operand. */
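/* For example, "x < 1" becomes "x <= 0", and for signed operands
   "x > -1" becomes "x >= 0".  */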
9910 switch (TREE_CODE (exp))
9912 case EQ_EXPR:
9913 code = EQ;
9914 break;
9915 case NE_EXPR:
9916 code = NE;
9917 break;
9918 case LT_EXPR:
9919 if (integer_onep (arg1))
9920 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9921 else
9922 code = unsignedp ? LTU : LT;
9923 break;
9924 case LE_EXPR:
9925 if (! unsignedp && integer_all_onesp (arg1))
9926 arg1 = integer_zero_node, code = LT;
9927 else
9928 code = unsignedp ? LEU : LE;
9929 break;
9930 case GT_EXPR:
9931 if (! unsignedp && integer_all_onesp (arg1))
9932 arg1 = integer_zero_node, code = GE;
9933 else
9934 code = unsignedp ? GTU : GT;
9935 break;
9936 case GE_EXPR:
9937 if (integer_onep (arg1))
9938 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9939 else
9940 code = unsignedp ? GEU : GE;
9941 break;
9943 case UNORDERED_EXPR:
9944 code = UNORDERED;
9945 break;
9946 case ORDERED_EXPR:
9947 code = ORDERED;
9948 break;
9949 case UNLT_EXPR:
9950 code = UNLT;
9951 break;
9952 case UNLE_EXPR:
9953 code = UNLE;
9954 break;
9955 case UNGT_EXPR:
9956 code = UNGT;
9957 break;
9958 case UNGE_EXPR:
9959 code = UNGE;
9960 break;
9961 case UNEQ_EXPR:
9962 code = UNEQ;
9963 break;
9965 default:
9966 abort ();
9969 /* Put a constant second. */
9970 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9972 tem = arg0; arg0 = arg1; arg1 = tem;
9973 code = swap_condition (code);
9976 /* If this is an equality or inequality test of a single bit, we can
9977 do this by shifting the bit being tested to the low-order bit and
9978 masking the result with the constant 1. If the condition was EQ,
9979 we xor it with 1. This does not require an scc insn and is faster
9980 than an scc insn even if we have it. */
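/* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and
   "(x & 4) == 0" becomes "((x >> 2) ^ 1) & 1".  */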
9982 if ((code == NE || code == EQ)
9983 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9984 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9986 tree inner = TREE_OPERAND (arg0, 0);
9987 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9988 int ops_unsignedp;
9990 /* If INNER is a right shift of a constant and it plus BITNUM does
9991 not overflow, adjust BITNUM and INNER. */
9993 if (TREE_CODE (inner) == RSHIFT_EXPR
9994 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9995 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9996 && bitnum < TYPE_PRECISION (type)
9997 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9998 bitnum - TYPE_PRECISION (type)))
10000 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10001 inner = TREE_OPERAND (inner, 0);
10004 /* If we are going to be able to omit the AND below, we must do our
10005 operations as unsigned. If we must use the AND, we have a choice.
10006 Normally unsigned is faster, but for some machines signed is. */
10007 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10008 #ifdef LOAD_EXTEND_OP
10009 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10010 #else
10012 #endif
10015 if (! get_subtarget (subtarget)
10016 || GET_MODE (subtarget) != operand_mode
10017 || ! safe_from_p (subtarget, inner, 1))
10018 subtarget = 0;
10020 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10022 if (bitnum != 0)
10023 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10024 size_int (bitnum), subtarget, ops_unsignedp);
10026 if (GET_MODE (op0) != mode)
10027 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10029 if ((code == EQ && ! invert) || (code == NE && invert))
10030 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10031 ops_unsignedp, OPTAB_LIB_WIDEN);
10033 /* Put the AND last so it can combine with more things. */
10034 if (bitnum != TYPE_PRECISION (type) - 1)
10035 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10037 return op0;
10040 /* Now see if we are likely to be able to do this. Return if not. */
10041 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10042 return 0;
10044 icode = setcc_gen_code[(int) code];
10045 if (icode == CODE_FOR_nothing
10046 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10048 /* We can only do this if it is one of the special cases that
10049 can be handled without an scc insn. */
10050 if ((code == LT && integer_zerop (arg1))
10051 || (! only_cheap && code == GE && integer_zerop (arg1)))
10053 else if (BRANCH_COST >= 0
10054 && ! only_cheap && (code == NE || code == EQ)
10055 && TREE_CODE (type) != REAL_TYPE
10056 && ((abs_optab->handlers[(int) operand_mode].insn_code
10057 != CODE_FOR_nothing)
10058 || (ffs_optab->handlers[(int) operand_mode].insn_code
10059 != CODE_FOR_nothing)))
10061 else
10062 return 0;
10065 if (! get_subtarget (target)
10066 || GET_MODE (subtarget) != operand_mode
10067 || ! safe_from_p (subtarget, arg1, 1))
10068 subtarget = 0;
10070 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10071 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10073 if (target == 0)
10074 target = gen_reg_rtx (mode);
10076 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10077 because, if emit_store_flag does anything, it will succeed and
10078 OP0 and OP1 will not be used subsequently. */
10080 result = emit_store_flag (target, code,
10081 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10082 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10083 operand_mode, unsignedp, 1);
10085 if (result)
10087 if (invert)
10088 result = expand_binop (mode, xor_optab, result, const1_rtx,
10089 result, 0, OPTAB_LIB_WIDEN);
10090 return result;
10093 /* If this failed, we have to do this with set/compare/jump/set code. */
10094 if (GET_CODE (target) != REG
10095 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10096 target = gen_reg_rtx (GET_MODE (target));
10098 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10099 result = compare_from_rtx (op0, op1, code, unsignedp,
10100 operand_mode, NULL_RTX);
10101 if (GET_CODE (result) == CONST_INT)
10102 return (((result == const0_rtx && ! invert)
10103 || (result != const0_rtx && invert))
10104 ? const0_rtx : const1_rtx);
10106 /* The code of RESULT may not match CODE if compare_from_rtx
10107 decided to swap its operands and reverse the original code.
10109 We know that compare_from_rtx returns either a CONST_INT or
10110 a new comparison code, so it is safe to just extract the
10111 code from RESULT. */
10112 code = GET_CODE (result);
10114 label = gen_label_rtx ();
10115 if (bcc_gen_fctn[(int) code] == 0)
10116 abort ();
10118 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10119 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10120 emit_label (label);
10122 return target;
10126 /* Stubs in case we haven't got a casesi insn. */
10127 #ifndef HAVE_casesi
10128 # define HAVE_casesi 0
10129 # define gen_casesi(a, b, c, d, e) (0)
10130 # define CODE_FOR_casesi CODE_FOR_nothing
10131 #endif
10133 /* If the machine does not have a case insn that compares the bounds,
10134 this means extra overhead for dispatch tables, which raises the
10135 threshold for using them. */
10136 #ifndef CASE_VALUES_THRESHOLD
10137 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10138 #endif /* CASE_VALUES_THRESHOLD */
10140 unsigned int
10141 case_values_threshold ()
10143 return CASE_VALUES_THRESHOLD;
10146 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10147 0 otherwise (i.e. if there is no casesi instruction). */
10149 try_casesi (index_type, index_expr, minval, range,
10150 table_label, default_label)
10151 tree index_type, index_expr, minval, range;
10152 rtx table_label ATTRIBUTE_UNUSED;
10153 rtx default_label;
10155 enum machine_mode index_mode = SImode;
10156 int index_bits = GET_MODE_BITSIZE (index_mode);
10157 rtx op1, op2, index;
10158 enum machine_mode op_mode;
10160 if (! HAVE_casesi)
10161 return 0;
10163 /* Convert the index to SImode. */
10164 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10166 enum machine_mode omode = TYPE_MODE (index_type);
10167 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10169 /* We must handle the endpoints in the original mode. */
10170 index_expr = build (MINUS_EXPR, index_type,
10171 index_expr, minval);
10172 minval = integer_zero_node;
10173 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10174 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10175 omode, 1, default_label);
10176 /* Now we can safely truncate. */
10177 index = convert_to_mode (index_mode, index, 0);
10179 else
10181 if (TYPE_MODE (index_type) != index_mode)
10183 index_expr = convert ((*lang_hooks.types.type_for_size)
10184 (index_bits, 0), index_expr);
10185 index_type = TREE_TYPE (index_expr);
10188 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10190 emit_queue ();
10191 index = protect_from_queue (index, 0);
10192 do_pending_stack_adjust ();
10194 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10195 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10196 (index, op_mode))
10197 index = copy_to_mode_reg (op_mode, index);
10199 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10201 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10202 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10203 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10204 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10205 (op1, op_mode))
10206 op1 = copy_to_mode_reg (op_mode, op1);
10208 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10210 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10211 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10212 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10213 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10214 (op2, op_mode))
10215 op2 = copy_to_mode_reg (op_mode, op2);
10217 emit_jump_insn (gen_casesi (index, op1, op2,
10218 table_label, default_label));
10219 return 1;
10222 /* Attempt to generate a tablejump instruction; same concept. */
10223 #ifndef HAVE_tablejump
10224 #define HAVE_tablejump 0
10225 #define gen_tablejump(x, y) (0)
10226 #endif
10228 /* Subroutine of the next function.
10230 INDEX is the value being switched on, with the lowest value
10231 in the table already subtracted.
10232 MODE is its expected mode (needed if INDEX is constant).
10233 RANGE is the length of the jump table.
10234 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10236 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10237 index value is out of range. */
10239 static void
10240 do_tablejump (index, mode, range, table_label, default_label)
10241 rtx index, range, table_label, default_label;
10242 enum machine_mode mode;
10244 rtx temp, vector;
10246 if (INTVAL (range) > cfun->max_jumptable_ents)
10247 cfun->max_jumptable_ents = INTVAL (range);
10249 /* Do an unsigned comparison (in the proper mode) between the index
10250 expression and the value which represents the length of the range.
10251 Since we just finished subtracting the lower bound of the range
10252 from the index expression, this comparison allows us to simultaneously
10253 check that the original index expression value is both greater than
10254 or equal to the minimum value of the range and less than or equal to
10255 the maximum value of the range. */
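/* For example, if the case values run from 5 to 15, RANGE is 10; an
   original index of 3 becomes (unsigned) -2 after the subtraction,
   which compares greater than 10, so it jumps to DEFAULT_LABEL just
   as an out-of-range value like 20 (now 15) would.  */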
10257 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10258 default_label);
10260 /* If index is in range, it must fit in Pmode.
10261 Convert to Pmode so we can index with it. */
10262 if (mode != Pmode)
10263 index = convert_to_mode (Pmode, index, 1);
10265 /* Don't let a MEM slip thru, because then INDEX that comes
10266 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10267 and break_out_memory_refs will go to work on it and mess it up. */
10268 #ifdef PIC_CASE_VECTOR_ADDRESS
10269 if (flag_pic && GET_CODE (index) != REG)
10270 index = copy_to_mode_reg (Pmode, index);
10271 #endif
10273 /* If flag_force_addr were to affect this address
10274 it could interfere with the tricky assumptions made
10275 about addresses that contain label-refs,
10276 which may be valid only very near the tablejump itself. */
10277 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10278 GET_MODE_SIZE, because this indicates how large insns are. The other
10279 uses should all be Pmode, because they are addresses. This code
10280 could fail if addresses and insns are not the same size. */
10281 index = gen_rtx_PLUS (Pmode,
10282 gen_rtx_MULT (Pmode, index,
10283 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10284 gen_rtx_LABEL_REF (Pmode, table_label));
10285 #ifdef PIC_CASE_VECTOR_ADDRESS
10286 if (flag_pic)
10287 index = PIC_CASE_VECTOR_ADDRESS (index);
10288 else
10289 #endif
10290 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10291 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10292 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10293 RTX_UNCHANGING_P (vector) = 1;
10294 MEM_NOTRAP_P (vector) = 1;
10295 convert_move (temp, vector, 0);
10297 emit_jump_insn (gen_tablejump (temp, table_label));
10299 /* If we are generating PIC code or if the table is PC-relative, the
10300 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10301 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10302 emit_barrier ();
10306 try_tablejump (index_type, index_expr, minval, range,
10307 table_label, default_label)
10308 tree index_type, index_expr, minval, range;
10309 rtx table_label, default_label;
10311 rtx index;
10313 if (! HAVE_tablejump)
10314 return 0;
10316 index_expr = fold (build (MINUS_EXPR, index_type,
10317 convert (index_type, index_expr),
10318 convert (index_type, minval)));
10319 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10320 emit_queue ();
10321 index = protect_from_queue (index, 0);
10322 do_pending_stack_adjust ();
10324 do_tablejump (index, TYPE_MODE (index_type),
10325 convert_modes (TYPE_MODE (index_type),
10326 TYPE_MODE (TREE_TYPE (range)),
10327 expand_expr (range, NULL_RTX,
10328 VOIDmode, 0),
10329 TREE_UNSIGNED (TREE_TYPE (range))),
10330 table_label, default_label);
10331 return 1;
10334 /* Nonzero if the mode is a valid vector mode for this architecture.
10335 This returns nonzero even if there is no hardware support for the
10336 vector mode, but we can emulate with narrower modes. */
10339 vector_mode_valid_p (mode)
10340 enum machine_mode mode;
10342 enum mode_class class = GET_MODE_CLASS (mode);
10343 enum machine_mode innermode;
10345 /* Doh! What's going on? */
10346 if (class != MODE_VECTOR_INT
10347 && class != MODE_VECTOR_FLOAT)
10348 return 0;
10350 /* Hardware support. Woo hoo! */
10351 if (VECTOR_MODE_SUPPORTED_P (mode))
10352 return 1;
10354 innermode = GET_MODE_INNER (mode);
10356 /* We should probably return 1 if requesting V4DI and we have no DI
10357 but do have V2DI; however, this is probably very unlikely. */
10359 /* If we have support for the inner mode, we can safely emulate it.
10360 We may not have V2DI, but we can emulate with a pair of DIs. */
10361 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10364 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10365 static rtx
10366 const_vector_from_tree (exp)
10367 tree exp;
10369 rtvec v;
10370 int units, i;
10371 tree link, elt;
10372 enum machine_mode inner, mode;
10374 mode = TYPE_MODE (TREE_TYPE (exp));
10376 if (is_zeros_p (exp))
10377 return CONST0_RTX (mode);
10379 units = GET_MODE_NUNITS (mode);
10380 inner = GET_MODE_INNER (mode);
10382 v = rtvec_alloc (units);
10384 link = TREE_VECTOR_CST_ELTS (exp);
10385 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10387 elt = TREE_VALUE (link);
10389 if (TREE_CODE (elt) == REAL_CST)
10390 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10391 inner);
10392 else
10393 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10394 TREE_INT_CST_HIGH (elt),
10395 inner);
10398 return gen_rtx_raw_CONST_VECTOR (mode, v);
10401 #include "gt-expr.h"