[official-gcc.git] / gcc / expr.c
blob d20342413339a92556564774777c0d7642daa877
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
51 /* Decide whether a function's arguments should be processed
52 from first to last or from last to first.
54 They should if the stack and args grow in opposite directions, but
55 only if we have push insns. */
57 #ifdef PUSH_ROUNDING
59 #ifndef PUSH_ARGS_REVERSED
60 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
61 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #endif
63 #endif
65 #endif
67 #ifndef STACK_PUSH_CODE
68 #ifdef STACK_GROWS_DOWNWARD
69 #define STACK_PUSH_CODE PRE_DEC
70 #else
71 #define STACK_PUSH_CODE PRE_INC
72 #endif
73 #endif
75 /* Assume that case vectors are not pc-relative. */
76 #ifndef CASE_VECTOR_PC_RELATIVE
77 #define CASE_VECTOR_PC_RELATIVE 0
78 #endif
80 /* Convert defined/undefined to boolean. */
81 #ifdef TARGET_MEM_FUNCTIONS
82 #undef TARGET_MEM_FUNCTIONS
83 #define TARGET_MEM_FUNCTIONS 1
84 #else
85 #define TARGET_MEM_FUNCTIONS 0
86 #endif
89 /* If this is nonzero, we do not bother generating VOLATILE
90 around volatile memory references, and we are willing to
91 output indirect addresses. If cse is to follow, we reject
92 indirect addresses so a useful potential cse is generated;
93 if it is used only once, instruction combination will produce
94 the same indirect address eventually. */
95 int cse_not_expected;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
101 be performed. */
102 struct move_by_pieces
104 rtx to;
105 rtx to_addr;
106 int autinc_to;
107 int explicit_inc_to;
108 rtx from;
109 rtx from_addr;
110 int autinc_from;
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
114 int reverse;
117 /* This structure is used by store_by_pieces to describe the clear to
118 be performed. */
120 struct store_by_pieces
122 rtx to;
123 rtx to_addr;
124 int autinc_to;
125 int explicit_inc_to;
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
129 PTR constfundata;
130 int reverse;
133 static rtx enqueue_insn PARAMS ((rtx, rtx));
134 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
135 PARAMS ((unsigned HOST_WIDE_INT,
136 unsigned int));
137 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
138 struct move_by_pieces *));
139 static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
140 static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
141 static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
142 static tree emit_block_move_libcall_fn PARAMS ((int));
143 static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
144 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 enum machine_mode));
146 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 unsigned int));
148 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 unsigned int));
150 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 enum machine_mode,
152 struct store_by_pieces *));
153 static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
154 static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
155 static tree clear_storage_libcall_fn PARAMS ((int));
156 static rtx compress_float_constant PARAMS ((rtx, rtx));
157 static rtx get_subtarget PARAMS ((rtx));
158 static int is_zeros_p PARAMS ((tree));
159 static int mostly_zeros_p PARAMS ((tree));
160 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, tree, int, int));
163 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
164 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
165 HOST_WIDE_INT, enum machine_mode,
166 tree, enum machine_mode, int, tree,
167 int));
168 static rtx var_rtx PARAMS ((tree));
170 static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
171 static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
172 tree));
174 static int is_aligning_offset PARAMS ((tree, tree));
175 static rtx expand_increment PARAMS ((tree, int, int));
176 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
177 #ifdef PUSH_ROUNDING
178 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
179 #endif
180 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
181 static rtx const_vector_from_tree PARAMS ((tree));
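#if 0
/* Illustrative sketch, not part of the original expr.c: the shape of a
   CONSTFUN callback for struct store_by_pieces above.  The callback is
   handed the user-supplied CONSTFUNDATA pointer, the byte OFFSET of the
   piece being stored and the MODE of that piece, and must return an rtx
   constant to store.  clear_by_pieces uses this pattern via
   clear_by_pieces_1, which always returns zero.  All names below are
   hypothetical.  */
static rtx
example_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return const0_rtx;
}
#endif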
183 /* Record for each mode whether we can move a register directly to or
184 from an object of that mode in memory. If we can't, we won't try
185 to use that mode directly when accessing a field of that mode. */
187 static char direct_load[NUM_MACHINE_MODES];
188 static char direct_store[NUM_MACHINE_MODES];
190 /* Record for each mode whether we can float-extend from memory. */
192 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
194 /* If a memory-to-memory move would take MOVE_RATIO or more simple
195 move-instruction sequences, we will do a movstr or libcall instead. */
197 #ifndef MOVE_RATIO
198 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
199 #define MOVE_RATIO 2
200 #else
201 /* If we are optimizing for space (-Os), cut down the default move ratio. */
202 #define MOVE_RATIO (optimize_size ? 3 : 15)
203 #endif
204 #endif
206 /* This macro is used to determine whether move_by_pieces should be called
207 to perform a structure copy. */
208 #ifndef MOVE_BY_PIECES_P
209 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
210 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
211 #endif
213 /* If a clear memory operation would take CLEAR_RATIO or more simple
214 move-instruction sequences, we will do a clrstr or libcall instead. */
216 #ifndef CLEAR_RATIO
217 #if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
218 #define CLEAR_RATIO 2
219 #else
220 /* If we are optimizing for space, cut down the default clear ratio. */
221 #define CLEAR_RATIO (optimize_size ? 3 : 15)
222 #endif
223 #endif
225 /* This macro is used to determine whether clear_by_pieces should be
226 called to clear storage. */
227 #ifndef CLEAR_BY_PIECES_P
228 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
229 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
230 #endif
232 /* This macro is used to determine whether store_by_pieces should be
233 called to "memset" storage with byte values other than zero, or
234 to "memcpy" storage when the source is a constant string. */
235 #ifndef STORE_BY_PIECES_P
236 #define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
237 #endif
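/* Worked example (editorial illustration, not part of the original expr.c):
   on a 32-bit target with word-aligned operands, a 16-byte copy needs
   16/4 = 4 word-mode moves, so move_by_pieces_ninsns returns 4.  With the
   default MOVE_RATIO of 15, MOVE_BY_PIECES_P (16, 32) is 4 < 15, i.e. true,
   and the copy is expanded inline; under -Os, where MOVE_RATIO is 3, it is
   4 < 3, i.e. false, and a movstr pattern or libcall is used instead.  */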
239 /* This array records the insn_code of insns to perform block moves. */
240 enum insn_code movstr_optab[NUM_MACHINE_MODES];
242 /* This array records the insn_code of insns to perform block clears. */
243 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
245 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
247 #ifndef SLOW_UNALIGNED_ACCESS
248 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
249 #endif
251 /* This is run once per compilation to set up which modes can be used
252 directly in memory and to initialize the block move optab. */
254 void
255 init_expr_once ()
257 rtx insn, pat;
258 enum machine_mode mode;
259 int num_clobbers;
260 rtx mem, mem1;
261 rtx reg;
263 /* Try indexing by frame ptr and try by stack ptr.
264 It is known that on the Convex the stack ptr isn't a valid index.
265 With luck, one or the other is valid on any machine. */
266 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
267 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
269 /* A scratch register we can modify in-place below to avoid
270 useless RTL allocations. */
271 reg = gen_rtx_REG (VOIDmode, -1);
273 insn = rtx_alloc (INSN);
274 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
275 PATTERN (insn) = pat;
277 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
278 mode = (enum machine_mode) ((int) mode + 1))
280 int regno;
282 direct_load[(int) mode] = direct_store[(int) mode] = 0;
283 PUT_MODE (mem, mode);
284 PUT_MODE (mem1, mode);
285 PUT_MODE (reg, mode);
287 /* See if there is some register that can be used in this mode and
288 directly loaded or stored from memory. */
290 if (mode != VOIDmode && mode != BLKmode)
291 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
292 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
293 regno++)
295 if (! HARD_REGNO_MODE_OK (regno, mode))
296 continue;
298 REGNO (reg) = regno;
300 SET_SRC (pat) = mem;
301 SET_DEST (pat) = reg;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_load[(int) mode] = 1;
305 SET_SRC (pat) = mem1;
306 SET_DEST (pat) = reg;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_load[(int) mode] = 1;
310 SET_SRC (pat) = reg;
311 SET_DEST (pat) = mem;
312 if (recog (pat, insn, &num_clobbers) >= 0)
313 direct_store[(int) mode] = 1;
315 SET_SRC (pat) = reg;
316 SET_DEST (pat) = mem1;
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 direct_store[(int) mode] = 1;
322 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
324 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
325 mode = GET_MODE_WIDER_MODE (mode))
327 enum machine_mode srcmode;
328 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
329 srcmode = GET_MODE_WIDER_MODE (srcmode))
331 enum insn_code ic;
333 ic = can_extend_p (mode, srcmode, 0);
334 if (ic == CODE_FOR_nothing)
335 continue;
337 PUT_MODE (mem, srcmode);
339 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
340 float_extend_from_mem[mode][srcmode] = true;
345 /* This is run at the start of compiling a function. */
347 void
348 init_expr ()
350 cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));
352 pending_chain = 0;
353 pending_stack_adjust = 0;
354 stack_pointer_delta = 0;
355 inhibit_defer_pop = 0;
356 saveregs_value = 0;
357 apply_args_value = 0;
358 forced_labels = 0;
361 /* Small sanity check that the queue is empty at the end of a function. */
363 void
364 finish_expr_for_function ()
366 if (pending_chain)
367 abort ();
370 /* Manage the queue of increment instructions to be output
371 for POSTINCREMENT_EXPR expressions, etc. */
373 /* Queue up to increment (or change) VAR later. BODY says how:
374 BODY should be the same thing you would pass to emit_insn
375 to increment right away. It will go to emit_insn later on.
377 The value is a QUEUED expression to be used in place of VAR
378 where you want to guarantee the pre-incrementation value of VAR. */
380 static rtx
381 enqueue_insn (var, body)
382 rtx var, body;
384 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
385 body, pending_chain);
386 return pending_chain;
389 /* Use protect_from_queue to convert a QUEUED expression
390 into something that you can put immediately into an instruction.
391 If the queued incrementation has not happened yet,
392 protect_from_queue returns the variable itself.
393 If the incrementation has happened, protect_from_queue returns a temp
394 that contains a copy of the old value of the variable.
396 Any time an rtx which might possibly be a QUEUED is to be put
397 into an instruction, it must be passed through protect_from_queue first.
398 QUEUED expressions are not meaningful in instructions.
400 Do not pass a value through protect_from_queue and then hold
401 on to it for a while before putting it in an instruction!
402 If the queue is flushed in between, incorrect code will result. */
404 rtx
405 protect_from_queue (x, modify)
406 rtx x;
407 int modify;
409 RTX_CODE code = GET_CODE (x);
411 #if 0 /* A QUEUED can hang around after the queue is forced out. */
412 /* Shortcut for most common case. */
413 if (pending_chain == 0)
414 return x;
415 #endif
417 if (code != QUEUED)
419 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
420 use of autoincrement. Make a copy of the contents of the memory
421 location rather than a copy of the address, but not if the value is
422 of mode BLKmode. Don't modify X in place since it might be
423 shared. */
424 if (code == MEM && GET_MODE (x) != BLKmode
425 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
427 rtx y = XEXP (x, 0);
428 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
430 if (QUEUED_INSN (y))
432 rtx temp = gen_reg_rtx (GET_MODE (x));
434 emit_insn_before (gen_move_insn (temp, new),
435 QUEUED_INSN (y));
436 return temp;
439 /* Copy the address into a pseudo, so that the returned value
440 remains correct across calls to emit_queue. */
441 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
444 /* Otherwise, recursively protect the subexpressions of all
445 the kinds of rtx's that can contain a QUEUED. */
446 if (code == MEM)
448 rtx tem = protect_from_queue (XEXP (x, 0), 0);
449 if (tem != XEXP (x, 0))
451 x = copy_rtx (x);
452 XEXP (x, 0) = tem;
455 else if (code == PLUS || code == MULT)
457 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
458 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
459 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
461 x = copy_rtx (x);
462 XEXP (x, 0) = new0;
463 XEXP (x, 1) = new1;
466 return x;
468 /* If the increment has not happened, use the variable itself. Copy it
469 into a new pseudo so that the value remains correct across calls to
470 emit_queue. */
471 if (QUEUED_INSN (x) == 0)
472 return copy_to_reg (QUEUED_VAR (x));
473 /* If the increment has happened and a pre-increment copy exists,
474 use that copy. */
475 if (QUEUED_COPY (x) != 0)
476 return QUEUED_COPY (x);
477 /* The increment has happened but we haven't set up a pre-increment copy.
478 Set one up now, and use it. */
479 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
480 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
481 QUEUED_INSN (x));
482 return QUEUED_COPY (x);
485 /* Return nonzero if X contains a QUEUED expression:
486 if it contains anything that will be altered by a queued increment.
487 We handle only combinations of MEM, PLUS, MINUS and MULT operators
488 since memory addresses generally contain only those. */
490 int
491 queued_subexp_p (x)
492 rtx x;
494 enum rtx_code code = GET_CODE (x);
495 switch (code)
497 case QUEUED:
498 return 1;
499 case MEM:
500 return queued_subexp_p (XEXP (x, 0));
501 case MULT:
502 case PLUS:
503 case MINUS:
504 return (queued_subexp_p (XEXP (x, 0))
505 || queued_subexp_p (XEXP (x, 1)));
506 default:
507 return 0;
511 /* Perform all the pending incrementations. */
513 void
514 emit_queue ()
516 rtx p;
517 while ((p = pending_chain))
519 rtx body = QUEUED_BODY (p);
521 switch (GET_CODE (body))
523 case INSN:
524 case JUMP_INSN:
525 case CALL_INSN:
526 case CODE_LABEL:
527 case BARRIER:
528 case NOTE:
529 QUEUED_INSN (p) = body;
530 emit_insn (body);
531 break;
533 #ifdef ENABLE_CHECKING
534 case SEQUENCE:
535 abort ();
536 break;
537 #endif
539 default:
540 QUEUED_INSN (p) = emit_insn (body);
541 break;
544 pending_chain = QUEUED_NEXT (p);
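#if 0
/* Illustrative sketch, not part of the original expr.c: the intended life
   cycle of the increment queue when expanding a post-increment such as
   "a[i++]".  All names here are hypothetical.  */
rtx i_rtx = ...;                                /* rtx holding I            */
rtx inc   = gen_add2_insn (i_rtx, const1_rtx);  /* the deferred I += 1      */
rtx q     = enqueue_insn (i_rtx, inc);          /* QUEUED placeholder       */
/* ... use protect_from_queue (q, 0) wherever an insn needs the
   pre-increment value of I ... */
emit_queue ();                                  /* finally emit the I += 1  */
#endif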
548 /* Copy data from FROM to TO, where the machine modes are not the same.
549 Both modes may be integer, or both may be floating.
550 UNSIGNEDP should be nonzero if FROM is an unsigned type.
551 This causes zero-extension instead of sign-extension. */
553 void
554 convert_move (to, from, unsignedp)
555 rtx to, from;
556 int unsignedp;
558 enum machine_mode to_mode = GET_MODE (to);
559 enum machine_mode from_mode = GET_MODE (from);
560 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
561 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
562 enum insn_code code;
563 rtx libcall;
565 /* rtx code for making an equivalent value. */
566 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
567 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
569 to = protect_from_queue (to, 1);
570 from = protect_from_queue (from, 0);
572 if (to_real != from_real)
573 abort ();
575 /* If FROM is a SUBREG that indicates that we have already done at least
576 the required extension, strip it. We don't handle such SUBREGs as
577 TO here. */
579 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
580 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
581 >= GET_MODE_SIZE (to_mode))
582 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
583 from = gen_lowpart (to_mode, from), from_mode = to_mode;
585 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
586 abort ();
588 if (to_mode == from_mode
589 || (from_mode == VOIDmode && CONSTANT_P (from)))
591 emit_move_insn (to, from);
592 return;
595 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
597 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
598 abort ();
600 if (VECTOR_MODE_P (to_mode))
601 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
602 else
603 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
605 emit_move_insn (to, from);
606 return;
609 if (to_real != from_real)
610 abort ();
612 if (to_real)
614 rtx value, insns;
616 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
618 /* Try converting directly if the insn is supported. */
619 if ((code = can_extend_p (to_mode, from_mode, 0))
620 != CODE_FOR_nothing)
622 emit_unop_insn (code, to, from, UNKNOWN);
623 return;
627 #ifdef HAVE_trunchfqf2
628 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
630 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
631 return;
633 #endif
634 #ifdef HAVE_trunctqfqf2
635 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
637 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
638 return;
640 #endif
641 #ifdef HAVE_truncsfqf2
642 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
644 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
645 return;
647 #endif
648 #ifdef HAVE_truncdfqf2
649 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
651 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
652 return;
654 #endif
655 #ifdef HAVE_truncxfqf2
656 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
658 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
659 return;
661 #endif
662 #ifdef HAVE_trunctfqf2
663 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
665 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
666 return;
668 #endif
670 #ifdef HAVE_trunctqfhf2
671 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
673 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
674 return;
676 #endif
677 #ifdef HAVE_truncsfhf2
678 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
680 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
681 return;
683 #endif
684 #ifdef HAVE_truncdfhf2
685 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
687 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
688 return;
690 #endif
691 #ifdef HAVE_truncxfhf2
692 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
694 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
695 return;
697 #endif
698 #ifdef HAVE_trunctfhf2
699 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
701 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
702 return;
704 #endif
706 #ifdef HAVE_truncsftqf2
707 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
709 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
710 return;
712 #endif
713 #ifdef HAVE_truncdftqf2
714 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
716 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
717 return;
719 #endif
720 #ifdef HAVE_truncxftqf2
721 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
723 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
724 return;
726 #endif
727 #ifdef HAVE_trunctftqf2
728 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
730 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
731 return;
733 #endif
735 #ifdef HAVE_truncdfsf2
736 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
738 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
739 return;
741 #endif
742 #ifdef HAVE_truncxfsf2
743 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
745 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
746 return;
748 #endif
749 #ifdef HAVE_trunctfsf2
750 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
752 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
753 return;
755 #endif
756 #ifdef HAVE_truncxfdf2
757 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
759 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
760 return;
762 #endif
763 #ifdef HAVE_trunctfdf2
764 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
766 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
767 return;
769 #endif
771 libcall = (rtx) 0;
772 switch (from_mode)
774 case SFmode:
775 switch (to_mode)
777 case DFmode:
778 libcall = extendsfdf2_libfunc;
779 break;
781 case XFmode:
782 libcall = extendsfxf2_libfunc;
783 break;
785 case TFmode:
786 libcall = extendsftf2_libfunc;
787 break;
789 default:
790 break;
792 break;
794 case DFmode:
795 switch (to_mode)
797 case SFmode:
798 libcall = truncdfsf2_libfunc;
799 break;
801 case XFmode:
802 libcall = extenddfxf2_libfunc;
803 break;
805 case TFmode:
806 libcall = extenddftf2_libfunc;
807 break;
809 default:
810 break;
812 break;
814 case XFmode:
815 switch (to_mode)
817 case SFmode:
818 libcall = truncxfsf2_libfunc;
819 break;
821 case DFmode:
822 libcall = truncxfdf2_libfunc;
823 break;
825 default:
826 break;
828 break;
830 case TFmode:
831 switch (to_mode)
833 case SFmode:
834 libcall = trunctfsf2_libfunc;
835 break;
837 case DFmode:
838 libcall = trunctfdf2_libfunc;
839 break;
841 default:
842 break;
844 break;
846 default:
847 break;
850 if (libcall == (rtx) 0)
851 /* This conversion is not implemented yet. */
852 abort ();
854 start_sequence ();
855 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
856 1, from, from_mode);
857 insns = get_insns ();
858 end_sequence ();
859 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
860 from));
861 return;
864 /* Now both modes are integers. */
866 /* Handle expanding beyond a word. */
867 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
868 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
870 rtx insns;
871 rtx lowpart;
872 rtx fill_value;
873 rtx lowfrom;
874 int i;
875 enum machine_mode lowpart_mode;
876 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
878 /* Try converting directly if the insn is supported. */
879 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
880 != CODE_FOR_nothing)
882 /* If FROM is a SUBREG, put it into a register. Do this
883 so that we always generate the same set of insns for
884 better cse'ing; if an intermediate assignment occurred,
885 we won't be doing the operation directly on the SUBREG. */
886 if (optimize > 0 && GET_CODE (from) == SUBREG)
887 from = force_reg (from_mode, from);
888 emit_unop_insn (code, to, from, equiv_code);
889 return;
891 /* Next, try converting via full word. */
892 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
893 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
894 != CODE_FOR_nothing))
896 if (GET_CODE (to) == REG)
897 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
898 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
899 emit_unop_insn (code, to,
900 gen_lowpart (word_mode, to), equiv_code);
901 return;
904 /* No special multiword conversion insn; do it by hand. */
905 start_sequence ();
907 /* Since we will turn this into a no conflict block, we must ensure
908 that the source does not overlap the target. */
910 if (reg_overlap_mentioned_p (to, from))
911 from = force_reg (from_mode, from);
913 /* Get a copy of FROM widened to a word, if necessary. */
914 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
915 lowpart_mode = word_mode;
916 else
917 lowpart_mode = from_mode;
919 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
921 lowpart = gen_lowpart (lowpart_mode, to);
922 emit_move_insn (lowpart, lowfrom);
924 /* Compute the value to put in each remaining word. */
925 if (unsignedp)
926 fill_value = const0_rtx;
927 else
929 #ifdef HAVE_slt
930 if (HAVE_slt
931 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
932 && STORE_FLAG_VALUE == -1)
934 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
935 lowpart_mode, 0);
936 fill_value = gen_reg_rtx (word_mode);
937 emit_insn (gen_slt (fill_value));
939 else
940 #endif
942 fill_value
943 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
944 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
945 NULL_RTX, 0);
946 fill_value = convert_to_mode (word_mode, fill_value, 1);
950 /* Fill the remaining words. */
951 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
953 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
954 rtx subword = operand_subword (to, index, 1, to_mode);
956 if (subword == 0)
957 abort ();
959 if (fill_value != subword)
960 emit_move_insn (subword, fill_value);
963 insns = get_insns ();
964 end_sequence ();
966 emit_no_conflict_block (insns, to, from, NULL_RTX,
967 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
968 return;
971 /* Truncating multi-word to a word or less. */
972 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
973 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
975 if (!((GET_CODE (from) == MEM
976 && ! MEM_VOLATILE_P (from)
977 && direct_load[(int) to_mode]
978 && ! mode_dependent_address_p (XEXP (from, 0)))
979 || GET_CODE (from) == REG
980 || GET_CODE (from) == SUBREG))
981 from = force_reg (from_mode, from);
982 convert_move (to, gen_lowpart (word_mode, from), 0);
983 return;
986 /* Handle pointer conversion. */ /* SPEE 900220. */
987 if (to_mode == PQImode)
989 if (from_mode != QImode)
990 from = convert_to_mode (QImode, from, unsignedp);
992 #ifdef HAVE_truncqipqi2
993 if (HAVE_truncqipqi2)
995 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
996 return;
998 #endif /* HAVE_truncqipqi2 */
999 abort ();
1002 if (from_mode == PQImode)
1004 if (to_mode != QImode)
1006 from = convert_to_mode (QImode, from, unsignedp);
1007 from_mode = QImode;
1009 else
1011 #ifdef HAVE_extendpqiqi2
1012 if (HAVE_extendpqiqi2)
1014 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
1015 return;
1017 #endif /* HAVE_extendpqiqi2 */
1018 abort ();
1022 if (to_mode == PSImode)
1024 if (from_mode != SImode)
1025 from = convert_to_mode (SImode, from, unsignedp);
1027 #ifdef HAVE_truncsipsi2
1028 if (HAVE_truncsipsi2)
1030 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
1031 return;
1033 #endif /* HAVE_truncsipsi2 */
1034 abort ();
1037 if (from_mode == PSImode)
1039 if (to_mode != SImode)
1041 from = convert_to_mode (SImode, from, unsignedp);
1042 from_mode = SImode;
1044 else
1046 #ifdef HAVE_extendpsisi2
1047 if (! unsignedp && HAVE_extendpsisi2)
1049 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
1050 return;
1052 #endif /* HAVE_extendpsisi2 */
1053 #ifdef HAVE_zero_extendpsisi2
1054 if (unsignedp && HAVE_zero_extendpsisi2)
1056 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1057 return;
1059 #endif /* HAVE_zero_extendpsisi2 */
1060 abort ();
1064 if (to_mode == PDImode)
1066 if (from_mode != DImode)
1067 from = convert_to_mode (DImode, from, unsignedp);
1069 #ifdef HAVE_truncdipdi2
1070 if (HAVE_truncdipdi2)
1072 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1073 return;
1075 #endif /* HAVE_truncdipdi2 */
1076 abort ();
1079 if (from_mode == PDImode)
1081 if (to_mode != DImode)
1083 from = convert_to_mode (DImode, from, unsignedp);
1084 from_mode = DImode;
1086 else
1088 #ifdef HAVE_extendpdidi2
1089 if (HAVE_extendpdidi2)
1091 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1092 return;
1094 #endif /* HAVE_extendpdidi2 */
1095 abort ();
1099 /* Now follow all the conversions between integers
1100 no more than a word long. */
1102 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1103 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1104 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1105 GET_MODE_BITSIZE (from_mode)))
1107 if (!((GET_CODE (from) == MEM
1108 && ! MEM_VOLATILE_P (from)
1109 && direct_load[(int) to_mode]
1110 && ! mode_dependent_address_p (XEXP (from, 0)))
1111 || GET_CODE (from) == REG
1112 || GET_CODE (from) == SUBREG))
1113 from = force_reg (from_mode, from);
1114 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1115 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1116 from = copy_to_reg (from);
1117 emit_move_insn (to, gen_lowpart (to_mode, from));
1118 return;
1121 /* Handle extension. */
1122 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1124 /* Convert directly if that works. */
1125 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1126 != CODE_FOR_nothing)
1128 if (flag_force_mem)
1129 from = force_not_mem (from);
1131 emit_unop_insn (code, to, from, equiv_code);
1132 return;
1134 else
1136 enum machine_mode intermediate;
1137 rtx tmp;
1138 tree shift_amount;
1140 /* Search for a mode to convert via. */
1141 for (intermediate = from_mode; intermediate != VOIDmode;
1142 intermediate = GET_MODE_WIDER_MODE (intermediate))
1143 if (((can_extend_p (to_mode, intermediate, unsignedp)
1144 != CODE_FOR_nothing)
1145 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1146 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1147 GET_MODE_BITSIZE (intermediate))))
1148 && (can_extend_p (intermediate, from_mode, unsignedp)
1149 != CODE_FOR_nothing))
1151 convert_move (to, convert_to_mode (intermediate, from,
1152 unsignedp), unsignedp);
1153 return;
1156 /* No suitable intermediate mode.
1157 Generate what we need with shifts. */
1158 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1159 - GET_MODE_BITSIZE (from_mode), 0);
1160 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1161 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1162 to, unsignedp);
1163 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1164 to, unsignedp);
1165 if (tmp != to)
1166 emit_move_insn (to, tmp);
1167 return;
1171 /* Support special truncate insns for certain modes. */
1173 if (from_mode == DImode && to_mode == SImode)
1175 #ifdef HAVE_truncdisi2
1176 if (HAVE_truncdisi2)
1178 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1179 return;
1181 #endif
1182 convert_move (to, force_reg (from_mode, from), unsignedp);
1183 return;
1186 if (from_mode == DImode && to_mode == HImode)
1188 #ifdef HAVE_truncdihi2
1189 if (HAVE_truncdihi2)
1191 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1192 return;
1194 #endif
1195 convert_move (to, force_reg (from_mode, from), unsignedp);
1196 return;
1199 if (from_mode == DImode && to_mode == QImode)
1201 #ifdef HAVE_truncdiqi2
1202 if (HAVE_truncdiqi2)
1204 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1205 return;
1207 #endif
1208 convert_move (to, force_reg (from_mode, from), unsignedp);
1209 return;
1212 if (from_mode == SImode && to_mode == HImode)
1214 #ifdef HAVE_truncsihi2
1215 if (HAVE_truncsihi2)
1217 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1218 return;
1220 #endif
1221 convert_move (to, force_reg (from_mode, from), unsignedp);
1222 return;
1225 if (from_mode == SImode && to_mode == QImode)
1227 #ifdef HAVE_truncsiqi2
1228 if (HAVE_truncsiqi2)
1230 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1231 return;
1233 #endif
1234 convert_move (to, force_reg (from_mode, from), unsignedp);
1235 return;
1238 if (from_mode == HImode && to_mode == QImode)
1240 #ifdef HAVE_trunchiqi2
1241 if (HAVE_trunchiqi2)
1243 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1244 return;
1246 #endif
1247 convert_move (to, force_reg (from_mode, from), unsignedp);
1248 return;
1251 if (from_mode == TImode && to_mode == DImode)
1253 #ifdef HAVE_trunctidi2
1254 if (HAVE_trunctidi2)
1256 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1257 return;
1259 #endif
1260 convert_move (to, force_reg (from_mode, from), unsignedp);
1261 return;
1264 if (from_mode == TImode && to_mode == SImode)
1266 #ifdef HAVE_trunctisi2
1267 if (HAVE_trunctisi2)
1269 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1270 return;
1272 #endif
1273 convert_move (to, force_reg (from_mode, from), unsignedp);
1274 return;
1277 if (from_mode == TImode && to_mode == HImode)
1279 #ifdef HAVE_trunctihi2
1280 if (HAVE_trunctihi2)
1282 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1283 return;
1285 #endif
1286 convert_move (to, force_reg (from_mode, from), unsignedp);
1287 return;
1290 if (from_mode == TImode && to_mode == QImode)
1292 #ifdef HAVE_trunctiqi2
1293 if (HAVE_trunctiqi2)
1295 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1296 return;
1298 #endif
1299 convert_move (to, force_reg (from_mode, from), unsignedp);
1300 return;
1303 /* Handle truncation of volatile memrefs, and so on;
1304 the things that couldn't be truncated directly,
1305 and for which there was no special instruction. */
1306 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1308 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1309 emit_move_insn (to, temp);
1310 return;
1313 /* Mode combination is not recognized. */
1314 abort ();
1317 /* Return an rtx for a value that would result
1318 from converting X to mode MODE.
1319 Both X and MODE may be floating, or both integer.
1320 UNSIGNEDP is nonzero if X is an unsigned value.
1321 This can be done by referring to a part of X in place
1322 or by copying to a new temporary with conversion.
1324 This function *must not* call protect_from_queue
1325 except when putting X into an insn (in which case convert_move does it). */
1327 rtx
1328 convert_to_mode (mode, x, unsignedp)
1329 enum machine_mode mode;
1330 rtx x;
1331 int unsignedp;
1333 return convert_modes (mode, VOIDmode, x, unsignedp);
1336 /* Return an rtx for a value that would result
1337 from converting X from mode OLDMODE to mode MODE.
1338 Both modes may be floating, or both integer.
1339 UNSIGNEDP is nonzero if X is an unsigned value.
1341 This can be done by referring to a part of X in place
1342 or by copying to a new temporary with conversion.
1344 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1346 This function *must not* call protect_from_queue
1347 except when putting X into an insn (in which case convert_move does it). */
1349 rtx
1350 convert_modes (mode, oldmode, x, unsignedp)
1351 enum machine_mode mode, oldmode;
1352 rtx x;
1353 int unsignedp;
1355 rtx temp;
1357 /* If FROM is a SUBREG that indicates that we have already done at least
1358 the required extension, strip it. */
1360 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1361 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1362 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1363 x = gen_lowpart (mode, x);
1365 if (GET_MODE (x) != VOIDmode)
1366 oldmode = GET_MODE (x);
1368 if (mode == oldmode)
1369 return x;
1371 /* There is one case that we must handle specially: If we are converting
1372 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1373 we are to interpret the constant as unsigned, gen_lowpart will do
1374 the wrong thing if the constant appears negative. What we want to do is
1375 make the high-order word of the constant zero, not all ones. */
1377 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1378 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1379 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1381 HOST_WIDE_INT val = INTVAL (x);
1383 if (oldmode != VOIDmode
1384 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1386 int width = GET_MODE_BITSIZE (oldmode);
1388 /* We need to zero extend VAL. */
1389 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1392 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1395 /* We can do this with a gen_lowpart if both desired and current modes
1396 are integer, and this is either a constant integer, a register, or a
1397 non-volatile MEM. Except for the constant case where MODE is no
1398 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1400 if ((GET_CODE (x) == CONST_INT
1401 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1402 || (GET_MODE_CLASS (mode) == MODE_INT
1403 && GET_MODE_CLASS (oldmode) == MODE_INT
1404 && (GET_CODE (x) == CONST_DOUBLE
1405 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1406 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1407 && direct_load[(int) mode])
1408 || (GET_CODE (x) == REG
1409 && (! HARD_REGISTER_P (x)
1410 || HARD_REGNO_MODE_OK (REGNO (x), mode))
1411 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1412 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1414 /* ?? If we don't know OLDMODE, we have to assume here that
1415 X does not need sign- or zero-extension. This may not be
1416 the case, but it's the best we can do. */
1417 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1418 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1420 HOST_WIDE_INT val = INTVAL (x);
1421 int width = GET_MODE_BITSIZE (oldmode);
1423 /* We must sign or zero-extend in this case. Start by
1424 zero-extending, then sign extend if we need to. */
1425 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1426 if (! unsignedp
1427 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1428 val |= (HOST_WIDE_INT) (-1) << width;
1430 return gen_int_mode (val, mode);
1433 return gen_lowpart (mode, x);
1436 temp = gen_reg_rtx (mode);
1437 convert_move (temp, x, unsignedp);
1438 return temp;
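#if 0
/* Illustrative usage sketch, not part of the original expr.c: widening a
   QImode value OP (hypothetical) to SImode, zero-extending because the
   source type is unsigned.  */
rtx widened = convert_to_mode (SImode, op, /*unsignedp=*/1);
/* Equivalent when OP's current mode might be VOIDmode (e.g. a CONST_INT)
   but it is known to have come from a QImode value: */
rtx widened2 = convert_modes (SImode, QImode, op, 1);
#endif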
1441 /* This macro is used to determine what the largest unit size that
1442 move_by_pieces can use is. */
1444 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1445 move efficiently, as opposed to MOVE_MAX which is the maximum
1446 number of bytes we can move with a single instruction. */
1448 #ifndef MOVE_MAX_PIECES
1449 #define MOVE_MAX_PIECES MOVE_MAX
1450 #endif
1452 /* STORE_MAX_PIECES is the number of bytes at a time that we can
1453 store efficiently. Due to internal GCC limitations, this is
1454 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
1455 for an immediate constant. */
1457 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
1459 /* Generate several move instructions to copy LEN bytes from block FROM to
1460 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1461 and TO through protect_from_queue before calling.
1463 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1464 used to push FROM to the stack.
1466 ALIGN is maximum alignment we can assume. */
1468 void
1469 move_by_pieces (to, from, len, align)
1470 rtx to, from;
1471 unsigned HOST_WIDE_INT len;
1472 unsigned int align;
1474 struct move_by_pieces data;
1475 rtx to_addr, from_addr = XEXP (from, 0);
1476 unsigned int max_size = MOVE_MAX_PIECES + 1;
1477 enum machine_mode mode = VOIDmode, tmode;
1478 enum insn_code icode;
1480 data.offset = 0;
1481 data.from_addr = from_addr;
1482 if (to)
1484 to_addr = XEXP (to, 0);
1485 data.to = to;
1486 data.autinc_to
1487 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1488 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1489 data.reverse
1490 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1492 else
1494 to_addr = NULL_RTX;
1495 data.to = NULL_RTX;
1496 data.autinc_to = 1;
1497 #ifdef STACK_GROWS_DOWNWARD
1498 data.reverse = 1;
1499 #else
1500 data.reverse = 0;
1501 #endif
1503 data.to_addr = to_addr;
1504 data.from = from;
1505 data.autinc_from
1506 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1507 || GET_CODE (from_addr) == POST_INC
1508 || GET_CODE (from_addr) == POST_DEC);
1510 data.explicit_inc_from = 0;
1511 data.explicit_inc_to = 0;
1512 if (data.reverse) data.offset = len;
1513 data.len = len;
1515 /* If copying requires more than two move insns,
1516 copy addresses to registers (to make displacements shorter)
1517 and use post-increment if available. */
1518 if (!(data.autinc_from && data.autinc_to)
1519 && move_by_pieces_ninsns (len, align) > 2)
1521 /* Find the mode of the largest move... */
1522 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1523 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1524 if (GET_MODE_SIZE (tmode) < max_size)
1525 mode = tmode;
1527 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1529 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1530 data.autinc_from = 1;
1531 data.explicit_inc_from = -1;
1533 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1535 data.from_addr = copy_addr_to_reg (from_addr);
1536 data.autinc_from = 1;
1537 data.explicit_inc_from = 1;
1539 if (!data.autinc_from && CONSTANT_P (from_addr))
1540 data.from_addr = copy_addr_to_reg (from_addr);
1541 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1543 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1544 data.autinc_to = 1;
1545 data.explicit_inc_to = -1;
1547 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1549 data.to_addr = copy_addr_to_reg (to_addr);
1550 data.autinc_to = 1;
1551 data.explicit_inc_to = 1;
1553 if (!data.autinc_to && CONSTANT_P (to_addr))
1554 data.to_addr = copy_addr_to_reg (to_addr);
1557 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1558 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1559 align = MOVE_MAX * BITS_PER_UNIT;
1561 /* First move what we can in the largest integer mode, then go to
1562 successively smaller modes. */
1564 while (max_size > 1)
1566 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1567 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1568 if (GET_MODE_SIZE (tmode) < max_size)
1569 mode = tmode;
1571 if (mode == VOIDmode)
1572 break;
1574 icode = mov_optab->handlers[(int) mode].insn_code;
1575 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1576 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1578 max_size = GET_MODE_SIZE (mode);
1581 /* The code above should have handled everything. */
1582 if (data.len > 0)
1583 abort ();
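#if 0
/* Illustrative usage sketch, not part of the original expr.c: TO and FROM
   are BLKmode MEMs that have already been passed through protect_from_queue,
   as the comment above move_by_pieces requires; LEN and ALIGN are
   hypothetical compile-time constants.  */
if (MOVE_BY_PIECES_P (len, align))
  move_by_pieces (to, from, len, align);
#endif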
1586 /* Return number of insns required to move L bytes by pieces.
1587 ALIGN (in bits) is maximum alignment we can assume. */
1589 static unsigned HOST_WIDE_INT
1590 move_by_pieces_ninsns (l, align)
1591 unsigned HOST_WIDE_INT l;
1592 unsigned int align;
1594 unsigned HOST_WIDE_INT n_insns = 0;
1595 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1597 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1598 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1599 align = MOVE_MAX * BITS_PER_UNIT;
1601 while (max_size > 1)
1603 enum machine_mode mode = VOIDmode, tmode;
1604 enum insn_code icode;
1606 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1607 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1608 if (GET_MODE_SIZE (tmode) < max_size)
1609 mode = tmode;
1611 if (mode == VOIDmode)
1612 break;
1614 icode = mov_optab->handlers[(int) mode].insn_code;
1615 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1616 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1618 max_size = GET_MODE_SIZE (mode);
1621 if (l)
1622 abort ();
1623 return n_insns;
1626 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1627 with move instructions for mode MODE. GENFUN is the gen_... function
1628 to make a move insn for that mode. DATA has all the other info. */
1630 static void
1631 move_by_pieces_1 (genfun, mode, data)
1632 rtx (*genfun) PARAMS ((rtx, ...));
1633 enum machine_mode mode;
1634 struct move_by_pieces *data;
1636 unsigned int size = GET_MODE_SIZE (mode);
1637 rtx to1 = NULL_RTX, from1;
1639 while (data->len >= size)
1641 if (data->reverse)
1642 data->offset -= size;
1644 if (data->to)
1646 if (data->autinc_to)
1647 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1648 data->offset);
1649 else
1650 to1 = adjust_address (data->to, mode, data->offset);
1653 if (data->autinc_from)
1654 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1655 data->offset);
1656 else
1657 from1 = adjust_address (data->from, mode, data->offset);
1659 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1660 emit_insn (gen_add2_insn (data->to_addr,
1661 GEN_INT (-(HOST_WIDE_INT)size)));
1662 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1663 emit_insn (gen_add2_insn (data->from_addr,
1664 GEN_INT (-(HOST_WIDE_INT)size)));
1666 if (data->to)
1667 emit_insn ((*genfun) (to1, from1));
1668 else
1670 #ifdef PUSH_ROUNDING
1671 emit_single_push_insn (mode, from1, NULL);
1672 #else
1673 abort ();
1674 #endif
1677 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1678 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1679 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1680 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1682 if (! data->reverse)
1683 data->offset += size;
1685 data->len -= size;
1689 /* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1694 SIZE is an rtx that says how long they are.
1695 ALIGN is the maximum alignment we can assume they have.
1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1701 rtx
1702 emit_block_move (x, y, size, method)
1703 rtx x, y, size;
1704 enum block_op_methods method;
1706 bool may_use_call;
1707 rtx retval = 0;
1708 unsigned int align;
1710 switch (method)
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1728 default:
1729 abort ();
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
1741 size = protect_from_queue (size, 0);
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1751 can be incorrect is coming from __builtin_memcpy. */
1752 if (GET_CODE (size) == CONST_INT)
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1761 move_by_pieces (x, y, INTVAL (size), align);
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1763 ;
1764 else if (may_use_call)
1765 retval = emit_block_move_via_libcall (x, y, size);
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
1772 return retval;
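#if 0
/* Illustrative usage sketch, not part of the original expr.c: copying 64
   bytes between two BLKmode MEMs (hypothetical DST_MEM and SRC_MEM),
   letting emit_block_move choose between move_by_pieces, a movstr pattern,
   and the memcpy/bcopy libcall.  BLOCK_OP_NO_LIBCALL would forbid the
   libcall; BLOCK_OP_CALL_PARM is for copies made while pushing call
   arguments.  */
rtx ret = emit_block_move (dst_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);
#endif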
1775 /* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1779 static bool
1780 block_move_libcall_safe_for_call_parm ()
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1791 switch (takes_regs)
1793 case takes_regs_uninit:
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812 #endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1819 case takes_regs_yes:
1820 return true;
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1828 default:
1829 abort ();
1834 /* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
1837 static bool
1838 emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1842 /* Try the most limited insn first, because there's no point
1843 including more than one in the machine description unless
1844 the more limited one has some advantage. */
1846 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1847 enum machine_mode mode;
1849 /* Since this is a move insn, we don't care about volatility. */
1850 volatile_ok = 1;
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
1895 else
1896 delete_insns_since (last);
1900 volatile_ok = 0;
1901 return false;
1904 /* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
1907 static rtx
1908 emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1911 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1912 enum machine_mode size_mode;
1913 rtx retval;
1915 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1917 It is unsafe to save the value generated by protect_from_queue
1918 and reuse it later. Consider what happens if emit_queue is
1919 called before the return value from protect_from_queue is used.
1921 Expansion of the CALL_EXPR below will call emit_queue before
1922 we are finished emitting RTL for argument setup. So if we are
1923 not careful we could get the wrong value for an argument.
1925 To avoid this problem we go ahead and emit code to copy X, Y &
1926 SIZE into new pseudos. We can then place those new pseudos
1927 into an RTL_EXPR and use them later, even after a call to
1928 emit_queue.
1930 Note this is not strictly needed for library calls since they
1931 do not call emit_queue before loading their arguments. However,
1932 we may need to have library calls call emit_queue in the future
1933 since failing to do so could cause problems for targets which
1934 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1936 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1937 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1939 if (TARGET_MEM_FUNCTIONS)
1940 size_mode = TYPE_MODE (sizetype);
1941 else
1942 size_mode = TYPE_MODE (unsigned_type_node);
1943 size = convert_to_mode (size_mode, size, 1);
1944 size = copy_to_mode_reg (size_mode, size);
1946 /* It is incorrect to use the libcall calling conventions to call
1947 memcpy in this context. This could be a user call to memcpy and
1948 the user may wish to examine the return value from memcpy. For
1949 targets where libcalls and normal calls have different conventions
1950 for returning pointers, we could end up generating incorrect code.
1952 For convenience, we generate the call to bcopy this way as well. */
1954 dst_tree = make_tree (ptr_type_node, dst);
1955 src_tree = make_tree (ptr_type_node, src);
1956 if (TARGET_MEM_FUNCTIONS)
1957 size_tree = make_tree (sizetype, size);
1958 else
1959 size_tree = make_tree (unsigned_type_node, size);
1961 fn = emit_block_move_libcall_fn (true);
1962 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1963 if (TARGET_MEM_FUNCTIONS)
1965 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1968 else
1970 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1971 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1974 /* Now we have to build up the CALL_EXPR itself. */
1975 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1976 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1977 call_expr, arg_list, NULL_TREE);
1978 TREE_SIDE_EFFECTS (call_expr) = 1;
1980 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1982 /* If we are initializing a readonly value, show the above call
1983 clobbered it. Otherwise, a load from it may erroneously be
1984 hoisted from a loop. */
1985 if (RTX_UNCHANGING_P (dst))
1986 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1988 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1991 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1992 for the function we use for block copies. The first time FOR_CALL
1993 is true, we call assemble_external. */
1995 static GTY(()) tree block_move_fn;
1997 static tree
1998 emit_block_move_libcall_fn (for_call)
1999 int for_call;
2001 static bool emitted_extern;
2002 tree fn = block_move_fn, args;
2004 if (!fn)
2006 if (TARGET_MEM_FUNCTIONS)
2008 fn = get_identifier ("memcpy");
2009 args = build_function_type_list (ptr_type_node, ptr_type_node,
2010 const_ptr_type_node, sizetype,
2011 NULL_TREE);
2013 else
2015 fn = get_identifier ("bcopy");
2016 args = build_function_type_list (void_type_node, const_ptr_type_node,
2017 ptr_type_node, unsigned_type_node,
2018 NULL_TREE);
2021 fn = build_decl (FUNCTION_DECL, fn, args);
2022 DECL_EXTERNAL (fn) = 1;
2023 TREE_PUBLIC (fn) = 1;
2024 DECL_ARTIFICIAL (fn) = 1;
2025 TREE_NOTHROW (fn) = 1;
2027 block_move_fn = fn;
2030 if (for_call && !emitted_extern)
2032 emitted_extern = true;
2033 make_decl_rtl (fn, NULL);
2034 assemble_external (fn);
2037 return fn;
2040 /* A subroutine of emit_block_move. Copy the data via an explicit
2041 loop. This is used only when libcalls are forbidden. */
2042 /* ??? It'd be nice to copy in hunks larger than QImode. */
2044 static void
2045 emit_block_move_via_loop (x, y, size, align)
2046 rtx x, y, size;
2047 unsigned int align ATTRIBUTE_UNUSED;
2049 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2050 enum machine_mode iter_mode;
2052 iter_mode = GET_MODE (size);
2053 if (iter_mode == VOIDmode)
2054 iter_mode = word_mode;
2056 top_label = gen_label_rtx ();
2057 cmp_label = gen_label_rtx ();
2058 iter = gen_reg_rtx (iter_mode);
2060 emit_move_insn (iter, const0_rtx);
2062 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2063 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2064 do_pending_stack_adjust ();
2066 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2068 emit_jump (cmp_label);
2069 emit_label (top_label);
2071 tmp = convert_modes (Pmode, iter_mode, iter, true);
2072 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2073 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2074 x = change_address (x, QImode, x_addr);
2075 y = change_address (y, QImode, y_addr);
2077 emit_move_insn (x, y);
2079 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2080 true, OPTAB_LIB_WIDEN);
2081 if (tmp != iter)
2082 emit_move_insn (iter, tmp);
2084 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2085 emit_label (cmp_label);
2087 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2088 true, top_label);
2090 emit_note (NULL, NOTE_INSN_LOOP_END);
2093 /* Copy all or part of a value X into registers starting at REGNO.
2094 The number of registers to be filled is NREGS. */
2096 void
2097 move_block_to_reg (regno, x, nregs, mode)
2098 int regno;
2099 rtx x;
2100 int nregs;
2101 enum machine_mode mode;
2103 int i;
2104 #ifdef HAVE_load_multiple
2105 rtx pat;
2106 rtx last;
2107 #endif
2109 if (nregs == 0)
2110 return;
2112 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2113 x = validize_mem (force_const_mem (mode, x));
2115 /* See if the machine can do this with a load multiple insn. */
2116 #ifdef HAVE_load_multiple
2117 if (HAVE_load_multiple)
2119 last = get_last_insn ();
2120 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2121 GEN_INT (nregs));
2122 if (pat)
2124 emit_insn (pat);
2125 return;
2127 else
2128 delete_insns_since (last);
2130 #endif
2132 for (i = 0; i < nregs; i++)
2133 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2134 operand_subword_force (x, i, mode));
2137 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2138 The number of registers to be filled is NREGS. SIZE indicates the number
2139 of bytes in the object X. */
2141 void
2142 move_block_from_reg (regno, x, nregs, size)
2143 int regno;
2144 rtx x;
2145 int nregs;
2146 int size;
2148 int i;
2149 #ifdef HAVE_store_multiple
2150 rtx pat;
2151 rtx last;
2152 #endif
2153 enum machine_mode mode;
2155 if (nregs == 0)
2156 return;
2158 /* If SIZE is that of a mode no bigger than a word, just use that
2159 mode's store operation. */
2160 if (size <= UNITS_PER_WORD
2161 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
2163 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
2164 return;
2167 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
2168 to the left before storing to memory. Note that the previous test
2169 doesn't handle all cases (e.g. SIZE == 3). */
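/* For example, with UNITS_PER_WORD == 4 and SIZE == 3, the shift below is
(4 - 3) * 8 == 8 bits, which moves the three significant bytes of the
register value up to the first three bytes of the destination word in
memory.  */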
2170 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
2172 rtx tem = operand_subword (x, 0, 1, BLKmode);
2173 rtx shift;
2175 if (tem == 0)
2176 abort ();
2178 shift = expand_shift (LSHIFT_EXPR, word_mode,
2179 gen_rtx_REG (word_mode, regno),
2180 build_int_2 ((UNITS_PER_WORD - size)
2181 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2182 emit_move_insn (tem, shift);
2183 return;
2186 /* See if the machine can do this with a store multiple insn. */
2187 #ifdef HAVE_store_multiple
2188 if (HAVE_store_multiple)
2190 last = get_last_insn ();
2191 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2192 GEN_INT (nregs));
2193 if (pat)
2195 emit_insn (pat);
2196 return;
2198 else
2199 delete_insns_since (last);
2201 #endif
2203 for (i = 0; i < nregs; i++)
2205 rtx tem = operand_subword (x, i, 1, BLKmode);
2207 if (tem == 0)
2208 abort ();
2210 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2214 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2215 ORIG, where ORIG is a non-consecutive group of registers represented by
2216 a PARALLEL. The clone is identical to the original except in that the
2217 original set of registers is replaced by a new set of pseudo registers.
2218 The new set has the same modes as the original set. */
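/* As an illustrative sketch of the representation (register numbers chosen
arbitrarily): a 16-byte value split across two 8-byte registers might be
described by
(parallel [(expr_list (reg:DI 100) (const_int 0))
(expr_list (reg:DI 101) (const_int 8))])
where each EXPR_LIST pairs a register with its byte offset into the value;
the register in the first entry may be null when part of the value also
lives on the stack.  */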
2220 rtx
2221 gen_group_rtx (orig)
2222 rtx orig;
2224 int i, length;
2225 rtx *tmps;
2227 if (GET_CODE (orig) != PARALLEL)
2228 abort ();
2230 length = XVECLEN (orig, 0);
2231 tmps = (rtx *) alloca (sizeof (rtx) * length);
2233 /* Skip a NULL entry in first slot. */
2234 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2236 if (i)
2237 tmps[0] = 0;
2239 for (; i < length; i++)
2241 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2242 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2244 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2247 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2250 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2251 registers represented by a PARALLEL. SSIZE represents the total size of
2252 block SRC in bytes, or -1 if not known. */
2253 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2254 the balance will be in what would be the low-order memory addresses, i.e.
2255 left justified for big endian, right justified for little endian. This
2256 happens to be true for the targets currently using this support. If this
2257 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2258 would be needed. */
2260 void
2261 emit_group_load (dst, orig_src, ssize)
2262 rtx dst, orig_src;
2263 int ssize;
2265 rtx *tmps, src;
2266 int start, i;
2268 if (GET_CODE (dst) != PARALLEL)
2269 abort ();
2271 /* Check for a NULL entry, used to indicate that the parameter goes
2272 both on the stack and in registers. */
2273 if (XEXP (XVECEXP (dst, 0, 0), 0))
2274 start = 0;
2275 else
2276 start = 1;
2278 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2280 /* Process the pieces. */
2281 for (i = start; i < XVECLEN (dst, 0); i++)
2283 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2284 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2285 unsigned int bytelen = GET_MODE_SIZE (mode);
2286 int shift = 0;
2288 /* Handle trailing fragments that run over the size of the struct. */
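/* For instance, an 8-byte piece that starts at byte 8 of a 12-byte block
has only 4 meaningful bytes, so BYTELEN is trimmed to 4 and SHIFT becomes
(8 - 4) * 8 == 32 bits, applied further down on big-endian targets to
left-justify the fragment.  */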
2289 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2291 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2292 bytelen = ssize - bytepos;
2293 if (bytelen <= 0)
2294 abort ();
2297 /* If we won't be loading directly from memory, protect the real source
2298 from strange tricks we might play; but make sure that the source can
2299 be loaded directly into the destination. */
2300 src = orig_src;
2301 if (GET_CODE (orig_src) != MEM
2302 && (!CONSTANT_P (orig_src)
2303 || (GET_MODE (orig_src) != mode
2304 && GET_MODE (orig_src) != VOIDmode)))
2306 if (GET_MODE (orig_src) == VOIDmode)
2307 src = gen_reg_rtx (mode);
2308 else
2309 src = gen_reg_rtx (GET_MODE (orig_src));
2311 emit_move_insn (src, orig_src);
2314 /* Optimize the access just a bit. */
2315 if (GET_CODE (src) == MEM
2316 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2317 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2318 && bytelen == GET_MODE_SIZE (mode))
2320 tmps[i] = gen_reg_rtx (mode);
2321 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2323 else if (GET_CODE (src) == CONCAT)
2325 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2326 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2328 if ((bytepos == 0 && bytelen == slen0)
2329 || (bytepos != 0 && bytepos + bytelen <= slen))
2331 /* The following assumes that the concatenated objects all
2332 have the same size. In this case, a simple calculation
2333 can be used to determine the object and the bit field
2334 to be extracted. */
2335 tmps[i] = XEXP (src, bytepos / slen0);
2336 if (! CONSTANT_P (tmps[i])
2337 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2338 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2339 (bytepos % slen0) * BITS_PER_UNIT,
2340 1, NULL_RTX, mode, mode, ssize);
2342 else if (bytepos == 0)
2344 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2345 emit_move_insn (mem, src);
2346 tmps[i] = adjust_address (mem, mode, 0);
2348 else
2349 abort ();
2351 else if (CONSTANT_P (src)
2352 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2353 tmps[i] = src;
2354 else
2355 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2356 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2357 mode, mode, ssize);
2359 if (BYTES_BIG_ENDIAN && shift)
2360 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2361 tmps[i], 0, OPTAB_WIDEN);
2364 emit_queue ();
2366 /* Copy the extracted pieces into the proper (probable) hard regs. */
2367 for (i = start; i < XVECLEN (dst, 0); i++)
2368 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2371 /* Emit code to move a block SRC to block DST, where SRC and DST are
2372 non-consecutive groups of registers, each represented by a PARALLEL. */
2374 void
2375 emit_group_move (dst, src)
2376 rtx dst, src;
2378 int i;
2380 if (GET_CODE (src) != PARALLEL
2381 || GET_CODE (dst) != PARALLEL
2382 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2383 abort ();
2385 /* Skip first entry if NULL. */
2386 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2387 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2388 XEXP (XVECEXP (src, 0, i), 0));
2391 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2392 registers represented by a PARALLEL. SSIZE represents the total size of
2393 block DST, or -1 if not known. */
2395 void
2396 emit_group_store (orig_dst, src, ssize)
2397 rtx orig_dst, src;
2398 int ssize;
2400 rtx *tmps, dst;
2401 int start, i;
2403 if (GET_CODE (src) != PARALLEL)
2404 abort ();
2406 /* Check for a NULL entry, used to indicate that the parameter goes
2407 both on the stack and in registers. */
2408 if (XEXP (XVECEXP (src, 0, 0), 0))
2409 start = 0;
2410 else
2411 start = 1;
2413 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2415 /* Copy the (probable) hard regs into pseudos. */
2416 for (i = start; i < XVECLEN (src, 0); i++)
2418 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2419 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2420 emit_move_insn (tmps[i], reg);
2422 emit_queue ();
2424 /* If we won't be storing directly into memory, protect the real destination
2425 from strange tricks we might play. */
2426 dst = orig_dst;
2427 if (GET_CODE (dst) == PARALLEL)
2429 rtx temp;
2431 /* We can get a PARALLEL dst if there is a conditional expression in
2432 a return statement. In that case, the dst and src are the same,
2433 so no action is necessary. */
2434 if (rtx_equal_p (dst, src))
2435 return;
2437 /* It is unclear if we can ever reach here, but we may as well handle
2438 it. Allocate a temporary, and split this into a store/load to/from
2439 the temporary. */
2441 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2442 emit_group_store (temp, src, ssize);
2443 emit_group_load (dst, temp, ssize);
2444 return;
2446 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2448 dst = gen_reg_rtx (GET_MODE (orig_dst));
2449 /* Make life a bit easier for combine. */
2450 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2453 /* Process the pieces. */
2454 for (i = start; i < XVECLEN (src, 0); i++)
2456 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2457 enum machine_mode mode = GET_MODE (tmps[i]);
2458 unsigned int bytelen = GET_MODE_SIZE (mode);
2459 rtx dest = dst;
2461 /* Handle trailing fragments that run over the size of the struct. */
2462 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2464 if (BYTES_BIG_ENDIAN)
2466 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2467 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2468 tmps[i], 0, OPTAB_WIDEN);
2470 bytelen = ssize - bytepos;
2473 if (GET_CODE (dst) == CONCAT)
2475 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 dest = XEXP (dst, 0);
2477 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2479 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2480 dest = XEXP (dst, 1);
2482 else if (bytepos == 0 && XVECLEN (src, 0))
2484 dest = assign_stack_temp (GET_MODE (dest),
2485 GET_MODE_SIZE (GET_MODE (dest)), 0);
2486 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2487 tmps[i]);
2488 dst = dest;
2489 break;
2491 else
2492 abort ();
2495 /* Optimize the access just a bit. */
2496 if (GET_CODE (dest) == MEM
2497 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2498 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2499 && bytelen == GET_MODE_SIZE (mode))
2500 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2501 else
2502 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2503 mode, tmps[i], ssize);
2506 emit_queue ();
2508 /* Copy from the pseudo into the (probable) hard reg. */
2509 if (orig_dst != dst)
2510 emit_move_insn (orig_dst, dst);
2513 /* Generate code to copy a BLKmode object of TYPE out of a
2514 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2515 is null, a stack temporary is created. TGTBLK is returned.
2517 The primary purpose of this routine is to handle functions
2518 that return BLKmode structures in registers. Some machines
2519 (the PA for example) want to return all small structures
2520 in registers regardless of the structure's alignment. */
2522 rtx
2523 copy_blkmode_from_reg (tgtblk, srcreg, type)
2524 rtx tgtblk;
2525 rtx srcreg;
2526 tree type;
2528 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2529 rtx src = NULL, dst = NULL;
2530 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2531 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2533 if (tgtblk == 0)
2535 tgtblk = assign_temp (build_qualified_type (type,
2536 (TYPE_QUALS (type)
2537 | TYPE_QUAL_CONST)),
2538 0, 1, 1);
2539 preserve_temp_slots (tgtblk);
2542 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2543 into a new pseudo which is a full word. */
2545 if (GET_MODE (srcreg) != BLKmode
2546 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2547 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2549 /* Structures whose size is not a multiple of a word are aligned
2550 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2551 machine, this means we must skip the empty high order bytes when
2552 calculating the bit offset. */
2553 if (BYTES_BIG_ENDIAN
2554 && bytes % UNITS_PER_WORD)
2555 big_endian_correction
2556 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
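/* For example, with UNITS_PER_WORD == 4 (so BITS_PER_WORD == 32) and a
6-byte structure, the correction is 32 - (6 % 4) * 8 == 16 bits; the loop
below starts XBITPOS there so the unused high-order bits of the source are
skipped.  */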
2558 /* Copy the structure BITSIZE bits at a time.
2560 We could probably emit more efficient code for machines which do not use
2561 strict alignment, but it doesn't seem worth the effort at the current
2562 time. */
2563 for (bitpos = 0, xbitpos = big_endian_correction;
2564 bitpos < bytes * BITS_PER_UNIT;
2565 bitpos += bitsize, xbitpos += bitsize)
2567 /* We need a new source operand each time xbitpos is on a
2568 word boundary and when xbitpos == big_endian_correction
2569 (the first time through). */
2570 if (xbitpos % BITS_PER_WORD == 0
2571 || xbitpos == big_endian_correction)
2572 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2573 GET_MODE (srcreg));
2575 /* We need a new destination operand each time bitpos is on
2576 a word boundary. */
2577 if (bitpos % BITS_PER_WORD == 0)
2578 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2580 /* Use xbitpos for the source extraction (right justified) and
2581 bitpos for the destination store (left justified). */
2582 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2583 extract_bit_field (src, bitsize,
2584 xbitpos % BITS_PER_WORD, 1,
2585 NULL_RTX, word_mode, word_mode,
2586 BITS_PER_WORD),
2587 BITS_PER_WORD);
2590 return tgtblk;
2593 /* Add a USE expression for REG to the (possibly empty) list pointed
2594 to by CALL_FUSAGE. REG must denote a hard register. */
2596 void
2597 use_reg (call_fusage, reg)
2598 rtx *call_fusage, reg;
2600 if (GET_CODE (reg) != REG
2601 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2602 abort ();
2604 *call_fusage
2605 = gen_rtx_EXPR_LIST (VOIDmode,
2606 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2609 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2610 starting at REGNO. All of these registers must be hard registers. */
2612 void
2613 use_regs (call_fusage, regno, nregs)
2614 rtx *call_fusage;
2615 int regno;
2616 int nregs;
2618 int i;
2620 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2621 abort ();
2623 for (i = 0; i < nregs; i++)
2624 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2627 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2628 PARALLEL REGS. This is for calls that pass values in multiple
2629 non-contiguous locations. The Irix 6 ABI has examples of this. */
2631 void
2632 use_group_regs (call_fusage, regs)
2633 rtx *call_fusage;
2634 rtx regs;
2636 int i;
2638 for (i = 0; i < XVECLEN (regs, 0); i++)
2640 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2642 /* A NULL entry means the parameter goes both on the stack and in
2643 registers. This can also be a MEM for targets that pass values
2644 partially on the stack and partially in registers. */
2645 if (reg != 0 && GET_CODE (reg) == REG)
2646 use_reg (call_fusage, reg);
2651 /* Determine whether the LEN bytes generated by CONSTFUN can be
2652 stored to memory using several move instructions. CONSTFUNDATA is
2653 a pointer which will be passed as argument in every CONSTFUN call.
2654 ALIGN is maximum alignment we can assume. Return nonzero if a
2655 call to store_by_pieces should succeed. */
2657 int
2658 can_store_by_pieces (len, constfun, constfundata, align)
2659 unsigned HOST_WIDE_INT len;
2660 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2661 PTR constfundata;
2662 unsigned int align;
2664 unsigned HOST_WIDE_INT max_size, l;
2665 HOST_WIDE_INT offset = 0;
2666 enum machine_mode mode, tmode;
2667 enum insn_code icode;
2668 int reverse;
2669 rtx cst;
2671 if (! STORE_BY_PIECES_P (len, align))
2672 return 0;
2674 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2675 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2676 align = MOVE_MAX * BITS_PER_UNIT;
2678 /* We would first store what we can in the largest integer mode, then go to
2679 successively smaller modes. */
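/* As a worked example, assuming STORE_MAX_PIECES permits 8-byte pieces and
the alignment checks below pass: an 11-byte store is examined as one DImode
piece, one HImode piece and one QImode piece (8 + 2 + 1), and each
generated constant must satisfy LEGITIMATE_CONSTANT_P.  */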
2681 for (reverse = 0;
2682 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2683 reverse++)
2685 l = len;
2686 mode = VOIDmode;
2687 max_size = STORE_MAX_PIECES + 1;
2688 while (max_size > 1)
2690 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2691 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2692 if (GET_MODE_SIZE (tmode) < max_size)
2693 mode = tmode;
2695 if (mode == VOIDmode)
2696 break;
2698 icode = mov_optab->handlers[(int) mode].insn_code;
2699 if (icode != CODE_FOR_nothing
2700 && align >= GET_MODE_ALIGNMENT (mode))
2702 unsigned int size = GET_MODE_SIZE (mode);
2704 while (l >= size)
2706 if (reverse)
2707 offset -= size;
2709 cst = (*constfun) (constfundata, offset, mode);
2710 if (!LEGITIMATE_CONSTANT_P (cst))
2711 return 0;
2713 if (!reverse)
2714 offset += size;
2716 l -= size;
2720 max_size = GET_MODE_SIZE (mode);
2723 /* The code above should have handled everything. */
2724 if (l != 0)
2725 abort ();
2728 return 1;
2731 /* Generate several move instructions to store LEN bytes generated by
2732 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2733 pointer which will be passed as argument in every CONSTFUN call.
2734 ALIGN is maximum alignment we can assume. */
2736 void
2737 store_by_pieces (to, len, constfun, constfundata, align)
2738 rtx to;
2739 unsigned HOST_WIDE_INT len;
2740 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2741 PTR constfundata;
2742 unsigned int align;
2744 struct store_by_pieces data;
2746 if (! STORE_BY_PIECES_P (len, align))
2747 abort ();
2748 to = protect_from_queue (to, 1);
2749 data.constfun = constfun;
2750 data.constfundata = constfundata;
2751 data.len = len;
2752 data.to = to;
2753 store_by_pieces_1 (&data, align);
2756 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2757 rtx with BLKmode). The caller must pass TO through protect_from_queue
2758 before calling. ALIGN is maximum alignment we can assume. */
2760 static void
2761 clear_by_pieces (to, len, align)
2762 rtx to;
2763 unsigned HOST_WIDE_INT len;
2764 unsigned int align;
2766 struct store_by_pieces data;
2768 data.constfun = clear_by_pieces_1;
2769 data.constfundata = NULL;
2770 data.len = len;
2771 data.to = to;
2772 store_by_pieces_1 (&data, align);
2775 /* Callback routine for clear_by_pieces.
2776 Return const0_rtx unconditionally. */
2778 static rtx
2779 clear_by_pieces_1 (data, offset, mode)
2780 PTR data ATTRIBUTE_UNUSED;
2781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2782 enum machine_mode mode ATTRIBUTE_UNUSED;
2784 return const0_rtx;
2787 /* Subroutine of clear_by_pieces and store_by_pieces.
2788 Generate several move instructions to store LEN bytes of block TO. (A MEM
2789 rtx with BLKmode). The caller must pass TO through protect_from_queue
2790 before calling. ALIGN is maximum alignment we can assume. */
2792 static void
2793 store_by_pieces_1 (data, align)
2794 struct store_by_pieces *data;
2795 unsigned int align;
2797 rtx to_addr = XEXP (data->to, 0);
2798 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2799 enum machine_mode mode = VOIDmode, tmode;
2800 enum insn_code icode;
2802 data->offset = 0;
2803 data->to_addr = to_addr;
2804 data->autinc_to
2805 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2806 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2808 data->explicit_inc_to = 0;
2809 data->reverse
2810 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2811 if (data->reverse)
2812 data->offset = data->len;
2814 /* If storing requires more than two move insns,
2815 copy addresses to registers (to make displacements shorter)
2816 and use post-increment if available. */
2817 if (!data->autinc_to
2818 && move_by_pieces_ninsns (data->len, align) > 2)
2820 /* Determine the main mode we'll be using. */
2821 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2822 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2823 if (GET_MODE_SIZE (tmode) < max_size)
2824 mode = tmode;
2826 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2828 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2829 data->autinc_to = 1;
2830 data->explicit_inc_to = -1;
2833 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2834 && ! data->autinc_to)
2836 data->to_addr = copy_addr_to_reg (to_addr);
2837 data->autinc_to = 1;
2838 data->explicit_inc_to = 1;
2841 if ( !data->autinc_to && CONSTANT_P (to_addr))
2842 data->to_addr = copy_addr_to_reg (to_addr);
2845 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2846 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2847 align = MOVE_MAX * BITS_PER_UNIT;
2849 /* First store what we can in the largest integer mode, then go to
2850 successively smaller modes. */
2852 while (max_size > 1)
2854 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2855 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2856 if (GET_MODE_SIZE (tmode) < max_size)
2857 mode = tmode;
2859 if (mode == VOIDmode)
2860 break;
2862 icode = mov_optab->handlers[(int) mode].insn_code;
2863 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2864 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2866 max_size = GET_MODE_SIZE (mode);
2869 /* The code above should have handled everything. */
2870 if (data->len != 0)
2871 abort ();
2874 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2875 with move instructions for mode MODE. GENFUN is the gen_... function
2876 to make a move insn for that mode. DATA has all the other info. */
2878 static void
2879 store_by_pieces_2 (genfun, mode, data)
2880 rtx (*genfun) PARAMS ((rtx, ...));
2881 enum machine_mode mode;
2882 struct store_by_pieces *data;
2884 unsigned int size = GET_MODE_SIZE (mode);
2885 rtx to1, cst;
2887 while (data->len >= size)
2889 if (data->reverse)
2890 data->offset -= size;
2892 if (data->autinc_to)
2893 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2894 data->offset);
2895 else
2896 to1 = adjust_address (data->to, mode, data->offset);
2898 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2899 emit_insn (gen_add2_insn (data->to_addr,
2900 GEN_INT (-(HOST_WIDE_INT) size)));
2902 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2903 emit_insn ((*genfun) (to1, cst));
2905 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2906 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2908 if (! data->reverse)
2909 data->offset += size;
2911 data->len -= size;
2915 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2916 its length in bytes. */
2918 rtx
2919 clear_storage (object, size)
2920 rtx object;
2921 rtx size;
2923 rtx retval = 0;
2924 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2925 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2927 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2928 just move a zero. Otherwise, do this a piece at a time. */
2929 if (GET_MODE (object) != BLKmode
2930 && GET_CODE (size) == CONST_INT
2931 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2932 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2933 else
2935 object = protect_from_queue (object, 1);
2936 size = protect_from_queue (size, 0);
2938 if (GET_CODE (size) == CONST_INT
2939 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2940 clear_by_pieces (object, INTVAL (size), align);
2941 else if (clear_storage_via_clrstr (object, size, align))
2943 else
2944 retval = clear_storage_via_libcall (object, size);
2947 return retval;
2950 /* A subroutine of clear_storage. Expand a clrstr pattern;
2951 return true if successful. */
2953 static bool
2954 clear_storage_via_clrstr (object, size, align)
2955 rtx object, size;
2956 unsigned int align;
2958 /* Try the most limited insn first, because there's no point
2959 including more than one in the machine description unless
2960 the more limited one has some advantage. */
2962 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2963 enum machine_mode mode;
2965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2966 mode = GET_MODE_WIDER_MODE (mode))
2968 enum insn_code code = clrstr_optab[(int) mode];
2969 insn_operand_predicate_fn pred;
2971 if (code != CODE_FOR_nothing
2972 /* We don't need MODE to be narrower than
2973 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2974 the mode mask, as it is returned by the macro, it will
2975 definitely be less than the actual mode mask. */
2976 && ((GET_CODE (size) == CONST_INT
2977 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2978 <= (GET_MODE_MASK (mode) >> 1)))
2979 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2980 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2981 || (*pred) (object, BLKmode))
2982 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2983 || (*pred) (opalign, VOIDmode)))
2985 rtx op1;
2986 rtx last = get_last_insn ();
2987 rtx pat;
2989 op1 = convert_to_mode (mode, size, 1);
2990 pred = insn_data[(int) code].operand[1].predicate;
2991 if (pred != 0 && ! (*pred) (op1, mode))
2992 op1 = copy_to_mode_reg (mode, op1);
2994 pat = GEN_FCN ((int) code) (object, op1, opalign);
2995 if (pat)
2997 emit_insn (pat);
2998 return true;
3000 else
3001 delete_insns_since (last);
3005 return false;
3008 /* A subroutine of clear_storage. Expand a call to memset or bzero.
3009 Return the return value of memset, 0 otherwise. */
3011 static rtx
3012 clear_storage_via_libcall (object, size)
3013 rtx object, size;
3015 tree call_expr, arg_list, fn, object_tree, size_tree;
3016 enum machine_mode size_mode;
3017 rtx retval;
3019 /* OBJECT or SIZE may have been passed through protect_from_queue.
3021 It is unsafe to save the value generated by protect_from_queue
3022 and reuse it later. Consider what happens if emit_queue is
3023 called before the return value from protect_from_queue is used.
3025 Expansion of the CALL_EXPR below will call emit_queue before
3026 we are finished emitting RTL for argument setup. So if we are
3027 not careful we could get the wrong value for an argument.
3029 To avoid this problem we go ahead and emit code to copy OBJECT
3030 and SIZE into new pseudos. We can then place those new pseudos
3031 into an RTL_EXPR and use them later, even after a call to
3032 emit_queue.
3034 Note this is not strictly needed for library calls since they
3035 do not call emit_queue before loading their arguments. However,
3036 we may need to have library calls call emit_queue in the future
3037 since failing to do so could cause problems for targets which
3038 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3040 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3042 if (TARGET_MEM_FUNCTIONS)
3043 size_mode = TYPE_MODE (sizetype);
3044 else
3045 size_mode = TYPE_MODE (unsigned_type_node);
3046 size = convert_to_mode (size_mode, size, 1);
3047 size = copy_to_mode_reg (size_mode, size);
3049 /* It is incorrect to use the libcall calling conventions to call
3050 memset in this context. This could be a user call to memset and
3051 the user may wish to examine the return value from memset. For
3052 targets where libcalls and normal calls have different conventions
3053 for returning pointers, we could end up generating incorrect code.
3055 For convenience, we generate the call to bzero this way as well. */
3057 object_tree = make_tree (ptr_type_node, object);
3058 if (TARGET_MEM_FUNCTIONS)
3059 size_tree = make_tree (sizetype, size);
3060 else
3061 size_tree = make_tree (unsigned_type_node, size);
3063 fn = clear_storage_libcall_fn (true);
3064 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3065 if (TARGET_MEM_FUNCTIONS)
3066 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3067 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3069 /* Now we have to build up the CALL_EXPR itself. */
3070 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3071 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3072 call_expr, arg_list, NULL_TREE);
3073 TREE_SIDE_EFFECTS (call_expr) = 1;
3075 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3077 /* If we are initializing a readonly value, show the above call
3078 clobbered it. Otherwise, a load from it may erroneously be
3079 hoisted from a loop. */
3080 if (RTX_UNCHANGING_P (object))
3081 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3083 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3086 /* A subroutine of clear_storage_via_libcall. Create the tree node
3087 for the function we use for block clears. The first time FOR_CALL
3088 is true, we call assemble_external. */
3090 static GTY(()) tree block_clear_fn;
3092 static tree
3093 clear_storage_libcall_fn (for_call)
3094 int for_call;
3096 static bool emitted_extern;
3097 tree fn = block_clear_fn, args;
3099 if (!fn)
3101 if (TARGET_MEM_FUNCTIONS)
3103 fn = get_identifier ("memset");
3104 args = build_function_type_list (ptr_type_node, ptr_type_node,
3105 integer_type_node, sizetype,
3106 NULL_TREE);
3108 else
3110 fn = get_identifier ("bzero");
3111 args = build_function_type_list (void_type_node, ptr_type_node,
3112 unsigned_type_node, NULL_TREE);
3115 fn = build_decl (FUNCTION_DECL, fn, args);
3116 DECL_EXTERNAL (fn) = 1;
3117 TREE_PUBLIC (fn) = 1;
3118 DECL_ARTIFICIAL (fn) = 1;
3119 TREE_NOTHROW (fn) = 1;
3121 block_clear_fn = fn;
3124 if (for_call && !emitted_extern)
3126 emitted_extern = true;
3127 make_decl_rtl (fn, NULL);
3128 assemble_external (fn);
3131 return fn;
3134 /* Generate code to copy Y into X.
3135 Both Y and X must have the same mode, except that
3136 Y can be a constant with VOIDmode.
3137 This mode cannot be BLKmode; use emit_block_move for that.
3139 Return the last instruction emitted. */
3141 rtx
3142 emit_move_insn (x, y)
3143 rtx x, y;
3145 enum machine_mode mode = GET_MODE (x);
3146 rtx y_cst = NULL_RTX;
3147 rtx last_insn;
3149 x = protect_from_queue (x, 1);
3150 y = protect_from_queue (y, 0);
3152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3153 abort ();
3155 /* Never force constant_p_rtx to memory. */
3156 if (GET_CODE (y) == CONSTANT_P_RTX)
3158 else if (CONSTANT_P (y))
3160 if (optimize
3161 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3162 && (last_insn = compress_float_constant (x, y)))
3163 return last_insn;
3165 if (!LEGITIMATE_CONSTANT_P (y))
3167 y_cst = y;
3168 y = force_const_mem (mode, y);
3170 /* If the target's cannot_force_const_mem prevented the spill,
3171 assume that the target's move expanders will also take care
3172 of the non-legitimate constant. */
3173 if (!y)
3174 y = y_cst;
3178 /* If X or Y are memory references, verify that their addresses are valid
3179 for the machine. */
3180 if (GET_CODE (x) == MEM
3181 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3182 && ! push_operand (x, GET_MODE (x)))
3183 || (flag_force_addr
3184 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3185 x = validize_mem (x);
3187 if (GET_CODE (y) == MEM
3188 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3189 || (flag_force_addr
3190 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3191 y = validize_mem (y);
3193 if (mode == BLKmode)
3194 abort ();
3196 last_insn = emit_move_insn_1 (x, y);
3198 if (y_cst && GET_CODE (x) == REG)
3199 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3201 return last_insn;
3204 /* Low level part of emit_move_insn.
3205 Called just like emit_move_insn, but assumes X and Y
3206 are basically valid. */
3208 rtx
3209 emit_move_insn_1 (x, y)
3210 rtx x, y;
3212 enum machine_mode mode = GET_MODE (x);
3213 enum machine_mode submode;
3214 enum mode_class class = GET_MODE_CLASS (mode);
3216 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3217 abort ();
3219 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3220 return
3221 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3223 /* Expand complex moves by moving real part and imag part, if possible. */
3224 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3225 && BLKmode != (submode = GET_MODE_INNER (mode))
3226 && (mov_optab->handlers[(int) submode].insn_code
3227 != CODE_FOR_nothing))
3229 /* Don't split destination if it is a stack push. */
3230 int stack = push_operand (x, GET_MODE (x));
3232 #ifdef PUSH_ROUNDING
3233 /* In case we output to the stack, but the size is smaller than what the
3234 machine can push exactly, we need to use move instructions. */
3235 if (stack
3236 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3237 != GET_MODE_SIZE (submode)))
3239 rtx temp;
3240 HOST_WIDE_INT offset1, offset2;
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_binop (Pmode,
3245 #ifdef STACK_GROWS_DOWNWARD
3246 sub_optab,
3247 #else
3248 add_optab,
3249 #endif
3250 stack_pointer_rtx,
3251 GEN_INT
3252 (PUSH_ROUNDING
3253 (GET_MODE_SIZE (GET_MODE (x)))),
3254 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3256 if (temp != stack_pointer_rtx)
3257 emit_move_insn (stack_pointer_rtx, temp);
3259 #ifdef STACK_GROWS_DOWNWARD
3260 offset1 = 0;
3261 offset2 = GET_MODE_SIZE (submode);
3262 #else
3263 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3264 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3265 + GET_MODE_SIZE (submode));
3266 #endif
3268 emit_move_insn (change_address (x, submode,
3269 gen_rtx_PLUS (Pmode,
3270 stack_pointer_rtx,
3271 GEN_INT (offset1))),
3272 gen_realpart (submode, y));
3273 emit_move_insn (change_address (x, submode,
3274 gen_rtx_PLUS (Pmode,
3275 stack_pointer_rtx,
3276 GEN_INT (offset2))),
3277 gen_imagpart (submode, y));
3279 else
3280 #endif
3281 /* If this is a stack push, push the highpart first, so it
3282 will be in the argument order.
3284 In that case, change_address is used only to convert
3285 the mode, not to change the address. */
3286 if (stack)
3288 /* Note that the real part always precedes the imag part in memory
3289 regardless of machine's endianness. */
3290 #ifdef STACK_GROWS_DOWNWARD
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
3293 gen_imagpart (submode, y)));
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
3296 gen_realpart (submode, y)));
3297 #else
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
3300 gen_realpart (submode, y)));
3301 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3302 (gen_rtx_MEM (submode, XEXP (x, 0)),
3303 gen_imagpart (submode, y)));
3304 #endif
3306 else
3308 rtx realpart_x, realpart_y;
3309 rtx imagpart_x, imagpart_y;
3311 /* If this is a complex value with each part being smaller than a
3312 word, the usual calling sequence will likely pack the pieces into
3313 a single register. Unfortunately, SUBREG of hard registers only
3314 deals in terms of words, so we have a problem converting input
3315 arguments to the CONCAT of two registers that is used elsewhere
3316 for complex values. If this is before reload, we can copy it into
3317 memory and reload. FIXME, we should see about using extract and
3318 insert on integer registers, but complex short and complex char
3319 variables should be rarely used. */
3320 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3321 && (reload_in_progress | reload_completed) == 0)
3323 int packed_dest_p
3324 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3325 int packed_src_p
3326 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3328 if (packed_dest_p || packed_src_p)
3330 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3331 ? MODE_FLOAT : MODE_INT);
3333 enum machine_mode reg_mode
3334 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3336 if (reg_mode != BLKmode)
3338 rtx mem = assign_stack_temp (reg_mode,
3339 GET_MODE_SIZE (mode), 0);
3340 rtx cmem = adjust_address (mem, mode, 0);
3342 cfun->cannot_inline
3343 = N_("function using short complex types cannot be inline");
3345 if (packed_dest_p)
3347 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3349 emit_move_insn_1 (cmem, y);
3350 return emit_move_insn_1 (sreg, mem);
3352 else
3354 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3356 emit_move_insn_1 (mem, sreg);
3357 return emit_move_insn_1 (x, cmem);
3363 realpart_x = gen_realpart (submode, x);
3364 realpart_y = gen_realpart (submode, y);
3365 imagpart_x = gen_imagpart (submode, x);
3366 imagpart_y = gen_imagpart (submode, y);
3368 /* Show the output dies here. This is necessary for SUBREGs
3369 of pseudos since we cannot track their lifetimes correctly;
3370 hard regs shouldn't appear here except as return values.
3371 We never want to emit such a clobber after reload. */
3372 if (x != y
3373 && ! (reload_in_progress || reload_completed)
3374 && (GET_CODE (realpart_x) == SUBREG
3375 || GET_CODE (imagpart_x) == SUBREG))
3376 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3378 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3379 (realpart_x, realpart_y));
3380 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3381 (imagpart_x, imagpart_y));
3384 return get_last_insn ();
3387 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3388 find a mode to do it in. If we have a movcc, use it. Otherwise,
3389 find the MODE_INT mode of the same width. */
3390 else if (GET_MODE_CLASS (mode) == MODE_CC
3391 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3393 enum insn_code insn_code;
3394 enum machine_mode tmode = VOIDmode;
3395 rtx x1 = x, y1 = y;
3397 if (mode != CCmode
3398 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3399 tmode = CCmode;
3400 else
3401 for (tmode = QImode; tmode != VOIDmode;
3402 tmode = GET_MODE_WIDER_MODE (tmode))
3403 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3404 break;
3406 if (tmode == VOIDmode)
3407 abort ();
3409 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3410 may call change_address which is not appropriate if we were
3411 called when a reload was in progress. We don't have to worry
3412 about changing the address since the size in bytes is supposed to
3413 be the same. Copy the MEM to change the mode and move any
3414 substitutions from the old MEM to the new one. */
3416 if (reload_in_progress)
3418 x = gen_lowpart_common (tmode, x1);
3419 if (x == 0 && GET_CODE (x1) == MEM)
3421 x = adjust_address_nv (x1, tmode, 0);
3422 copy_replacements (x1, x);
3425 y = gen_lowpart_common (tmode, y1);
3426 if (y == 0 && GET_CODE (y1) == MEM)
3428 y = adjust_address_nv (y1, tmode, 0);
3429 copy_replacements (y1, y);
3432 else
3434 x = gen_lowpart (tmode, x);
3435 y = gen_lowpart (tmode, y);
3438 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3439 return emit_insn (GEN_FCN (insn_code) (x, y));
3442 /* This will handle any multi-word or full-word mode that lacks a move_insn
3443 pattern. However, you will get better code if you define such patterns,
3444 even if they must turn into multiple assembler instructions. */
3445 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3447 rtx last_insn = 0;
3448 rtx seq, inner;
3449 int need_clobber;
3450 int i;
3452 #ifdef PUSH_ROUNDING
3454 /* If X is a push on the stack, do the push now and replace
3455 X with a reference to the stack pointer. */
3456 if (push_operand (x, GET_MODE (x)))
3458 rtx temp;
3459 enum rtx_code code;
3461 /* Do not use anti_adjust_stack, since we don't want to update
3462 stack_pointer_delta. */
3463 temp = expand_binop (Pmode,
3464 #ifdef STACK_GROWS_DOWNWARD
3465 sub_optab,
3466 #else
3467 add_optab,
3468 #endif
3469 stack_pointer_rtx,
3470 GEN_INT
3471 (PUSH_ROUNDING
3472 (GET_MODE_SIZE (GET_MODE (x)))),
3473 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3475 if (temp != stack_pointer_rtx)
3476 emit_move_insn (stack_pointer_rtx, temp);
3478 code = GET_CODE (XEXP (x, 0));
3480 /* Just hope that small offsets off SP are OK. */
3481 if (code == POST_INC)
3482 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3483 GEN_INT (-((HOST_WIDE_INT)
3484 GET_MODE_SIZE (GET_MODE (x)))));
3485 else if (code == POST_DEC)
3486 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3487 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3488 else
3489 temp = stack_pointer_rtx;
3491 x = change_address (x, VOIDmode, temp);
3493 #endif
3495 /* If we are in reload, see if either operand is a MEM whose address
3496 is scheduled for replacement. */
3497 if (reload_in_progress && GET_CODE (x) == MEM
3498 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3499 x = replace_equiv_address_nv (x, inner);
3500 if (reload_in_progress && GET_CODE (y) == MEM
3501 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3502 y = replace_equiv_address_nv (y, inner);
3504 start_sequence ();
3506 need_clobber = 0;
3507 for (i = 0;
3508 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3509 i++)
3511 rtx xpart = operand_subword (x, i, 1, mode);
3512 rtx ypart = operand_subword (y, i, 1, mode);
3514 /* If we can't get a part of Y, put Y into memory if it is a
3515 constant. Otherwise, force it into a register. If we still
3516 can't get a part of Y, abort. */
3517 if (ypart == 0 && CONSTANT_P (y))
3519 y = force_const_mem (mode, y);
3520 ypart = operand_subword (y, i, 1, mode);
3522 else if (ypart == 0)
3523 ypart = operand_subword_force (y, i, mode);
3525 if (xpart == 0 || ypart == 0)
3526 abort ();
3528 need_clobber |= (GET_CODE (xpart) == SUBREG);
3530 last_insn = emit_move_insn (xpart, ypart);
3533 seq = get_insns ();
3534 end_sequence ();
3536 /* Show the output dies here. This is necessary for SUBREGs
3537 of pseudos since we cannot track their lifetimes correctly;
3538 hard regs shouldn't appear here except as return values.
3539 We never want to emit such a clobber after reload. */
3540 if (x != y
3541 && ! (reload_in_progress || reload_completed)
3542 && need_clobber != 0)
3543 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3545 emit_insn (seq);
3547 return last_insn;
3549 else
3550 abort ();
3553 /* If Y is representable exactly in a narrower mode, and the target can
3554 perform the extension directly from constant or memory, then emit the
3555 move as an extension. */
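/* For instance, a DFmode move of the constant 1.0 can be emitted as an
SFmode-to-DFmode extension, since 1.0 truncates to SFmode exactly; a
constant like 0.1 cannot, because its DFmode value is not exactly
representable in SFmode.  */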
3557 static rtx
3558 compress_float_constant (x, y)
3559 rtx x, y;
3561 enum machine_mode dstmode = GET_MODE (x);
3562 enum machine_mode orig_srcmode = GET_MODE (y);
3563 enum machine_mode srcmode;
3564 REAL_VALUE_TYPE r;
3566 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3568 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3569 srcmode != orig_srcmode;
3570 srcmode = GET_MODE_WIDER_MODE (srcmode))
3572 enum insn_code ic;
3573 rtx trunc_y, last_insn;
3575 /* Skip if the target can't extend this way. */
3576 ic = can_extend_p (dstmode, srcmode, 0);
3577 if (ic == CODE_FOR_nothing)
3578 continue;
3580 /* Skip if the narrowed value isn't exact. */
3581 if (! exact_real_truncate (srcmode, &r))
3582 continue;
3584 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3586 if (LEGITIMATE_CONSTANT_P (trunc_y))
3588 /* Skip if the target needs extra instructions to perform
3589 the extension. */
3590 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3591 continue;
3593 else if (float_extend_from_mem[dstmode][srcmode])
3594 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3595 else
3596 continue;
3598 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3599 last_insn = get_last_insn ();
3601 if (GET_CODE (x) == REG)
3602 REG_NOTES (last_insn)
3603 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3605 return last_insn;
3608 return NULL_RTX;
3611 /* Pushing data onto the stack. */
3613 /* Push a block of length SIZE (perhaps variable)
3614 and return an rtx to address the beginning of the block.
3615 Note that it is not possible for the value returned to be a QUEUED.
3616 The value may be virtual_outgoing_args_rtx.
3618 EXTRA is the number of bytes of padding to push in addition to SIZE.
3619 BELOW nonzero means this padding comes at low addresses;
3620 otherwise, the padding comes at high addresses. */
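/* For instance, push_block (GEN_INT (16), 0, 0) emits a single stack
adjustment of 16 bytes and, on a downward-growing stack, returns (after
memory_address validation) virtual_outgoing_args_rtx as the address of the
start of the new block.  */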
3622 rtx
3623 push_block (size, extra, below)
3624 rtx size;
3625 int extra, below;
3627 rtx temp;
3629 size = convert_modes (Pmode, ptr_mode, size, 1);
3630 if (CONSTANT_P (size))
3631 anti_adjust_stack (plus_constant (size, extra));
3632 else if (GET_CODE (size) == REG && extra == 0)
3633 anti_adjust_stack (size);
3634 else
3636 temp = copy_to_mode_reg (Pmode, size);
3637 if (extra != 0)
3638 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3639 temp, 0, OPTAB_LIB_WIDEN);
3640 anti_adjust_stack (temp);
3643 #ifndef STACK_GROWS_DOWNWARD
3644 if (0)
3645 #else
3646 if (1)
3647 #endif
3649 temp = virtual_outgoing_args_rtx;
3650 if (extra != 0 && below)
3651 temp = plus_constant (temp, extra);
3653 else
3655 if (GET_CODE (size) == CONST_INT)
3656 temp = plus_constant (virtual_outgoing_args_rtx,
3657 -INTVAL (size) - (below ? 0 : extra));
3658 else if (extra != 0 && !below)
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3660 negate_rtx (Pmode, plus_constant (size, extra)));
3661 else
3662 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3663 negate_rtx (Pmode, size));
3666 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3669 #ifdef PUSH_ROUNDING
3671 /* Emit single push insn. */
3673 static void
3674 emit_single_push_insn (mode, x, type)
3675 rtx x;
3676 enum machine_mode mode;
3677 tree type;
3679 rtx dest_addr;
3680 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3681 rtx dest;
3682 enum insn_code icode;
3683 insn_operand_predicate_fn pred;
3685 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3686 /* If there is a push pattern, use it. Otherwise try the old way of
3687 throwing a MEM representing the push operation at the move expander. */
3688 icode = push_optab->handlers[(int) mode].insn_code;
3689 if (icode != CODE_FOR_nothing)
3691 if (((pred = insn_data[(int) icode].operand[0].predicate)
3692 && !((*pred) (x, mode))))
3693 x = force_reg (mode, x);
3694 emit_insn (GEN_FCN (icode) (x));
3695 return;
3697 if (GET_MODE_SIZE (mode) == rounded_size)
3698 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3699 else
3701 #ifdef STACK_GROWS_DOWNWARD
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3703 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3704 #else
3705 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3706 GEN_INT (rounded_size));
3707 #endif
3708 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
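/* To illustrate: pushing a QImode value when PUSH_ROUNDING rounds the
1-byte push up to 4 bytes on a downward-growing stack gives a DEST_ADDR of
the form (pre_modify sp (plus sp (const_int -4))), so the stack pointer is
adjusted by the rounded size and the value is stored at the adjusted
address.  */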
3711 dest = gen_rtx_MEM (mode, dest_addr);
3713 if (type != 0)
3715 set_mem_attributes (dest, type, 1);
3717 if (flag_optimize_sibling_calls)
3718 /* Function incoming arguments may overlap with sibling call
3719 outgoing arguments and we cannot allow reordering of reads
3720 from function arguments with stores to outgoing arguments
3721 of sibling calls. */
3722 set_mem_alias_set (dest, 0);
3724 emit_move_insn (dest, x);
3726 #endif
3728 /* Generate code to push X onto the stack, assuming it has mode MODE and
3729 type TYPE.
3730 MODE is redundant except when X is a CONST_INT (since they don't
3731 carry mode info).
3732 SIZE is an rtx for the size of data to be copied (in bytes),
3733 needed only if X is BLKmode.
3735 ALIGN (in bits) is maximum alignment we can assume.
3737 If PARTIAL and REG are both nonzero, then copy that many of the first
3738 words of X into registers starting with REG, and push the rest of X.
3739 The amount of space pushed is decreased by PARTIAL words,
3740 rounded *down* to a multiple of PARM_BOUNDARY.
3741 REG must be a hard register in this case.
3742 If REG is zero but PARTIAL is not, take all other actions for an
3743 argument partially in registers, but do not actually load any
3744 registers.
3746 EXTRA is the amount in bytes of extra space to leave next to this arg.
3747 This is ignored if an argument block has already been allocated.
3749 On a machine that lacks real push insns, ARGS_ADDR is the address of
3750 the bottom of the argument block for this call. We use indexing off there
3751 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3752 argument block has not been preallocated.
3754 ARGS_SO_FAR is the size of args previously pushed for this call.
3756 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3757 for arguments passed in registers. If nonzero, it will be the number
3758 of bytes required. */
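/* To illustrate the PARTIAL/REG convention on a target with 4-byte words
and a 32-bit PARM_BOUNDARY: for a 12-byte BLKmode argument with
PARTIAL == 1 and REG set, the first word ends up in REG (copied at the end
of this function) and only the remaining 8 bytes are copied to the
stack.  */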
3760 void
3761 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3762 args_addr, args_so_far, reg_parm_stack_space,
3763 alignment_pad)
3764 rtx x;
3765 enum machine_mode mode;
3766 tree type;
3767 rtx size;
3768 unsigned int align;
3769 int partial;
3770 rtx reg;
3771 int extra;
3772 rtx args_addr;
3773 rtx args_so_far;
3774 int reg_parm_stack_space;
3775 rtx alignment_pad;
3777 rtx xinner;
3778 enum direction stack_direction
3779 #ifdef STACK_GROWS_DOWNWARD
3780 = downward;
3781 #else
3782 = upward;
3783 #endif
3785 /* Decide where to pad the argument: `downward' for below,
3786 `upward' for above, or `none' for don't pad it.
3787 Default is below for small data on big-endian machines; else above. */
3788 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3790 /* Invert direction if stack is post-decrement.
3791 FIXME: why? */
3792 if (STACK_PUSH_CODE == POST_DEC)
3793 if (where_pad != none)
3794 where_pad = (where_pad == downward ? upward : downward);
3796 xinner = x = protect_from_queue (x, 0);
3798 if (mode == BLKmode)
3800 /* Copy a block into the stack, entirely or partially. */
3802 rtx temp;
3803 int used = partial * UNITS_PER_WORD;
3804 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3805 int skip;
3807 if (size == 0)
3808 abort ();
3810 used -= offset;
3812 /* USED is now the # of bytes we need not copy to the stack
3813 because registers will take care of them. */
3815 if (partial != 0)
3816 xinner = adjust_address (xinner, BLKmode, used);
3818 /* If the partial register-part of the arg counts in its stack size,
3819 skip the part of stack space corresponding to the registers.
3820 Otherwise, start copying to the beginning of the stack space,
3821 by setting SKIP to 0. */
3822 skip = (reg_parm_stack_space == 0) ? 0 : used;
3824 #ifdef PUSH_ROUNDING
3825 /* Do it with several push insns if that doesn't take lots of insns
3826 and if there is no difficulty with push insns that skip bytes
3827 on the stack for alignment purposes. */
3828 if (args_addr == 0
3829 && PUSH_ARGS
3830 && GET_CODE (size) == CONST_INT
3831 && skip == 0
3832 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3833 /* Here we avoid the case of a structure whose weak alignment
3834 forces many pushes of a small amount of data,
3835 and such small pushes do rounding that causes trouble. */
3836 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3837 || align >= BIGGEST_ALIGNMENT
3838 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3839 == (align / BITS_PER_UNIT)))
3840 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3842 /* Push padding now if padding above and stack grows down,
3843 or if padding below and stack grows up.
3844 But if space already allocated, this has already been done. */
3845 if (extra && args_addr == 0
3846 && where_pad != none && where_pad != stack_direction)
3847 anti_adjust_stack (GEN_INT (extra));
3849 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3851 else
3852 #endif /* PUSH_ROUNDING */
3854 rtx target;
3856 /* Otherwise make space on the stack and copy the data
3857 to the address of that space. */
3859 /* Deduct words put into registers from the size we must copy. */
3860 if (partial != 0)
3862 if (GET_CODE (size) == CONST_INT)
3863 size = GEN_INT (INTVAL (size) - used);
3864 else
3865 size = expand_binop (GET_MODE (size), sub_optab, size,
3866 GEN_INT (used), NULL_RTX, 0,
3867 OPTAB_LIB_WIDEN);
3870 /* Get the address of the stack space.
3871 In this case, we do not deal with EXTRA separately.
3872 A single stack adjust will do. */
3873 if (! args_addr)
3875 temp = push_block (size, extra, where_pad == downward);
3876 extra = 0;
3878 else if (GET_CODE (args_so_far) == CONST_INT)
3879 temp = memory_address (BLKmode,
3880 plus_constant (args_addr,
3881 skip + INTVAL (args_so_far)));
3882 else
3883 temp = memory_address (BLKmode,
3884 plus_constant (gen_rtx_PLUS (Pmode,
3885 args_addr,
3886 args_so_far),
3887 skip));
3889 if (!ACCUMULATE_OUTGOING_ARGS)
3891 /* If the source is referenced relative to the stack pointer,
3892 copy it to another register to stabilize it. We do not need
3893 to do this if we know that we won't be changing sp. */
3895 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3896 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3897 temp = copy_to_reg (temp);
3900 target = gen_rtx_MEM (BLKmode, temp);
3902 if (type != 0)
3904 set_mem_attributes (target, type, 1);
3905 /* Function incoming arguments may overlap with sibling call
3906 outgoing arguments and we cannot allow reordering of reads
3907 from function arguments with stores to outgoing arguments
3908 of sibling calls. */
3909 set_mem_alias_set (target, 0);
3912 /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
3913 PARM_BOUNDARY. Assume the caller isn't lying. */
3914 set_mem_align (target, align);
3916 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3919 else if (partial > 0)
3921 /* Scalar partly in registers. */
3923 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3924 int i;
3925 int not_stack;
3926 /* # words of start of argument
3927 that we must make space for but need not store. */
3928 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
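/* An illustrative figure (assumed values, not from any particular
   target): with BITS_PER_WORD == 32 and PARM_BOUNDARY == 64 the divisor
   PARM_BOUNDARY / BITS_PER_WORD is 2, so PARTIAL == 3 register words
   gives OFFSET == 1, i.e. one word at the start of the stack area that
   must be allocated but need not be stored.  */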
3929 int args_offset = INTVAL (args_so_far);
3930 int skip;
3932 /* Push padding now if padding above and stack grows down,
3933 or if padding below and stack grows up.
3934 But if space already allocated, this has already been done. */
3935 if (extra && args_addr == 0
3936 && where_pad != none && where_pad != stack_direction)
3937 anti_adjust_stack (GEN_INT (extra));
3939 /* If we make space by pushing it, we might as well push
3940 the real data. Otherwise, we can leave OFFSET nonzero
3941 and leave the space uninitialized. */
3942 if (args_addr == 0)
3943 offset = 0;
3945 /* Now NOT_STACK gets the number of words that we don't need to
3946 allocate on the stack. */
3947 not_stack = partial - offset;
3949 /* If the partial register-part of the arg counts in its stack size,
3950 skip the part of stack space corresponding to the registers.
3951 Otherwise, start copying to the beginning of the stack space,
3952 by setting SKIP to 0. */
3953 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3955 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3956 x = validize_mem (force_const_mem (mode, x));
3958 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3959 SUBREGs of such registers are not allowed. */
3960 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3961 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3962 x = copy_to_reg (x);
3964 /* Loop over all the words allocated on the stack for this arg. */
3965 /* We can do it by words, because any scalar bigger than a word
3966 has a size a multiple of a word. */
3967 #ifndef PUSH_ARGS_REVERSED
3968 for (i = not_stack; i < size; i++)
3969 #else
3970 for (i = size - 1; i >= not_stack; i--)
3971 #endif
3972 if (i >= not_stack + offset)
3973 emit_push_insn (operand_subword_force (x, i, mode),
3974 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3975 0, args_addr,
3976 GEN_INT (args_offset + ((i - not_stack + skip)
3977 * UNITS_PER_WORD)),
3978 reg_parm_stack_space, alignment_pad);
3980 else
3982 rtx addr;
3983 rtx dest;
3985 /* Push padding now if padding above and stack grows down,
3986 or if padding below and stack grows up.
3987 But if space already allocated, this has already been done. */
3988 if (extra && args_addr == 0
3989 && where_pad != none && where_pad != stack_direction)
3990 anti_adjust_stack (GEN_INT (extra));
3992 #ifdef PUSH_ROUNDING
3993 if (args_addr == 0 && PUSH_ARGS)
3994 emit_single_push_insn (mode, x, type);
3995 else
3996 #endif
3998 if (GET_CODE (args_so_far) == CONST_INT)
3999 addr
4000 = memory_address (mode,
4001 plus_constant (args_addr,
4002 INTVAL (args_so_far)));
4003 else
4004 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4005 args_so_far));
4006 dest = gen_rtx_MEM (mode, addr);
4007 if (type != 0)
4009 set_mem_attributes (dest, type, 1);
4010 /* Function incoming arguments may overlap with sibling call
4011 outgoing arguments and we cannot allow reordering of reads
4012 from function arguments with stores to outgoing arguments
4013 of sibling calls. */
4014 set_mem_alias_set (dest, 0);
4017 emit_move_insn (dest, x);
4021 /* If part should go in registers, copy that part
4022 into the appropriate registers. Do this now, at the end,
4023 since mem-to-mem copies above may do function calls. */
4024 if (partial > 0 && reg != 0)
4026 /* Handle calls that pass values in multiple non-contiguous locations.
4027 The Irix 6 ABI has examples of this. */
4028 if (GET_CODE (reg) == PARALLEL)
4029 emit_group_load (reg, x, -1); /* ??? size? */
4030 else
4031 move_block_to_reg (REGNO (reg), x, partial, mode);
4034 if (extra && args_addr == 0 && where_pad == stack_direction)
4035 anti_adjust_stack (GEN_INT (extra));
4037 if (alignment_pad && args_addr == 0)
4038 anti_adjust_stack (alignment_pad);
4041 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4042 operations. */
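/* A typical use (shown only as a sketch; see the callers in this file):

   rtx subtarget = get_subtarget (target);

   SUBTARGET is then either 0 or a pseudo register that is safe to use
   as a scratch target when expanding subexpressions.  */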
4044 static rtx
4045 get_subtarget (x)
4046 rtx x;
4048 return ((x == 0
4049 /* Only registers can be subtargets. */
4050 || GET_CODE (x) != REG
4051 /* If the register is readonly, it can't be set more than once. */
4052 || RTX_UNCHANGING_P (x)
4053 /* Don't use hard regs to avoid extending their life. */
4054 || REGNO (x) < FIRST_PSEUDO_REGISTER
4055 /* Avoid subtargets inside loops,
4056 since they hide some invariant expressions. */
4057 || preserve_subexpressions_p ())
4058 ? 0 : x);
4061 /* Expand an assignment that stores the value of FROM into TO.
4062 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4063 (This may contain a QUEUED rtx;
4064 if the value is constant, this rtx is a constant.)
4065 Otherwise, the returned value is NULL_RTX.
4067 SUGGEST_REG is no longer actually used.
4068 It used to mean, copy the value through a register
4069 and return that register, if that is possible.
4070 We now use WANT_VALUE to decide whether to do this. */
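/* Illustrative caller (a sketch of how expand_expr handles a
   MODIFY_EXPR such as "x = y"; argument details simplified):

   temp = expand_assignment (TREE_OPERAND (exp, 0),
                             TREE_OPERAND (exp, 1),
                             want_value, 0);

   TO is the lhs tree, FROM is the rhs tree, and SUGGEST_REG is ignored.  */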
4072 rtx
4073 expand_assignment (to, from, want_value, suggest_reg)
4074 tree to, from;
4075 int want_value;
4076 int suggest_reg ATTRIBUTE_UNUSED;
4078 rtx to_rtx = 0;
4079 rtx result;
4081 /* Don't crash if the lhs of the assignment was erroneous. */
4083 if (TREE_CODE (to) == ERROR_MARK)
4085 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4086 return want_value ? result : NULL_RTX;
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, has the same
4093 problem. */
4095 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4096 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4097 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4099 enum machine_mode mode1;
4100 HOST_WIDE_INT bitsize, bitpos;
4101 rtx orig_to_rtx;
4102 tree offset;
4103 int unsignedp;
4104 int volatilep = 0;
4105 tree tem;
4107 push_temp_slots ();
4108 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4109 &unsignedp, &volatilep);
4111 /* If we are going to use store_bit_field and extract_bit_field,
4112 make sure to_rtx will be safe for multiple use. */
4114 if (mode1 == VOIDmode && want_value)
4115 tem = stabilize_reference (tem);
4117 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4119 if (offset != 0)
4121 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4123 if (GET_CODE (to_rtx) != MEM)
4124 abort ();
4126 #ifdef POINTERS_EXTEND_UNSIGNED
4127 if (GET_MODE (offset_rtx) != Pmode)
4128 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4129 #else
4130 if (GET_MODE (offset_rtx) != ptr_mode)
4131 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4132 #endif
4134 /* A constant address in TO_RTX can have VOIDmode; we must not
4135 call force_reg in that case, so avoid it. */
4136 if (GET_CODE (to_rtx) == MEM
4137 && GET_MODE (to_rtx) == BLKmode
4138 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4139 && bitsize > 0
4140 && (bitpos % bitsize) == 0
4141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4142 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4144 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4145 bitpos = 0;
4148 to_rtx = offset_address (to_rtx, offset_rtx,
4149 highest_pow2_factor_for_type (TREE_TYPE (to),
4150 offset));
4153 if (GET_CODE (to_rtx) == MEM)
4155 /* If the field is at offset zero, we could have been given the
4156 DECL_RTX of the parent struct. Don't munge it. */
4157 to_rtx = shallow_copy_rtx (to_rtx);
4159 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4162 /* Deal with volatile and readonly fields. The former is only done
4163 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4164 if (volatilep && GET_CODE (to_rtx) == MEM)
4166 if (to_rtx == orig_to_rtx)
4167 to_rtx = copy_rtx (to_rtx);
4168 MEM_VOLATILE_P (to_rtx) = 1;
4171 if (TREE_CODE (to) == COMPONENT_REF
4172 && TREE_READONLY (TREE_OPERAND (to, 1)))
4174 if (to_rtx == orig_to_rtx)
4175 to_rtx = copy_rtx (to_rtx);
4176 RTX_UNCHANGING_P (to_rtx) = 1;
4179 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4181 if (to_rtx == orig_to_rtx)
4182 to_rtx = copy_rtx (to_rtx);
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4186 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4187 (want_value
4188 /* Spurious cast for HPUX compiler. */
4189 ? ((enum machine_mode)
4190 TYPE_MODE (TREE_TYPE (to)))
4191 : VOIDmode),
4192 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4194 preserve_temp_slots (result);
4195 free_temp_slots ();
4196 pop_temp_slots ();
4198 /* If the value is meaningful, convert RESULT to the proper mode.
4199 Otherwise, return nothing. */
4200 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4201 TYPE_MODE (TREE_TYPE (from)),
4202 result,
4203 TREE_UNSIGNED (TREE_TYPE (to)))
4204 : NULL_RTX);
4207 /* If the rhs is a function call and its value is not an aggregate,
4208 call the function before we start to compute the lhs.
4209 This is needed for correct code for cases such as
4210 val = setjmp (buf) on machines where reference to val
4211 requires loading up part of an address in a separate insn.
4213 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4214 since it might be a promoted variable where the zero- or sign- extension
4215 needs to be done. Handling this in the normal way is safe because no
4216 computation is done before the call. */
4217 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4219 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4220 && GET_CODE (DECL_RTL (to)) == REG))
4222 rtx value;
4224 push_temp_slots ();
4225 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4226 if (to_rtx == 0)
4227 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4229 /* Handle calls that return values in multiple non-contiguous locations.
4230 The Irix 6 ABI has examples of this. */
4231 if (GET_CODE (to_rtx) == PARALLEL)
4232 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4233 else if (GET_MODE (to_rtx) == BLKmode)
4234 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4235 else
4237 #ifdef POINTERS_EXTEND_UNSIGNED
4238 if (POINTER_TYPE_P (TREE_TYPE (to))
4239 && GET_MODE (to_rtx) != GET_MODE (value))
4240 value = convert_memory_address (GET_MODE (to_rtx), value);
4241 #endif
4242 emit_move_insn (to_rtx, value);
4244 preserve_temp_slots (to_rtx);
4245 free_temp_slots ();
4246 pop_temp_slots ();
4247 return want_value ? to_rtx : NULL_RTX;
4250 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4251 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4253 if (to_rtx == 0)
4254 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4256 /* Don't move directly into a return register. */
4257 if (TREE_CODE (to) == RESULT_DECL
4258 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4260 rtx temp;
4262 push_temp_slots ();
4263 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4265 if (GET_CODE (to_rtx) == PARALLEL)
4266 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4267 else
4268 emit_move_insn (to_rtx, temp);
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
4272 pop_temp_slots ();
4273 return want_value ? to_rtx : NULL_RTX;
4276 /* In case we are returning the contents of an object which overlaps
4277 the place the value is being stored, use a safe function when copying
4278 a value through a pointer into a structure value return block. */
4279 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4280 && current_function_returns_struct
4281 && !current_function_returns_pcc_struct)
4283 rtx from_rtx, size;
4285 push_temp_slots ();
4286 size = expr_size (from);
4287 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4289 if (TARGET_MEM_FUNCTIONS)
4290 emit_library_call (memmove_libfunc, LCT_NORMAL,
4291 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4292 XEXP (from_rtx, 0), Pmode,
4293 convert_to_mode (TYPE_MODE (sizetype),
4294 size, TREE_UNSIGNED (sizetype)),
4295 TYPE_MODE (sizetype));
4296 else
4297 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4298 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4299 XEXP (to_rtx, 0), Pmode,
4300 convert_to_mode (TYPE_MODE (integer_type_node),
4301 size,
4302 TREE_UNSIGNED (integer_type_node)),
4303 TYPE_MODE (integer_type_node));
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
4307 pop_temp_slots ();
4308 return want_value ? to_rtx : NULL_RTX;
4311 /* Compute FROM and store the value in the rtx we got. */
4313 push_temp_slots ();
4314 result = store_expr (from, to_rtx, want_value);
4315 preserve_temp_slots (result);
4316 free_temp_slots ();
4317 pop_temp_slots ();
4318 return want_value ? result : NULL_RTX;
4321 /* Generate code for computing expression EXP,
4322 and storing the value into TARGET.
4323 TARGET may contain a QUEUED rtx.
4325 If WANT_VALUE & 1 is nonzero, return a copy of the value
4326 not in TARGET, so that we can be sure to use the proper
4327 value in a containing expression even if TARGET has something
4328 else stored in it. If possible, we copy the value through a pseudo
4329 and return that pseudo. Or, if the value is constant, we try to
4330 return the constant. In some cases, we return a pseudo
4331 copied *from* TARGET.
4333 If the mode is BLKmode then we may return TARGET itself.
4334 It turns out that in BLKmode it doesn't cause a problem,
4335 because C has no operators that could combine two different
4336 assignments into the same BLKmode object with different values
4337 with no sequence point. Will other languages need this to
4338 be more thorough?
4340 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4341 to catch quickly any cases where the caller uses the value
4342 and fails to set WANT_VALUE.
4344 If WANT_VALUE & 2 is set, this is a store into a call param on the
4345 stack, and block moves may need to be treated specially. */
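/* Illustrative uses of WANT_VALUE (a sketch, not copied from any
   particular caller):

   store_expr (exp, target, 0);         - just store, no value wanted
   temp = store_expr (exp, target, 1);  - store and also return the value
   store_expr (exp, target, 2);         - store into a stack call parameter

   Bit 0 asks for a value back; bit 1 marks a store into a call
   parameter on the stack, so block moves use BLOCK_OP_CALL_PARM.  */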
4347 rtx
4348 store_expr (exp, target, want_value)
4349 tree exp;
4350 rtx target;
4351 int want_value;
4353 rtx temp;
4354 int dont_return_target = 0;
4355 int dont_store_target = 0;
4357 if (VOID_TYPE_P (TREE_TYPE (exp)))
4359 /* C++ can generate ?: expressions with a throw expression in one
4360 branch and an rvalue in the other. Here, we resolve attempts to
4361 store the throw expression's nonexistent result. */
4362 if (want_value)
4363 abort ();
4364 expand_expr (exp, const0_rtx, VOIDmode, 0);
4365 return NULL_RTX;
4367 if (TREE_CODE (exp) == COMPOUND_EXPR)
4369 /* Perform first part of compound expression, then assign from second
4370 part. */
4371 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4372 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4373 emit_queue ();
4374 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4376 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4378 /* For conditional expression, get safe form of the target. Then
4379 test the condition, doing the appropriate assignment on either
4380 side. This avoids the creation of unnecessary temporaries.
4381 For non-BLKmode, it is more efficient not to do this. */
4383 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4385 emit_queue ();
4386 target = protect_from_queue (target, 1);
4388 do_pending_stack_adjust ();
4389 NO_DEFER_POP;
4390 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4391 start_cleanup_deferral ();
4392 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4393 end_cleanup_deferral ();
4394 emit_queue ();
4395 emit_jump_insn (gen_jump (lab2));
4396 emit_barrier ();
4397 emit_label (lab1);
4398 start_cleanup_deferral ();
4399 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4400 end_cleanup_deferral ();
4401 emit_queue ();
4402 emit_label (lab2);
4403 OK_DEFER_POP;
4405 return want_value & 1 ? target : NULL_RTX;
4407 else if (queued_subexp_p (target))
4408 /* If target contains a postincrement, let's not risk
4409 using it as the place to generate the rhs. */
4411 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4413 /* Expand EXP into a new pseudo. */
4414 temp = gen_reg_rtx (GET_MODE (target));
4415 temp = expand_expr (exp, temp, GET_MODE (target),
4416 (want_value & 2
4417 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4419 else
4420 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4421 (want_value & 2
4422 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4424 /* If target is volatile, ANSI requires accessing the value
4425 *from* the target, if it is accessed. So make that happen.
4426 In no case return the target itself. */
4427 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4428 dont_return_target = 1;
4430 else if ((want_value & 1) != 0
4431 && GET_CODE (target) == MEM
4432 && ! MEM_VOLATILE_P (target)
4433 && GET_MODE (target) != BLKmode)
4434 /* If target is in memory and caller wants value in a register instead,
4435 arrange that. Pass TARGET as target for expand_expr so that,
4436 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4437 We know expand_expr will not use the target in that case.
4438 Don't do this if TARGET is volatile because we are supposed
4439 to write it and then read it. */
4441 temp = expand_expr (exp, target, GET_MODE (target),
4442 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4443 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4445 /* If TEMP is already in the desired TARGET, only copy it from
4446 memory and don't store it there again. */
4447 if (temp == target
4448 || (rtx_equal_p (temp, target)
4449 && ! side_effects_p (temp) && ! side_effects_p (target)))
4450 dont_store_target = 1;
4451 temp = copy_to_reg (temp);
4453 dont_return_target = 1;
4455 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4456 /* If this is a scalar in a register that is stored in a wider mode
4457 than the declared mode, compute the result into its declared mode
4458 and then convert to the wider mode. Our value is the computed
4459 expression. */
4461 rtx inner_target = 0;
4463 /* If we don't want a value, we can do the conversion inside EXP,
4464 which will often result in some optimizations. Do the conversion
4465 in two steps: first change the signedness, if needed, then
4466 the extend. But don't do this if the type of EXP is a subtype
4467 of something else since then the conversion might involve
4468 more than just converting modes. */
4469 if ((want_value & 1) == 0
4470 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4471 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4473 if (TREE_UNSIGNED (TREE_TYPE (exp))
4474 != SUBREG_PROMOTED_UNSIGNED_P (target))
4475 exp = convert
4476 ((*lang_hooks.types.signed_or_unsigned_type)
4477 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4479 exp = convert ((*lang_hooks.types.type_for_mode)
4480 (GET_MODE (SUBREG_REG (target)),
4481 SUBREG_PROMOTED_UNSIGNED_P (target)),
4482 exp);
4484 inner_target = SUBREG_REG (target);
4487 temp = expand_expr (exp, inner_target, VOIDmode,
4488 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4490 /* If TEMP is a MEM and we want a result value, make the access
4491 now so it gets done only once. Strictly speaking, this is
4492 only necessary if the MEM is volatile, or if the address
4493 overlaps TARGET. But not performing the load twice also
4494 reduces the amount of rtl we generate and then have to CSE. */
4495 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4496 temp = copy_to_reg (temp);
4498 /* If TEMP is a VOIDmode constant, use convert_modes to make
4499 sure that we properly convert it. */
4500 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4502 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4503 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4504 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4505 GET_MODE (target), temp,
4506 SUBREG_PROMOTED_UNSIGNED_P (target));
4509 convert_move (SUBREG_REG (target), temp,
4510 SUBREG_PROMOTED_UNSIGNED_P (target));
4512 /* If we promoted a constant, change the mode back down to match
4513 target. Otherwise, the caller might get confused by a result whose
4514 mode is larger than expected. */
4516 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4518 if (GET_MODE (temp) != VOIDmode)
4520 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4521 SUBREG_PROMOTED_VAR_P (temp) = 1;
4522 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4523 SUBREG_PROMOTED_UNSIGNED_P (target));
4525 else
4526 temp = convert_modes (GET_MODE (target),
4527 GET_MODE (SUBREG_REG (target)),
4528 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4531 return want_value & 1 ? temp : NULL_RTX;
4533 else
4535 temp = expand_expr (exp, target, GET_MODE (target),
4536 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4537 /* Return TARGET if it's a specified hardware register.
4538 If TARGET is a volatile mem ref, either return TARGET
4539 or return a reg copied *from* TARGET; ANSI requires this.
4541 Otherwise, if TEMP is not TARGET, return TEMP
4542 if it is constant (for efficiency),
4543 or if we really want the correct value. */
4544 if (!(target && GET_CODE (target) == REG
4545 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4546 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4547 && ! rtx_equal_p (temp, target)
4548 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4549 dont_return_target = 1;
4552 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4553 the same as that of TARGET, adjust the constant. This is needed, for
4554 example, in case it is a CONST_DOUBLE and we want only a word-sized
4555 value. */
4556 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4557 && TREE_CODE (exp) != ERROR_MARK
4558 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4559 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4560 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4562 /* If value was not generated in the target, store it there.
4563 Convert the value to TARGET's type first if necessary.
4564 If TEMP and TARGET compare equal according to rtx_equal_p, but
4565 one or both of them are volatile memory refs, we have to distinguish
4566 two cases:
4567 - expand_expr has used TARGET. In this case, we must not generate
4568 another copy. This can be detected by TARGET being equal according
4569 to == .
4570 - expand_expr has not used TARGET - that means that the source just
4571 happens to have the same RTX form. Since temp will have been created
4572 by expand_expr, it will compare unequal according to == .
4573 We must generate a copy in this case, to reach the correct number
4574 of volatile memory references. */
4576 if ((! rtx_equal_p (temp, target)
4577 || (temp != target && (side_effects_p (temp)
4578 || side_effects_p (target))))
4579 && TREE_CODE (exp) != ERROR_MARK
4580 && ! dont_store_target
4581 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4582 but TARGET is not valid memory reference, TEMP will differ
4583 from TARGET although it is really the same location. */
4584 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4585 || target != DECL_RTL_IF_SET (exp))
4586 /* If there's nothing to copy, don't bother. Don't call expr_size
4587 unless necessary, because some front ends' (C++) expr_size hook
4588 aborts on objects that are not supposed to be bit-copied or
4589 bit-initialized. */
4590 && expr_size (exp) != const0_rtx)
4592 target = protect_from_queue (target, 1);
4593 if (GET_MODE (temp) != GET_MODE (target)
4594 && GET_MODE (temp) != VOIDmode)
4596 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4597 if (dont_return_target)
4599 /* In this case, we will return TEMP,
4600 so make sure it has the proper mode.
4601 But don't forget to store the value into TARGET. */
4602 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4603 emit_move_insn (target, temp);
4605 else
4606 convert_move (target, temp, unsignedp);
4609 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4611 /* Handle copying a string constant into an array. The string
4612 constant may be shorter than the array. So copy just the string's
4613 actual length, and clear the rest. First get the size of the data
4614 type of the string, which is actually the size of the target. */
4615 rtx size = expr_size (exp);
4617 if (GET_CODE (size) == CONST_INT
4618 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4619 emit_block_move (target, temp, size,
4620 (want_value & 2
4621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4622 else
4624 /* Compute the size of the data to copy from the string. */
4625 tree copy_size
4626 = size_binop (MIN_EXPR,
4627 make_tree (sizetype, size),
4628 size_int (TREE_STRING_LENGTH (exp)));
4629 rtx copy_size_rtx
4630 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4631 (want_value & 2
4632 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4633 rtx label = 0;
4635 /* Copy that much. */
4636 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4637 TREE_UNSIGNED (sizetype));
4638 emit_block_move (target, temp, copy_size_rtx,
4639 (want_value & 2
4640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4642 /* Figure out how much is left in TARGET that we have to clear.
4643 Do all calculations in ptr_mode. */
4644 if (GET_CODE (copy_size_rtx) == CONST_INT)
4646 size = plus_constant (size, -INTVAL (copy_size_rtx));
4647 target = adjust_address (target, BLKmode,
4648 INTVAL (copy_size_rtx));
4650 else
4652 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4653 copy_size_rtx, NULL_RTX, 0,
4654 OPTAB_LIB_WIDEN);
4656 #ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (copy_size_rtx) != Pmode)
4658 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4659 TREE_UNSIGNED (sizetype));
4660 #endif
4662 target = offset_address (target, copy_size_rtx,
4663 highest_pow2_factor (copy_size));
4664 label = gen_label_rtx ();
4665 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4666 GET_MODE (size), 0, label);
4669 if (size != const0_rtx)
4670 clear_storage (target, size);
4672 if (label)
4673 emit_label (label);
4676 /* Handle calls that return values in multiple non-contiguous locations.
4677 The Irix 6 ABI has examples of this. */
4678 else if (GET_CODE (target) == PARALLEL)
4679 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4680 else if (GET_MODE (temp) == BLKmode)
4681 emit_block_move (target, temp, expr_size (exp),
4682 (want_value & 2
4683 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4684 else
4685 emit_move_insn (target, temp);
4688 /* If we don't want a value, return NULL_RTX. */
4689 if ((want_value & 1) == 0)
4690 return NULL_RTX;
4692 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4693 ??? The latter test doesn't seem to make sense. */
4694 else if (dont_return_target && GET_CODE (temp) != MEM)
4695 return temp;
4697 /* Return TARGET itself if it is a hard register. */
4698 else if ((want_value & 1) != 0
4699 && GET_MODE (target) != BLKmode
4700 && ! (GET_CODE (target) == REG
4701 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4702 return copy_to_reg (target);
4704 else
4705 return target;
4708 /* Return 1 if EXP just contains zeros. */
4710 static int
4711 is_zeros_p (exp)
4712 tree exp;
4714 tree elt;
4716 switch (TREE_CODE (exp))
4718 case CONVERT_EXPR:
4719 case NOP_EXPR:
4720 case NON_LVALUE_EXPR:
4721 case VIEW_CONVERT_EXPR:
4722 return is_zeros_p (TREE_OPERAND (exp, 0));
4724 case INTEGER_CST:
4725 return integer_zerop (exp);
4727 case COMPLEX_CST:
4728 return
4729 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4731 case REAL_CST:
4732 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4734 case VECTOR_CST:
4735 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4736 elt = TREE_CHAIN (elt))
4737 if (!is_zeros_p (TREE_VALUE (elt)))
4738 return 0;
4740 return 1;
4742 case CONSTRUCTOR:
4743 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4744 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4745 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4746 if (! is_zeros_p (TREE_VALUE (elt)))
4747 return 0;
4749 return 1;
4751 default:
4752 return 0;
4756 /* Return 1 if EXP contains mostly (3/4) zeros. */
4758 static int
4759 mostly_zeros_p (exp)
4760 tree exp;
4762 if (TREE_CODE (exp) == CONSTRUCTOR)
4764 int elts = 0, zeros = 0;
4765 tree elt = CONSTRUCTOR_ELTS (exp);
4766 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4768 /* If there are no ranges of true bits, it is all zero. */
4769 return elt == NULL_TREE;
4771 for (; elt; elt = TREE_CHAIN (elt))
4773 /* We do not handle the case where the index is a RANGE_EXPR,
4774 so the statistic will be somewhat inaccurate.
4775 We do make a more accurate count in store_constructor itself,
4776 and since this function is only used for nested array elements,
4777 this should be close enough. */
4778 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 zeros++;
4780 elts++;
4783 return 4 * zeros >= 3 * elts;
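/* The 4 * zeros >= 3 * elts test above is simply zeros / elts >= 3/4
   written without a division.  */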
4786 return is_zeros_p (exp);
4789 /* Helper function for store_constructor.
4790 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4791 TYPE is the type of the CONSTRUCTOR, not the element type.
4792 CLEARED is as for store_constructor.
4793 ALIAS_SET is the alias set to use for any stores.
4795 This provides a recursive shortcut back to store_constructor when it isn't
4796 necessary to go through store_field. This is so that we can pass through
4797 the cleared field to let store_constructor know that we may not have to
4798 clear a substructure if the outer structure has already been cleared. */
4800 static void
4801 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4802 alias_set)
4803 rtx target;
4804 unsigned HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4806 enum machine_mode mode;
4807 tree exp, type;
4808 int cleared;
4809 int alias_set;
4811 if (TREE_CODE (exp) == CONSTRUCTOR
4812 && bitpos % BITS_PER_UNIT == 0
4813 /* If we have a nonzero bitpos for a register target, then we just
4814 let store_field do the bitfield handling. This is unlikely to
4815 generate unnecessary clear instructions anyway. */
4816 && (bitpos == 0 || GET_CODE (target) == MEM))
4818 if (GET_CODE (target) == MEM)
4819 target
4820 = adjust_address (target,
4821 GET_MODE (target) == BLKmode
4822 || 0 != (bitpos
4823 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4824 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4827 /* Update the alias set, if required. */
4828 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4829 && MEM_ALIAS_SET (target) != 0)
4831 target = copy_rtx (target);
4832 set_mem_alias_set (target, alias_set);
4835 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4837 else
4838 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4839 alias_set);
4842 /* Store the value of constructor EXP into the rtx TARGET.
4843 TARGET is either a REG or a MEM; we know it cannot conflict, since
4844 safe_from_p has been called.
4845 CLEARED is true if TARGET is known to have been zero'd.
4846 SIZE is the number of bytes of TARGET we are allowed to modify: this
4847 may not be the same as the size of EXP if we are assigning to a field
4848 which has been packed to exclude padding bits. */
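/* As an illustration (hypothetical source, not taken from a test case):
   for

   struct s { int a; int b[4]; } x = { 1 };

   the CONSTRUCTOR lists fewer fields than the type has, so the code
   below clears all of TARGET first (setting CLEARED) and then stores
   only the explicitly initialized field through
   store_constructor_field.  */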
4850 static void
4851 store_constructor (exp, target, cleared, size)
4852 tree exp;
4853 rtx target;
4854 int cleared;
4855 HOST_WIDE_INT size;
4857 tree type = TREE_TYPE (exp);
4858 #ifdef WORD_REGISTER_OPERATIONS
4859 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4860 #endif
4862 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4863 || TREE_CODE (type) == QUAL_UNION_TYPE)
4865 tree elt;
4867 /* We either clear the aggregate or indicate the value is dead. */
4868 if ((TREE_CODE (type) == UNION_TYPE
4869 || TREE_CODE (type) == QUAL_UNION_TYPE)
4870 && ! cleared
4871 && ! CONSTRUCTOR_ELTS (exp))
4872 /* If the constructor is empty, clear the union. */
4874 clear_storage (target, expr_size (exp));
4875 cleared = 1;
4878 /* If we are building a static constructor into a register,
4879 set the initial value as zero so we can fold the value into
4880 a constant. But if more than one register is involved,
4881 this probably loses. */
4882 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4883 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4885 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4886 cleared = 1;
4889 /* If the constructor has fewer fields than the structure
4890 or if we are initializing the structure to mostly zeros,
4891 clear the whole structure first. Don't do this if TARGET is a
4892 register whose mode size isn't equal to SIZE since clear_storage
4893 can't handle this case. */
4894 else if (! cleared && size > 0
4895 && ((list_length (CONSTRUCTOR_ELTS (exp))
4896 != fields_length (type))
4897 || mostly_zeros_p (exp))
4898 && (GET_CODE (target) != REG
4899 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4900 == size)))
4902 clear_storage (target, GEN_INT (size));
4903 cleared = 1;
4906 if (! cleared)
4907 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4909 /* Store each element of the constructor into
4910 the corresponding field of TARGET. */
4912 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4914 tree field = TREE_PURPOSE (elt);
4915 tree value = TREE_VALUE (elt);
4916 enum machine_mode mode;
4917 HOST_WIDE_INT bitsize;
4918 HOST_WIDE_INT bitpos = 0;
4919 tree offset;
4920 rtx to_rtx = target;
4922 /* Just ignore missing fields.
4923 We cleared the whole structure, above,
4924 if any fields are missing. */
4925 if (field == 0)
4926 continue;
4928 if (cleared && is_zeros_p (value))
4929 continue;
4931 if (host_integerp (DECL_SIZE (field), 1))
4932 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4933 else
4934 bitsize = -1;
4936 mode = DECL_MODE (field);
4937 if (DECL_BIT_FIELD (field))
4938 mode = VOIDmode;
4940 offset = DECL_FIELD_OFFSET (field);
4941 if (host_integerp (offset, 0)
4942 && host_integerp (bit_position (field), 0))
4944 bitpos = int_bit_position (field);
4945 offset = 0;
4947 else
4948 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4950 if (offset)
4952 rtx offset_rtx;
4954 if (contains_placeholder_p (offset))
4955 offset = build (WITH_RECORD_EXPR, sizetype,
4956 offset, make_tree (TREE_TYPE (exp), target));
4958 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4959 if (GET_CODE (to_rtx) != MEM)
4960 abort ();
4962 #ifdef POINTERS_EXTEND_UNSIGNED
4963 if (GET_MODE (offset_rtx) != Pmode)
4964 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4965 #else
4966 if (GET_MODE (offset_rtx) != ptr_mode)
4967 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4968 #endif
4970 to_rtx = offset_address (to_rtx, offset_rtx,
4971 highest_pow2_factor (offset));
4974 if (TREE_READONLY (field))
4976 if (GET_CODE (to_rtx) == MEM)
4977 to_rtx = copy_rtx (to_rtx);
4979 RTX_UNCHANGING_P (to_rtx) = 1;
4982 #ifdef WORD_REGISTER_OPERATIONS
4983 /* If this initializes a field that is smaller than a word, at the
4984 start of a word, try to widen it to a full word.
4985 This special case allows us to output C++ member function
4986 initializations in a form that the optimizers can understand. */
4987 if (GET_CODE (target) == REG
4988 && bitsize < BITS_PER_WORD
4989 && bitpos % BITS_PER_WORD == 0
4990 && GET_MODE_CLASS (mode) == MODE_INT
4991 && TREE_CODE (value) == INTEGER_CST
4992 && exp_size >= 0
4993 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4995 tree type = TREE_TYPE (value);
4997 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4999 type = (*lang_hooks.types.type_for_size)
5000 (BITS_PER_WORD, TREE_UNSIGNED (type));
5001 value = convert (type, value);
5004 if (BYTES_BIG_ENDIAN)
5005 value
5006 = fold (build (LSHIFT_EXPR, type, value,
5007 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5008 bitsize = BITS_PER_WORD;
5009 mode = word_mode;
5011 #endif
5013 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5014 && DECL_NONADDRESSABLE_P (field))
5016 to_rtx = copy_rtx (to_rtx);
5017 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5020 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5021 value, type, cleared,
5022 get_alias_set (TREE_TYPE (field)));
5025 else if (TREE_CODE (type) == ARRAY_TYPE
5026 || TREE_CODE (type) == VECTOR_TYPE)
5028 tree elt;
5029 int i;
5030 int need_to_clear;
5031 tree domain = TYPE_DOMAIN (type);
5032 tree elttype = TREE_TYPE (type);
5033 int const_bounds_p;
5034 HOST_WIDE_INT minelt = 0;
5035 HOST_WIDE_INT maxelt = 0;
5037 /* Vectors are like arrays, but the domain is stored via an array
5038 type indirectly. */
5039 if (TREE_CODE (type) == VECTOR_TYPE)
5041 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5042 the same field as TYPE_DOMAIN, we are not guaranteed that
5043 it always will. */
5044 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5045 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5048 const_bounds_p = (TYPE_MIN_VALUE (domain)
5049 && TYPE_MAX_VALUE (domain)
5050 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5051 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5053 /* If we have constant bounds for the range of the type, get them. */
5054 if (const_bounds_p)
5056 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5057 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5060 /* If the constructor has fewer elements than the array,
5061 clear the whole array first. Similarly if this is a
5062 static constructor of a non-BLKmode object. */
5063 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5064 need_to_clear = 1;
5065 else
5067 HOST_WIDE_INT count = 0, zero_count = 0;
5068 need_to_clear = ! const_bounds_p;
5070 /* This loop is a more accurate version of the loop in
5071 mostly_zeros_p (it handles RANGE_EXPR in an index).
5072 It is also needed to check for missing elements. */
5073 for (elt = CONSTRUCTOR_ELTS (exp);
5074 elt != NULL_TREE && ! need_to_clear;
5075 elt = TREE_CHAIN (elt))
5077 tree index = TREE_PURPOSE (elt);
5078 HOST_WIDE_INT this_node_count;
5080 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5082 tree lo_index = TREE_OPERAND (index, 0);
5083 tree hi_index = TREE_OPERAND (index, 1);
5085 if (! host_integerp (lo_index, 1)
5086 || ! host_integerp (hi_index, 1))
5088 need_to_clear = 1;
5089 break;
5092 this_node_count = (tree_low_cst (hi_index, 1)
5093 - tree_low_cst (lo_index, 1) + 1);
5095 else
5096 this_node_count = 1;
5098 count += this_node_count;
5099 if (mostly_zeros_p (TREE_VALUE (elt)))
5100 zero_count += this_node_count;
5103 /* Clear the entire array first if there are any missing elements,
5104 or if the incidence of zero elements is >= 75%. */
5105 if (! need_to_clear
5106 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
5107 need_to_clear = 1;
5110 if (need_to_clear && size > 0)
5112 if (! cleared)
5114 if (REG_P (target))
5115 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5116 else
5117 clear_storage (target, GEN_INT (size));
5119 cleared = 1;
5121 else if (REG_P (target))
5122 /* Inform later passes that the old value is dead. */
5123 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5125 /* Store each element of the constructor into
5126 the corresponding element of TARGET, determined
5127 by counting the elements. */
5128 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5129 elt;
5130 elt = TREE_CHAIN (elt), i++)
5132 enum machine_mode mode;
5133 HOST_WIDE_INT bitsize;
5134 HOST_WIDE_INT bitpos;
5135 int unsignedp;
5136 tree value = TREE_VALUE (elt);
5137 tree index = TREE_PURPOSE (elt);
5138 rtx xtarget = target;
5140 if (cleared && is_zeros_p (value))
5141 continue;
5143 unsignedp = TREE_UNSIGNED (elttype);
5144 mode = TYPE_MODE (elttype);
5145 if (mode == BLKmode)
5146 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5147 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5148 : -1);
5149 else
5150 bitsize = GET_MODE_BITSIZE (mode);
5152 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5154 tree lo_index = TREE_OPERAND (index, 0);
5155 tree hi_index = TREE_OPERAND (index, 1);
5156 rtx index_r, pos_rtx, loop_end;
5157 struct nesting *loop;
5158 HOST_WIDE_INT lo, hi, count;
5159 tree position;
5161 /* If the range is constant and "small", unroll the loop. */
5162 if (const_bounds_p
5163 && host_integerp (lo_index, 0)
5164 && host_integerp (hi_index, 0)
5165 && (lo = tree_low_cst (lo_index, 0),
5166 hi = tree_low_cst (hi_index, 0),
5167 count = hi - lo + 1,
5168 (GET_CODE (target) != MEM
5169 || count <= 2
5170 || (host_integerp (TYPE_SIZE (elttype), 1)
5171 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5172 <= 40 * 8)))))
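/* The last clause above limits unrolling for a MEM target to ranges of
   at most two elements or at most 40 bytes (40 * 8 bits) of data.  */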
5174 lo -= minelt; hi -= minelt;
5175 for (; lo <= hi; lo++)
5177 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5179 if (GET_CODE (target) == MEM
5180 && !MEM_KEEP_ALIAS_SET_P (target)
5181 && TREE_CODE (type) == ARRAY_TYPE
5182 && TYPE_NONALIASED_COMPONENT (type))
5184 target = copy_rtx (target);
5185 MEM_KEEP_ALIAS_SET_P (target) = 1;
5188 store_constructor_field
5189 (target, bitsize, bitpos, mode, value, type, cleared,
5190 get_alias_set (elttype));
5193 else
5195 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5196 loop_end = gen_label_rtx ();
5198 unsignedp = TREE_UNSIGNED (domain);
5200 index = build_decl (VAR_DECL, NULL_TREE, domain);
5202 index_r
5203 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5204 &unsignedp, 0));
5205 SET_DECL_RTL (index, index_r);
5206 if (TREE_CODE (value) == SAVE_EXPR
5207 && SAVE_EXPR_RTL (value) == 0)
5209 /* Make sure value gets expanded once before the
5210 loop. */
5211 expand_expr (value, const0_rtx, VOIDmode, 0);
5212 emit_queue ();
5214 store_expr (lo_index, index_r, 0);
5215 loop = expand_start_loop (0);
5217 /* Assign value to element index. */
5218 position
5219 = convert (ssizetype,
5220 fold (build (MINUS_EXPR, TREE_TYPE (index),
5221 index, TYPE_MIN_VALUE (domain))));
5222 position = size_binop (MULT_EXPR, position,
5223 convert (ssizetype,
5224 TYPE_SIZE_UNIT (elttype)));
5226 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5227 xtarget = offset_address (target, pos_rtx,
5228 highest_pow2_factor (position));
5229 xtarget = adjust_address (xtarget, mode, 0);
5230 if (TREE_CODE (value) == CONSTRUCTOR)
5231 store_constructor (value, xtarget, cleared,
5232 bitsize / BITS_PER_UNIT);
5233 else
5234 store_expr (value, xtarget, 0);
5236 expand_exit_loop_if_false (loop,
5237 build (LT_EXPR, integer_type_node,
5238 index, hi_index));
5240 expand_increment (build (PREINCREMENT_EXPR,
5241 TREE_TYPE (index),
5242 index, integer_one_node), 0, 0);
5243 expand_end_loop ();
5244 emit_label (loop_end);
5247 else if ((index != 0 && ! host_integerp (index, 0))
5248 || ! host_integerp (TYPE_SIZE (elttype), 1))
5250 tree position;
5252 if (index == 0)
5253 index = ssize_int (1);
5255 if (minelt)
5256 index = convert (ssizetype,
5257 fold (build (MINUS_EXPR, index,
5258 TYPE_MIN_VALUE (domain))));
5260 position = size_binop (MULT_EXPR, index,
5261 convert (ssizetype,
5262 TYPE_SIZE_UNIT (elttype)));
5263 xtarget = offset_address (target,
5264 expand_expr (position, 0, VOIDmode, 0),
5265 highest_pow2_factor (position));
5266 xtarget = adjust_address (xtarget, mode, 0);
5267 store_expr (value, xtarget, 0);
5269 else
5271 if (index != 0)
5272 bitpos = ((tree_low_cst (index, 0) - minelt)
5273 * tree_low_cst (TYPE_SIZE (elttype), 1));
5274 else
5275 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5277 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5278 && TREE_CODE (type) == ARRAY_TYPE
5279 && TYPE_NONALIASED_COMPONENT (type))
5281 target = copy_rtx (target);
5282 MEM_KEEP_ALIAS_SET_P (target) = 1;
5285 store_constructor_field (target, bitsize, bitpos, mode, value,
5286 type, cleared, get_alias_set (elttype));
5292 /* Set constructor assignments. */
5293 else if (TREE_CODE (type) == SET_TYPE)
5295 tree elt = CONSTRUCTOR_ELTS (exp);
5296 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5297 tree domain = TYPE_DOMAIN (type);
5298 tree domain_min, domain_max, bitlength;
5300 /* The default implementation strategy is to extract the constant
5301 parts of the constructor, use that to initialize the target,
5302 and then "or" in whatever non-constant ranges we need in addition.
5304 If a large set is all zero or all ones, it is
5305 probably better to set it using memset (if available) or bzero.
5306 Also, if a large set has just a single range, it may also be
5307 better to first clear the whole set (using bzero/memset), and
5308 then set just the bits we want. */
5310 /* Check for all zeros. */
5311 if (elt == NULL_TREE && size > 0)
5313 if (!cleared)
5314 clear_storage (target, GEN_INT (size));
5315 return;
5318 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5319 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5320 bitlength = size_binop (PLUS_EXPR,
5321 size_diffop (domain_max, domain_min),
5322 ssize_int (1));
5324 nbits = tree_low_cst (bitlength, 1);
5326 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5327 are "complicated" (more than one range), initialize (the
5328 constant parts) by copying from a constant. */
5329 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5330 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5332 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5333 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5334 char *bit_buffer = (char *) alloca (nbits);
5335 HOST_WIDE_INT word = 0;
5336 unsigned int bit_pos = 0;
5337 unsigned int ibit = 0;
5338 unsigned int offset = 0; /* In bytes from beginning of set. */
5340 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5341 for (;;)
5343 if (bit_buffer[ibit])
5345 if (BYTES_BIG_ENDIAN)
5346 word |= (1 << (set_word_size - 1 - bit_pos));
5347 else
5348 word |= 1 << bit_pos;
5351 bit_pos++; ibit++;
5352 if (bit_pos >= set_word_size || ibit == nbits)
5354 if (word != 0 || ! cleared)
5356 rtx datum = GEN_INT (word);
5357 rtx to_rtx;
5359 /* The assumption here is that it is safe to use
5360 XEXP if the set is multi-word, but not if
5361 it's single-word. */
5362 if (GET_CODE (target) == MEM)
5363 to_rtx = adjust_address (target, mode, offset);
5364 else if (offset == 0)
5365 to_rtx = target;
5366 else
5367 abort ();
5368 emit_move_insn (to_rtx, datum);
5371 if (ibit == nbits)
5372 break;
5373 word = 0;
5374 bit_pos = 0;
5375 offset += set_word_size / BITS_PER_UNIT;
5379 else if (!cleared)
5380 /* Don't bother clearing storage if the set is all ones. */
5381 if (TREE_CHAIN (elt) != NULL_TREE
5382 || (TREE_PURPOSE (elt) == NULL_TREE
5383 ? nbits != 1
5384 : ( ! host_integerp (TREE_VALUE (elt), 0)
5385 || ! host_integerp (TREE_PURPOSE (elt), 0)
5386 || (tree_low_cst (TREE_VALUE (elt), 0)
5387 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5388 != (HOST_WIDE_INT) nbits))))
5389 clear_storage (target, expr_size (exp));
5391 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5393 /* Start of range of element or NULL. */
5394 tree startbit = TREE_PURPOSE (elt);
5395 /* End of range of element, or element value. */
5396 tree endbit = TREE_VALUE (elt);
5397 HOST_WIDE_INT startb, endb;
5398 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5400 bitlength_rtx = expand_expr (bitlength,
5401 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5403 /* Handle non-range tuple element like [ expr ]. */
5404 if (startbit == NULL_TREE)
5406 startbit = save_expr (endbit);
5407 endbit = startbit;
5410 startbit = convert (sizetype, startbit);
5411 endbit = convert (sizetype, endbit);
5412 if (! integer_zerop (domain_min))
5414 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5415 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5417 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5418 EXPAND_CONST_ADDRESS);
5419 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5420 EXPAND_CONST_ADDRESS);
5422 if (REG_P (target))
5424 targetx
5425 = assign_temp
5426 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5427 (GET_MODE (target), 0),
5428 TYPE_QUAL_CONST)),
5429 0, 1, 1);
5430 emit_move_insn (targetx, target);
5433 else if (GET_CODE (target) == MEM)
5434 targetx = target;
5435 else
5436 abort ();
5438 /* Optimization: If startbit and endbit are constants divisible
5439 by BITS_PER_UNIT, call memset instead. */
5440 if (TARGET_MEM_FUNCTIONS
5441 && TREE_CODE (startbit) == INTEGER_CST
5442 && TREE_CODE (endbit) == INTEGER_CST
5443 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5444 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5446 emit_library_call (memset_libfunc, LCT_NORMAL,
5447 VOIDmode, 3,
5448 plus_constant (XEXP (targetx, 0),
5449 startb / BITS_PER_UNIT),
5450 Pmode,
5451 constm1_rtx, TYPE_MODE (integer_type_node),
5452 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5453 TYPE_MODE (sizetype));
5455 else
5456 emit_library_call (setbits_libfunc, LCT_NORMAL,
5457 VOIDmode, 4, XEXP (targetx, 0),
5458 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5459 startbit_rtx, TYPE_MODE (sizetype),
5460 endbit_rtx, TYPE_MODE (sizetype));
5462 if (REG_P (target))
5463 emit_move_insn (target, targetx);
5467 else
5468 abort ();
5471 /* Store the value of EXP (an expression tree)
5472 into a subfield of TARGET which has mode MODE and occupies
5473 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5474 If MODE is VOIDmode, it means that we are storing into a bit-field.
5476 If VALUE_MODE is VOIDmode, return nothing in particular.
5477 UNSIGNEDP is not used in this case.
5479 Otherwise, return an rtx for the value stored. This rtx
5480 has mode VALUE_MODE if that is convenient to do.
5481 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5483 TYPE is the type of the underlying object,
5485 ALIAS_SET is the alias set for the destination. This value will
5486 (in general) be different from that for TARGET, since TARGET is a
5487 reference to the containing structure. */
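/* A hypothetical example of the bit-field case, modeled on the call in
   expand_assignment above: storing FROM into a 3-bit field that begins
   2 bits into TO_RTX arrives here roughly as

   store_field (to_rtx, 3, 2, VOIDmode, from, VOIDmode, 0,
                TREE_TYPE (tem), get_alias_set (to));

   MODE == VOIDmode then selects the store_bit_field path below.  */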
5489 static rtx
5490 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5491 alias_set)
5492 rtx target;
5493 HOST_WIDE_INT bitsize;
5494 HOST_WIDE_INT bitpos;
5495 enum machine_mode mode;
5496 tree exp;
5497 enum machine_mode value_mode;
5498 int unsignedp;
5499 tree type;
5500 int alias_set;
5502 HOST_WIDE_INT width_mask = 0;
5504 if (TREE_CODE (exp) == ERROR_MARK)
5505 return const0_rtx;
5507 /* If we have nothing to store, do nothing unless the expression has
5508 side-effects. */
5509 if (bitsize == 0)
5510 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5511 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5512 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5514 /* If we are storing into an unaligned field of an aligned union that is
5515 in a register, we may have the mode of TARGET being an integer mode but
5516 MODE == BLKmode. In that case, get an aligned object whose size and
5517 alignment are the same as TARGET and store TARGET into it (we can avoid
5518 the store if the field being stored is the entire width of TARGET). Then
5519 call ourselves recursively to store the field into a BLKmode version of
5520 that object. Finally, load from the object into TARGET. This is not
5521 very efficient in general, but should only be slightly more expensive
5522 than the otherwise-required unaligned accesses. Perhaps this can be
5523 cleaned up later. */
5525 if (mode == BLKmode
5526 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5528 rtx object
5529 = assign_temp
5530 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5531 0, 1, 1);
5532 rtx blk_object = adjust_address (object, BLKmode, 0);
5534 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5535 emit_move_insn (object, target);
5537 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5538 alias_set);
5540 emit_move_insn (target, object);
5542 /* We want to return the BLKmode version of the data. */
5543 return blk_object;
5546 if (GET_CODE (target) == CONCAT)
5548 /* We're storing into a struct containing a single __complex. */
5550 if (bitpos != 0)
5551 abort ();
5552 return store_expr (exp, target, 0);
5555 /* If the structure is in a register or if the component
5556 is a bit field, we cannot use addressing to access it.
5557 Use bit-field techniques or SUBREG to store in it. */
5559 if (mode == VOIDmode
5560 || (mode != BLKmode && ! direct_store[(int) mode]
5561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5563 || GET_CODE (target) == REG
5564 || GET_CODE (target) == SUBREG
5565 /* If the field isn't aligned enough to store as an ordinary memref,
5566 store it as a bit field. */
5567 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5568 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5569 || bitpos % GET_MODE_ALIGNMENT (mode)))
5570 /* If the RHS and field are a constant size and the size of the
5571 RHS isn't the same size as the bitfield, we must use bitfield
5572 operations. */
5573 || (bitsize >= 0
5574 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5575 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5577 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5579 /* If BITSIZE is narrower than the size of the type of EXP
5580 we will be narrowing TEMP. Normally, what's wanted are the
5581 low-order bits. However, if EXP's type is a record and this is
5582 a big-endian machine, we want the upper BITSIZE bits. */
5583 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5584 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5585 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5586 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5587 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5588 - bitsize),
5589 temp, 1);
5591 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5592 MODE. */
5593 if (mode != VOIDmode && mode != BLKmode
5594 && mode != TYPE_MODE (TREE_TYPE (exp)))
5595 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5597 /* If the modes of TARGET and TEMP are both BLKmode, both
5598 must be in memory and BITPOS must be aligned on a byte
5599 boundary. If so, we simply do a block copy. */
5600 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5602 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5603 || bitpos % BITS_PER_UNIT != 0)
5604 abort ();
5606 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5607 emit_block_move (target, temp,
5608 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5609 / BITS_PER_UNIT),
5610 BLOCK_OP_NORMAL);
5612 return value_mode == VOIDmode ? const0_rtx : target;
5615 /* Store the value in the bitfield. */
5616 store_bit_field (target, bitsize, bitpos, mode, temp,
5617 int_size_in_bytes (type));
5619 if (value_mode != VOIDmode)
5621 /* The caller wants an rtx for the value.
5622 If possible, avoid refetching from the bitfield itself. */
5623 if (width_mask != 0
5624 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5626 tree count;
5627 enum machine_mode tmode;
5629 tmode = GET_MODE (temp);
5630 if (tmode == VOIDmode)
5631 tmode = value_mode;
5633 if (unsignedp)
5634 return expand_and (tmode, temp,
5635 gen_int_mode (width_mask, tmode),
5636 NULL_RTX);
5638 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5639 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5640 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5643 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5644 NULL_RTX, value_mode, VOIDmode,
5645 int_size_in_bytes (type));
5647 return const0_rtx;
5649 else
5651 rtx addr = XEXP (target, 0);
5652 rtx to_rtx = target;
5654 /* If a value is wanted, it must be the lhs;
5655 so make the address stable for multiple use. */
5657 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5658 && ! CONSTANT_ADDRESS_P (addr)
5659 /* A frame-pointer reference is already stable. */
5660 && ! (GET_CODE (addr) == PLUS
5661 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5662 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5663 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5664 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5666 /* Now build a reference to just the desired component. */
5668 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5670 if (to_rtx == target)
5671 to_rtx = copy_rtx (to_rtx);
5673 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5674 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5675 set_mem_alias_set (to_rtx, alias_set);
5677 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5681 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5682 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5683 codes and find the ultimate containing object, which we return.
5685 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5686 bit position, and *PUNSIGNEDP to the signedness of the field.
5687 If the position of the field is variable, we store a tree
5688 giving the variable offset (in units) in *POFFSET.
5689 This offset is in addition to the bit position.
5690 If the position is not variable, we store 0 in *POFFSET.
5692 If any of the extraction expressions is volatile,
5693 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5695 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5696 is a mode that can be used to access the field. In that case, *PBITSIZE
5697 is redundant.
5699 If the field describes a variable-sized object, *PMODE is set to
5700 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5701 this case, but the address of the object can be found. */
5703 tree
5704 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5705 punsignedp, pvolatilep)
5706 tree exp;
5707 HOST_WIDE_INT *pbitsize;
5708 HOST_WIDE_INT *pbitpos;
5709 tree *poffset;
5710 enum machine_mode *pmode;
5711 int *punsignedp;
5712 int *pvolatilep;
5714 tree size_tree = 0;
5715 enum machine_mode mode = VOIDmode;
5716 tree offset = size_zero_node;
5717 tree bit_offset = bitsize_zero_node;
5718 tree placeholder_ptr = 0;
5719 tree tem;
5721 /* First get the mode, signedness, and size. We do this from just the
5722 outermost expression. */
5723 if (TREE_CODE (exp) == COMPONENT_REF)
5725 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5726 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5727 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5729 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5731 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5733 size_tree = TREE_OPERAND (exp, 1);
5734 *punsignedp = TREE_UNSIGNED (exp);
5736 else
5738 mode = TYPE_MODE (TREE_TYPE (exp));
5739 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5741 if (mode == BLKmode)
5742 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5743 else
5744 *pbitsize = GET_MODE_BITSIZE (mode);
5747 if (size_tree != 0)
5749 if (! host_integerp (size_tree, 1))
5750 mode = BLKmode, *pbitsize = -1;
5751 else
5752 *pbitsize = tree_low_cst (size_tree, 1);
5755 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5756 and find the ultimate containing object. */
5757 while (1)
5759 if (TREE_CODE (exp) == BIT_FIELD_REF)
5760 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5761 else if (TREE_CODE (exp) == COMPONENT_REF)
5763 tree field = TREE_OPERAND (exp, 1);
5764 tree this_offset = DECL_FIELD_OFFSET (field);
5766 /* If this field hasn't been filled in yet, don't go
5767 past it. This should only happen when folding expressions
5768 made during type construction. */
5769 if (this_offset == 0)
5770 break;
5771 else if (! TREE_CONSTANT (this_offset)
5772 && contains_placeholder_p (this_offset))
5773 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5775 offset = size_binop (PLUS_EXPR, offset, this_offset);
5776 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5777 DECL_FIELD_BIT_OFFSET (field));
5779 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5782 else if (TREE_CODE (exp) == ARRAY_REF
5783 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5785 tree index = TREE_OPERAND (exp, 1);
5786 tree array = TREE_OPERAND (exp, 0);
5787 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5788 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5789 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5791 /* We assume all arrays have sizes that are a multiple of a byte.
5792 First subtract the lower bound, if any, in the type of the
5793 index, then convert to sizetype and multiply by the size of the
5794 array element. */
5795 if (low_bound != 0 && ! integer_zerop (low_bound))
5796 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5797 index, low_bound));
5799 /* If the index has a self-referential type, pass it to a
5800 WITH_RECORD_EXPR; if the component size is self-referential,
5801 pass our component to one. */
5802 if (! TREE_CONSTANT (index)
5803 && contains_placeholder_p (index))
5804 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5805 if (! TREE_CONSTANT (unit_size)
5806 && contains_placeholder_p (unit_size))
5807 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5809 offset = size_binop (PLUS_EXPR, offset,
5810 size_binop (MULT_EXPR,
5811 convert (sizetype, index),
5812 unit_size));
5815 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5817 tree new = find_placeholder (exp, &placeholder_ptr);
5819 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5820 We might have been called from tree optimization where we
5821 haven't set up an object yet. */
5822 if (new == 0)
5823 break;
5824 else
5825 exp = new;
5827 continue;
5829 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5830 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5831 && ! ((TREE_CODE (exp) == NOP_EXPR
5832 || TREE_CODE (exp) == CONVERT_EXPR)
5833 && (TYPE_MODE (TREE_TYPE (exp))
5834 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5835 break;
5837 /* If any reference in the chain is volatile, the effect is volatile. */
5838 if (TREE_THIS_VOLATILE (exp))
5839 *pvolatilep = 1;
5841 exp = TREE_OPERAND (exp, 0);
5844 /* If OFFSET is constant, see if we can return the whole thing as a
5845 constant bit position. Otherwise, split it up. */
5846 if (host_integerp (offset, 0)
5847 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5848 bitsize_unit_node))
5849 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5850 && host_integerp (tem, 0))
5851 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5852 else
5853 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5855 *pmode = mode;
5856 return exp;
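/* Illustrative sketch, not part of expr.c: a typical caller of
   get_inner_reference, modelled on the COMPONENT_REF handling in
   expand_expr below.  The name example_decompose_reference is
   hypothetical; the fragment is kept under #if 0 since it is only an
   illustration.  */
#if 0
static tree
example_decompose_reference (exp)
     tree exp;
{
  enum machine_mode mode1;
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  int unsignedp, volatilep = 0;

  /* Peel nested COMPONENT_REFs, ARRAY_REFs, etc.; on return BITPOS and
     BITSIZE give the accessed bit range within the returned object and
     OFFSET holds any variable part of the displacement (or 0).  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset,
                              &mode1, &unsignedp, &volatilep);
}
#endif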
5859 /* Return 1 if T is an expression that get_inner_reference handles. */
5861 int
5862 handled_component_p (t)
5863 tree t;
5865 switch (TREE_CODE (t))
5867 case BIT_FIELD_REF:
5868 case COMPONENT_REF:
5869 case ARRAY_REF:
5870 case ARRAY_RANGE_REF:
5871 case NON_LVALUE_EXPR:
5872 case VIEW_CONVERT_EXPR:
5873 return 1;
5875 case NOP_EXPR:
5876 case CONVERT_EXPR:
5877 return (TYPE_MODE (TREE_TYPE (t))
5878 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5880 default:
5881 return 0;
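/* Illustrative sketch, not part of expr.c: callers typically use
   handled_component_p to strip reference operators until they reach the
   underlying object.  INNER is a hypothetical variable; EXP is assumed
   to be some reference tree.  */
#if 0
  tree inner = exp;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  /* INNER is now the ultimate containing object.  */
#endif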
5885 /* Given an rtx VALUE that may contain additions and multiplications, return
5886 an equivalent value that just refers to a register, memory, or constant.
5887 This is done by generating instructions to perform the arithmetic and
5888 returning a pseudo-register containing the value.
5890 The returned value may be a REG, SUBREG, MEM or constant. */
5892 rtx
5893 force_operand (value, target)
5894 rtx value, target;
5896 rtx op1, op2;
5897 /* Use subtarget as the target for operand 0 of a binary operation. */
5898 rtx subtarget = get_subtarget (target);
5899 enum rtx_code code = GET_CODE (value);
5901 /* Check for a PIC address load. */
5902 if ((code == PLUS || code == MINUS)
5903 && XEXP (value, 0) == pic_offset_table_rtx
5904 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5905 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5906 || GET_CODE (XEXP (value, 1)) == CONST))
5908 if (!subtarget)
5909 subtarget = gen_reg_rtx (GET_MODE (value));
5910 emit_move_insn (subtarget, value);
5911 return subtarget;
5914 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5916 if (!target)
5917 target = gen_reg_rtx (GET_MODE (value));
5918 convert_move (target, force_operand (XEXP (value, 0), NULL),
5919 code == ZERO_EXTEND);
5920 return target;
5923 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5925 op2 = XEXP (value, 1);
5926 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5927 subtarget = 0;
5928 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5930 code = PLUS;
5931 op2 = negate_rtx (GET_MODE (value), op2);
5934 /* Check for an addition with OP2 a constant integer and our first
5935 operand a PLUS of a virtual register and something else. In that
5936 case, we want to emit the sum of the virtual register and the
5937 constant first and then add the other value. This allows virtual
5938 register instantiation to simply modify the constant rather than
5939 creating another one around this addition. */
5940 if (code == PLUS && GET_CODE (op2) == CONST_INT
5941 && GET_CODE (XEXP (value, 0)) == PLUS
5942 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5943 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5944 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5946 rtx temp = expand_simple_binop (GET_MODE (value), code,
5947 XEXP (XEXP (value, 0), 0), op2,
5948 subtarget, 0, OPTAB_LIB_WIDEN);
5949 return expand_simple_binop (GET_MODE (value), code, temp,
5950 force_operand (XEXP (XEXP (value,
5951 0), 1), 0),
5952 target, 0, OPTAB_LIB_WIDEN);
5955 op1 = force_operand (XEXP (value, 0), subtarget);
5956 op2 = force_operand (op2, NULL_RTX);
5957 switch (code)
5959 case MULT:
5960 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5961 case DIV:
5962 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5963 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5964 target, 1, OPTAB_LIB_WIDEN);
5965 else
5966 return expand_divmod (0,
5967 FLOAT_MODE_P (GET_MODE (value))
5968 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5969 GET_MODE (value), op1, op2, target, 0);
5970 break;
5971 case MOD:
5972 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5973 target, 0);
5974 break;
5975 case UDIV:
5976 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5977 target, 1);
5978 break;
5979 case UMOD:
5980 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5981 target, 1);
5982 break;
5983 case ASHIFTRT:
5984 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5985 target, 0, OPTAB_LIB_WIDEN);
5986 break;
5987 default:
5988 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5989 target, 1, OPTAB_LIB_WIDEN);
5992 if (GET_RTX_CLASS (code) == '1')
5994 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5995 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5998 #ifdef INSN_SCHEDULING
5999 /* On machines that have insn scheduling, we want all memory references to be
6000 explicit, so we need to deal with such paradoxical SUBREGs. */
6001 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6002 && (GET_MODE_SIZE (GET_MODE (value))
6003 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6004 value
6005 = simplify_gen_subreg (GET_MODE (value),
6006 force_reg (GET_MODE (SUBREG_REG (value)),
6007 force_operand (SUBREG_REG (value),
6008 NULL_RTX)),
6009 GET_MODE (SUBREG_REG (value)),
6010 SUBREG_BYTE (value));
6011 #endif
6013 return value;
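/* Illustrative sketch, not part of expr.c: force_operand is typically
   applied to address arithmetic obtained with EXPAND_SUM, turning a
   (plus (mult ...) ...) nest into a single register or constant.  EXP1
   here is a hypothetical address expression.  */
#if 0
  rtx addr = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
  addr = force_operand (addr, NULL_RTX);
  /* ADDR is now a REG, SUBREG, MEM or constant usable as an operand.  */
#endif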
6016 /* Subroutine of expand_expr: return nonzero iff there is no way that
6017 EXP can reference X, which is being modified. TOP_P is nonzero if this
6018 call is going to be used to determine whether we need a temporary
6019 for EXP, as opposed to a recursive call to this function.
6021 It is always safe for this routine to return zero since it merely
6022 searches for optimization opportunities. */
6024 static int
6025 safe_from_p (x, exp, top_p)
6026 rtx x;
6027 tree exp;
6028 int top_p;
6030 rtx exp_rtl = 0;
6031 int i, nops;
6032 static tree save_expr_list;
6034 if (x == 0
6035 /* If EXP has varying size, we MUST use a target since we currently
6036 have no way of allocating temporaries of variable size
6037 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6038 So we assume here that something at a higher level has prevented a
6039 clash. This is somewhat bogus, but the best we can do. Only
6040 do this when X is BLKmode and when we are at the top level. */
6041 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6042 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6043 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6044 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6045 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6046 != INTEGER_CST)
6047 && GET_MODE (x) == BLKmode)
6048 /* If X is in the outgoing argument area, it is always safe. */
6049 || (GET_CODE (x) == MEM
6050 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6051 || (GET_CODE (XEXP (x, 0)) == PLUS
6052 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6053 return 1;
6055 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6056 find the underlying pseudo. */
6057 if (GET_CODE (x) == SUBREG)
6059 x = SUBREG_REG (x);
6060 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6061 return 0;
6064 /* A SAVE_EXPR might appear many times in the expression passed to the
6065 top-level safe_from_p call, and if it has a complex subexpression,
6066 examining it multiple times could result in a combinatorial explosion.
6067 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6068 with optimization took about 28 minutes to compile -- even though it was
6069 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6070 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6071 we have processed. Note that the only test of top_p was above. */
6073 if (top_p)
6075 int rtn;
6076 tree t;
6078 save_expr_list = 0;
6080 rtn = safe_from_p (x, exp, 0);
6082 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6083 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6085 return rtn;
6088 /* Now look at our tree code and possibly recurse. */
6089 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6091 case 'd':
6092 exp_rtl = DECL_RTL_IF_SET (exp);
6093 break;
6095 case 'c':
6096 return 1;
6098 case 'x':
6099 if (TREE_CODE (exp) == TREE_LIST)
6101 while (1)
6103 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6104 return 0;
6105 exp = TREE_CHAIN (exp);
6106 if (!exp)
6107 return 1;
6108 if (TREE_CODE (exp) != TREE_LIST)
6109 return safe_from_p (x, exp, 0);
6112 else if (TREE_CODE (exp) == ERROR_MARK)
6113 return 1; /* An already-visited SAVE_EXPR? */
6114 else
6115 return 0;
6117 case '2':
6118 case '<':
6119 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6120 return 0;
6121 /* FALLTHRU */
6123 case '1':
6124 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6126 case 'e':
6127 case 'r':
6128 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6129 the expression. If it is set, we conflict iff we are that rtx or
6130 both are in memory. Otherwise, we check all operands of the
6131 expression recursively. */
6133 switch (TREE_CODE (exp))
6135 case ADDR_EXPR:
6136 /* If the operand is static or we are static, we can't conflict.
6137 Likewise if we don't conflict with the operand at all. */
6138 if (staticp (TREE_OPERAND (exp, 0))
6139 || TREE_STATIC (exp)
6140 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6141 return 1;
6143 /* Otherwise, the only way this can conflict is if we are taking
6144 the address of a DECL whose address is part of X, which is
6145 very rare. */
6146 exp = TREE_OPERAND (exp, 0);
6147 if (DECL_P (exp))
6149 if (!DECL_RTL_SET_P (exp)
6150 || GET_CODE (DECL_RTL (exp)) != MEM)
6151 return 0;
6152 else
6153 exp_rtl = XEXP (DECL_RTL (exp), 0);
6155 break;
6157 case INDIRECT_REF:
6158 if (GET_CODE (x) == MEM
6159 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6160 get_alias_set (exp)))
6161 return 0;
6162 break;
6164 case CALL_EXPR:
6165 /* Assume that the call will clobber all hard registers and
6166 all of memory. */
6167 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6168 || GET_CODE (x) == MEM)
6169 return 0;
6170 break;
6172 case RTL_EXPR:
6173 /* If a sequence exists, we would have to scan every instruction
6174 in the sequence to see if it was safe. This is probably not
6175 worthwhile. */
6176 if (RTL_EXPR_SEQUENCE (exp))
6177 return 0;
6179 exp_rtl = RTL_EXPR_RTL (exp);
6180 break;
6182 case WITH_CLEANUP_EXPR:
6183 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6184 break;
6186 case CLEANUP_POINT_EXPR:
6187 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6189 case SAVE_EXPR:
6190 exp_rtl = SAVE_EXPR_RTL (exp);
6191 if (exp_rtl)
6192 break;
6194 /* If we've already scanned this, don't do it again. Otherwise,
6195 show we've scanned it and record for clearing the flag if we're
6196 going on. */
6197 if (TREE_PRIVATE (exp))
6198 return 1;
6200 TREE_PRIVATE (exp) = 1;
6201 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6203 TREE_PRIVATE (exp) = 0;
6204 return 0;
6207 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6208 return 1;
6210 case BIND_EXPR:
6211 /* The only operand we look at is operand 1. The rest aren't
6212 part of the expression. */
6213 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6215 case METHOD_CALL_EXPR:
6216 /* This takes an rtx argument, but shouldn't appear here. */
6217 abort ();
6219 default:
6220 break;
6223 /* If we have an rtx, we do not need to scan our operands. */
6224 if (exp_rtl)
6225 break;
6227 nops = first_rtl_op (TREE_CODE (exp));
6228 for (i = 0; i < nops; i++)
6229 if (TREE_OPERAND (exp, i) != 0
6230 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6231 return 0;
6233 /* If this is a language-specific tree code, it may require
6234 special handling. */
6235 if ((unsigned int) TREE_CODE (exp)
6236 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6237 && !(*lang_hooks.safe_from_p) (x, exp))
6238 return 0;
6241 /* If we have an rtl, find any enclosed object. Then see if we conflict
6242 with it. */
6243 if (exp_rtl)
6245 if (GET_CODE (exp_rtl) == SUBREG)
6247 exp_rtl = SUBREG_REG (exp_rtl);
6248 if (GET_CODE (exp_rtl) == REG
6249 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6250 return 0;
6253 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6254 are memory and they conflict. */
6255 return ! (rtx_equal_p (x, exp_rtl)
6256 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6257 && true_dependence (exp_rtl, VOIDmode, x,
6258 rtx_addr_varies_p)));
6261 /* If we reach here, it is safe. */
6262 return 1;
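/* Illustrative sketch, not part of expr.c: the usual pattern in
   expand_expr-style callers (compare the CONSTRUCTOR case below) is to
   discard a proposed TARGET when EXP might still reference it.  TARGET
   and EXP are assumed to be in scope.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp, 1))
    /* EXP may read TARGET, so compute into a fresh temporary instead.  */
    target = assign_temp (TREE_TYPE (exp), 0, 1, 1);
#endif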
6265 /* Subroutine of expand_expr: return rtx if EXP is a
6266 variable or parameter; else return 0. */
6268 static rtx
6269 var_rtx (exp)
6270 tree exp;
6272 STRIP_NOPS (exp);
6273 switch (TREE_CODE (exp))
6275 case PARM_DECL:
6276 case VAR_DECL:
6277 return DECL_RTL (exp);
6278 default:
6279 return 0;
6283 #ifdef MAX_INTEGER_COMPUTATION_MODE
6285 void
6286 check_max_integer_computation_mode (exp)
6287 tree exp;
6289 enum tree_code code;
6290 enum machine_mode mode;
6292 /* Strip any NOPs that don't change the mode. */
6293 STRIP_NOPS (exp);
6294 code = TREE_CODE (exp);
6296 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6297 if (code == NOP_EXPR
6298 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6299 return;
6301 /* First check the type of the overall operation. We need only look at
6302 unary, binary and relational operations. */
6303 if (TREE_CODE_CLASS (code) == '1'
6304 || TREE_CODE_CLASS (code) == '2'
6305 || TREE_CODE_CLASS (code) == '<')
6307 mode = TYPE_MODE (TREE_TYPE (exp));
6308 if (GET_MODE_CLASS (mode) == MODE_INT
6309 && mode > MAX_INTEGER_COMPUTATION_MODE)
6310 internal_error ("unsupported wide integer operation");
6313 /* Check operand of a unary op. */
6314 if (TREE_CODE_CLASS (code) == '1')
6316 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6317 if (GET_MODE_CLASS (mode) == MODE_INT
6318 && mode > MAX_INTEGER_COMPUTATION_MODE)
6319 internal_error ("unsupported wide integer operation");
6322 /* Check operands of a binary/comparison op. */
6323 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6325 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6326 if (GET_MODE_CLASS (mode) == MODE_INT
6327 && mode > MAX_INTEGER_COMPUTATION_MODE)
6328 internal_error ("unsupported wide integer operation");
6330 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6331 if (GET_MODE_CLASS (mode) == MODE_INT
6332 && mode > MAX_INTEGER_COMPUTATION_MODE)
6333 internal_error ("unsupported wide integer operation");
6336 #endif
6338 /* Return the highest power of two that EXP is known to be a multiple of.
6339 This is used in updating alignment of MEMs in array references. */
6341 static unsigned HOST_WIDE_INT
6342 highest_pow2_factor (exp)
6343 tree exp;
6345 unsigned HOST_WIDE_INT c0, c1;
6347 switch (TREE_CODE (exp))
6349 case INTEGER_CST:
6350 /* We can find the lowest bit that's a one. If the low
6351 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6352 We need to handle this case since we can find it in a COND_EXPR,
6353 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6354 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6355 later ICE. */
6356 if (TREE_CONSTANT_OVERFLOW (exp))
6357 return BIGGEST_ALIGNMENT;
6358 else
6360 /* Note: tree_low_cst is intentionally not used here;
6361 we don't care about the upper bits. */
6362 c0 = TREE_INT_CST_LOW (exp);
6363 c0 &= -c0;
6364 return c0 ? c0 : BIGGEST_ALIGNMENT;
6366 break;
6368 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6369 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6370 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6371 return MIN (c0, c1);
6373 case MULT_EXPR:
6374 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6375 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6376 return c0 * c1;
6378 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6379 case CEIL_DIV_EXPR:
6380 if (integer_pow2p (TREE_OPERAND (exp, 1))
6381 && host_integerp (TREE_OPERAND (exp, 1), 1))
6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6384 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6385 return MAX (1, c0 / c1);
6387 break;
6389 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6390 case SAVE_EXPR: case WITH_RECORD_EXPR:
6391 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6393 case COMPOUND_EXPR:
6394 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6396 case COND_EXPR:
6397 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6398 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6399 return MIN (c0, c1);
6401 default:
6402 break;
6405 return 1;
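/* Worked example, not from the original source: for an offset tree of
   the form I * 12 + 8, the MULT_EXPR case yields 1 * 4 = 4 (an unknown
   index contributes the default factor 1, the constant 12 the factor 4),
   and the PLUS_EXPR case then yields MIN (4, 8) = 4, so a MEM addressed
   with that offset may be assumed to be 4-byte aligned.  */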
6408 /* Similar, except that it is known that the expression must be a multiple
6409 of the alignment of TYPE. */
6411 static unsigned HOST_WIDE_INT
6412 highest_pow2_factor_for_type (type, exp)
6413 tree type;
6414 tree exp;
6416 unsigned HOST_WIDE_INT type_align, factor;
6418 factor = highest_pow2_factor (exp);
6419 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6420 return MAX (factor, type_align);
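/* Worked example, not from the original source: if TYPE is a 32-bit
   integer type aligned to 4 bytes and EXP is a plain index variable
   (factor 1 from highest_pow2_factor), the result is MAX (1, 4) = 4.  */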
6423 /* Return an object on the placeholder list that matches EXP, a
6424 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6425 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6426 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6427 is a location which initially points to a starting location in the
6428 placeholder list (zero means start of the list) and where a pointer into
6429 the placeholder list at which the object is found is placed. */
6431 tree
6432 find_placeholder (exp, plist)
6433 tree exp;
6434 tree *plist;
6436 tree type = TREE_TYPE (exp);
6437 tree placeholder_expr;
6439 for (placeholder_expr
6440 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6441 placeholder_expr != 0;
6442 placeholder_expr = TREE_CHAIN (placeholder_expr))
6444 tree need_type = TYPE_MAIN_VARIANT (type);
6445 tree elt;
6447 /* Find the outermost reference that is of the type we want. If none,
6448 see if any object has a type that is a pointer to the type we
6449 want. */
6450 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6451 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6452 || TREE_CODE (elt) == COND_EXPR)
6453 ? TREE_OPERAND (elt, 1)
6454 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6455 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6456 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6457 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6458 ? TREE_OPERAND (elt, 0) : 0))
6459 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6461 if (plist)
6462 *plist = placeholder_expr;
6463 return elt;
6466 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6467 elt
6468 = ((TREE_CODE (elt) == COMPOUND_EXPR
6469 || TREE_CODE (elt) == COND_EXPR)
6470 ? TREE_OPERAND (elt, 1)
6471 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6472 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6473 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6474 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6475 ? TREE_OPERAND (elt, 0) : 0))
6476 if (POINTER_TYPE_P (TREE_TYPE (elt))
6477 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6478 == need_type))
6480 if (plist)
6481 *plist = placeholder_expr;
6482 return build1 (INDIRECT_REF, need_type, elt);
6486 return 0;
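/* Illustrative sketch, not part of expr.c: the protocol around
   placeholder_list, mirroring the WITH_RECORD_EXPR case of expand_expr
   below.  OBJECT, SIZE_EXPR and SIZE_RTX are hypothetical names.  */
#if 0
  rtx size_rtx;

  /* Make OBJECT available to find_placeholder while SIZE_EXPR, which may
     contain PLACEHOLDER_EXPRs referring to it, is expanded; then pop.  */
  placeholder_list = tree_cons (object, NULL_TREE, placeholder_list);
  size_rtx = expand_expr (size_expr, NULL_RTX, VOIDmode, 0);
  placeholder_list = TREE_CHAIN (placeholder_list);
#endif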
6489 /* expand_expr: generate code for computing expression EXP.
6490 An rtx for the computed value is returned. The value is never null.
6491 In the case of a void EXP, const0_rtx is returned.
6493 The value may be stored in TARGET if TARGET is nonzero.
6494 TARGET is just a suggestion; callers must assume that
6495 the rtx returned may not be the same as TARGET.
6497 If TARGET is CONST0_RTX, it means that the value will be ignored.
6499 If TMODE is not VOIDmode, it suggests generating the
6500 result in mode TMODE. But this is done only when convenient.
6501 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6502 TMODE is just a suggestion; callers must assume that
6503 the rtx returned may not have mode TMODE.
6505 Note that TARGET may have neither TMODE nor MODE. In that case, it
6506 probably will not be used.
6508 If MODIFIER is EXPAND_SUM then when EXP is an addition
6509 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6510 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6511 products as above, or REG or MEM, or constant.
6512 Ordinarily in such cases we would output mul or add instructions
6513 and then return a pseudo reg containing the sum.
6515 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6516 it also marks a label as absolutely required (it can't be dead).
6517 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6518 This is used for outputting expressions used in initializers.
6520 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6521 with a constant address even if that address is not normally legitimate.
6522 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6524 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6525 a call parameter. Such targets require special care as we haven't yet
6526 marked TARGET so that it's safe from being trashed by libcalls. We
6527 don't want to use TARGET for anything but the final result;
6528 intermediate values must go elsewhere. Additionally, calls to
6529 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
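/* Illustrative sketch, not part of expr.c: a typical recursive use of
   expand_expr as it appears throughout this file.  OP0, OP1 and
   SUBTARGET mirror the locals declared just below; since TARGET and
   TMODE are only hints, callers must use the returned rtx rather than
   assuming TARGET was filled in.  */
#if 0
  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
#endif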
6531 rtx
6532 expand_expr (exp, target, tmode, modifier)
6533 tree exp;
6534 rtx target;
6535 enum machine_mode tmode;
6536 enum expand_modifier modifier;
6538 rtx op0, op1, temp;
6539 tree type = TREE_TYPE (exp);
6540 int unsignedp = TREE_UNSIGNED (type);
6541 enum machine_mode mode;
6542 enum tree_code code = TREE_CODE (exp);
6543 optab this_optab;
6544 rtx subtarget, original_target;
6545 int ignore;
6546 tree context;
6548 /* Handle ERROR_MARK before anybody tries to access its type. */
6549 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6551 op0 = CONST0_RTX (tmode);
6552 if (op0 != 0)
6553 return op0;
6554 return const0_rtx;
6557 mode = TYPE_MODE (type);
6558 /* Use subtarget as the target for operand 0 of a binary operation. */
6559 subtarget = get_subtarget (target);
6560 original_target = target;
6561 ignore = (target == const0_rtx
6562 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6563 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6564 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6565 && TREE_CODE (type) == VOID_TYPE));
6567 /* If we are going to ignore this result, we need only do something
6568 if there is a side-effect somewhere in the expression. If there
6569 is, short-circuit the most common cases here. Note that we must
6570 not call expand_expr with anything but const0_rtx in case this
6571 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6573 if (ignore)
6575 if (! TREE_SIDE_EFFECTS (exp))
6576 return const0_rtx;
6578 /* Ensure we reference a volatile object even if the value is ignored, but
6579 don't do this if all we are doing is taking its address. */
6580 if (TREE_THIS_VOLATILE (exp)
6581 && TREE_CODE (exp) != FUNCTION_DECL
6582 && mode != VOIDmode && mode != BLKmode
6583 && modifier != EXPAND_CONST_ADDRESS)
6585 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6586 if (GET_CODE (temp) == MEM)
6587 temp = copy_to_reg (temp);
6588 return const0_rtx;
6591 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6592 || code == INDIRECT_REF || code == BUFFER_REF)
6593 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6594 modifier);
6596 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6597 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6599 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6600 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6601 return const0_rtx;
6603 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6604 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6605 /* If the second operand has no side effects, just evaluate
6606 the first. */
6607 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6608 modifier);
6609 else if (code == BIT_FIELD_REF)
6611 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6612 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6613 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6614 return const0_rtx;
6617 target = 0;
6620 #ifdef MAX_INTEGER_COMPUTATION_MODE
6621 /* Only check stuff here if the mode we want is different from the mode
6622 of the expression; if it's the same, check_max_integer_computation_mode
6623 will handle it. Do we really need to check this stuff at all? */
6625 if (target
6626 && GET_MODE (target) != mode
6627 && TREE_CODE (exp) != INTEGER_CST
6628 && TREE_CODE (exp) != PARM_DECL
6629 && TREE_CODE (exp) != ARRAY_REF
6630 && TREE_CODE (exp) != ARRAY_RANGE_REF
6631 && TREE_CODE (exp) != COMPONENT_REF
6632 && TREE_CODE (exp) != BIT_FIELD_REF
6633 && TREE_CODE (exp) != INDIRECT_REF
6634 && TREE_CODE (exp) != CALL_EXPR
6635 && TREE_CODE (exp) != VAR_DECL
6636 && TREE_CODE (exp) != RTL_EXPR)
6638 enum machine_mode mode = GET_MODE (target);
6640 if (GET_MODE_CLASS (mode) == MODE_INT
6641 && mode > MAX_INTEGER_COMPUTATION_MODE)
6642 internal_error ("unsupported wide integer operation");
6645 if (tmode != mode
6646 && TREE_CODE (exp) != INTEGER_CST
6647 && TREE_CODE (exp) != PARM_DECL
6648 && TREE_CODE (exp) != ARRAY_REF
6649 && TREE_CODE (exp) != ARRAY_RANGE_REF
6650 && TREE_CODE (exp) != COMPONENT_REF
6651 && TREE_CODE (exp) != BIT_FIELD_REF
6652 && TREE_CODE (exp) != INDIRECT_REF
6653 && TREE_CODE (exp) != VAR_DECL
6654 && TREE_CODE (exp) != CALL_EXPR
6655 && TREE_CODE (exp) != RTL_EXPR
6656 && GET_MODE_CLASS (tmode) == MODE_INT
6657 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6658 internal_error ("unsupported wide integer operation");
6660 check_max_integer_computation_mode (exp);
6661 #endif
6663 /* If we will do cse, generate all results into pseudo registers
6664 since 1) that allows cse to find more things
6665 and 2) otherwise cse could produce an insn the machine
6666 cannot support. An exception is a CONSTRUCTOR into a multi-word
6667 MEM: that's much more likely to be most efficient into the MEM.
6668 Another is a CALL_EXPR which must return in memory. */
6670 if (! cse_not_expected && mode != BLKmode && target
6671 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6672 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6673 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6674 target = 0;
6676 switch (code)
6678 case LABEL_DECL:
6680 tree function = decl_function_context (exp);
6681 /* Handle using a label in a containing function. */
6682 if (function != current_function_decl
6683 && function != inline_function_decl && function != 0)
6685 struct function *p = find_function_data (function);
6686 p->expr->x_forced_labels
6687 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6688 p->expr->x_forced_labels);
6690 else
6692 if (modifier == EXPAND_INITIALIZER)
6693 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6694 label_rtx (exp),
6695 forced_labels);
6698 temp = gen_rtx_MEM (FUNCTION_MODE,
6699 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6700 if (function != current_function_decl
6701 && function != inline_function_decl && function != 0)
6702 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6703 return temp;
6706 case PARM_DECL:
6707 if (!DECL_RTL_SET_P (exp))
6709 error_with_decl (exp, "prior parameter's size depends on `%s'");
6710 return CONST0_RTX (mode);
6713 /* ... fall through ... */
6715 case VAR_DECL:
6716 /* If a static var's type was incomplete when the decl was written,
6717 but the type is complete now, lay out the decl now. */
6718 if (DECL_SIZE (exp) == 0
6719 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6720 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6721 layout_decl (exp, 0);
6723 /* ... fall through ... */
6725 case FUNCTION_DECL:
6726 case RESULT_DECL:
6727 if (DECL_RTL (exp) == 0)
6728 abort ();
6730 /* Ensure the variable is marked as used even if it doesn't go through
6731 a parser. If it hasn't been used yet, write out an external
6732 definition. */
6733 if (! TREE_USED (exp))
6735 assemble_external (exp);
6736 TREE_USED (exp) = 1;
6739 /* Show we haven't gotten RTL for this yet. */
6740 temp = 0;
6742 /* Handle variables inherited from containing functions. */
6743 context = decl_function_context (exp);
6745 /* We treat inline_function_decl as an alias for the current function
6746 because that is the inline function whose vars, types, etc.
6747 are being merged into the current function.
6748 See expand_inline_function. */
6750 if (context != 0 && context != current_function_decl
6751 && context != inline_function_decl
6752 /* If var is static, we don't need a static chain to access it. */
6753 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6754 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6756 rtx addr;
6758 /* Mark as non-local and addressable. */
6759 DECL_NONLOCAL (exp) = 1;
6760 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6761 abort ();
6762 (*lang_hooks.mark_addressable) (exp);
6763 if (GET_CODE (DECL_RTL (exp)) != MEM)
6764 abort ();
6765 addr = XEXP (DECL_RTL (exp), 0);
6766 if (GET_CODE (addr) == MEM)
6767 addr
6768 = replace_equiv_address (addr,
6769 fix_lexical_addr (XEXP (addr, 0), exp));
6770 else
6771 addr = fix_lexical_addr (addr, exp);
6773 temp = replace_equiv_address (DECL_RTL (exp), addr);
6776 /* This is the case of an array whose size is to be determined
6777 from its initializer, while the initializer is still being parsed.
6778 See expand_decl. */
6780 else if (GET_CODE (DECL_RTL (exp)) == MEM
6781 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6782 temp = validize_mem (DECL_RTL (exp));
6784 /* If DECL_RTL is memory, we are in the normal case; if either
6785 the address is not valid, or it is not a register and -fforce-addr
6786 is specified, get the address into a register. */
6788 else if (GET_CODE (DECL_RTL (exp)) == MEM
6789 && modifier != EXPAND_CONST_ADDRESS
6790 && modifier != EXPAND_SUM
6791 && modifier != EXPAND_INITIALIZER
6792 && (! memory_address_p (DECL_MODE (exp),
6793 XEXP (DECL_RTL (exp), 0))
6794 || (flag_force_addr
6795 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6796 temp = replace_equiv_address (DECL_RTL (exp),
6797 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6799 /* If we got something, return it. But first, set the alignment
6800 if the address is a register. */
6801 if (temp != 0)
6803 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6804 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6806 return temp;
6809 /* If the mode of DECL_RTL does not match that of the decl, it
6810 must be a promoted value. We return a SUBREG of the wanted mode,
6811 but mark it so that we know that it was already extended. */
6813 if (GET_CODE (DECL_RTL (exp)) == REG
6814 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6816 /* Get the signedness used for this variable. Ensure we get the
6817 same mode we got when the variable was declared. */
6818 if (GET_MODE (DECL_RTL (exp))
6819 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6820 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6821 abort ();
6823 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6824 SUBREG_PROMOTED_VAR_P (temp) = 1;
6825 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6826 return temp;
6829 return DECL_RTL (exp);
6831 case INTEGER_CST:
6832 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6833 TREE_INT_CST_HIGH (exp), mode);
6835 /* ??? If overflow is set, fold will have done an incomplete job,
6836 which can result in (plus xx (const_int 0)), which can get
6837 simplified by validate_replace_rtx during virtual register
6838 instantiation, which can result in unrecognizable insns.
6839 Avoid this by forcing all overflows into registers. */
6840 if (TREE_CONSTANT_OVERFLOW (exp)
6841 && modifier != EXPAND_INITIALIZER)
6842 temp = force_reg (mode, temp);
6844 return temp;
6846 case VECTOR_CST:
6847 return const_vector_from_tree (exp);
6849 case CONST_DECL:
6850 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6852 case REAL_CST:
6853 /* If optimized, generate immediate CONST_DOUBLE
6854 which will be turned into memory by reload if necessary.
6856 We used to force a register so that loop.c could see it. But
6857 this does not allow gen_* patterns to perform optimizations with
6858 the constants. It also produces two insns in cases like "x = 1.0;".
6859 On most machines, floating-point constants are not permitted in
6860 many insns, so we'd end up copying it to a register in any case.
6862 Now, we do the copying in expand_binop, if appropriate. */
6863 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6864 TYPE_MODE (TREE_TYPE (exp)));
6866 case COMPLEX_CST:
6867 case STRING_CST:
6868 if (! TREE_CST_RTL (exp))
6869 output_constant_def (exp, 1);
6871 /* TREE_CST_RTL probably contains a constant address.
6872 On RISC machines where a constant address isn't valid,
6873 make some insns to get that address into a register. */
6874 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6875 && modifier != EXPAND_CONST_ADDRESS
6876 && modifier != EXPAND_INITIALIZER
6877 && modifier != EXPAND_SUM
6878 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6879 || (flag_force_addr
6880 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6881 return replace_equiv_address (TREE_CST_RTL (exp),
6882 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6883 return TREE_CST_RTL (exp);
6885 case EXPR_WITH_FILE_LOCATION:
6887 rtx to_return;
6888 const char *saved_input_filename = input_filename;
6889 int saved_lineno = lineno;
6890 input_filename = EXPR_WFL_FILENAME (exp);
6891 lineno = EXPR_WFL_LINENO (exp);
6892 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6893 emit_line_note (input_filename, lineno);
6894 /* Possibly avoid switching back and forth here. */
6895 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6896 input_filename = saved_input_filename;
6897 lineno = saved_lineno;
6898 return to_return;
6901 case SAVE_EXPR:
6902 context = decl_function_context (exp);
6904 /* If this SAVE_EXPR was at global context, assume we are an
6905 initialization function and move it into our context. */
6906 if (context == 0)
6907 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6909 /* We treat inline_function_decl as an alias for the current function
6910 because that is the inline function whose vars, types, etc.
6911 are being merged into the current function.
6912 See expand_inline_function. */
6913 if (context == current_function_decl || context == inline_function_decl)
6914 context = 0;
6916 /* If this is non-local, handle it. */
6917 if (context)
6919 /* The following call just exists to abort if the context is
6920 not of a containing function. */
6921 find_function_data (context);
6923 temp = SAVE_EXPR_RTL (exp);
6924 if (temp && GET_CODE (temp) == REG)
6926 put_var_into_stack (exp, /*rescan=*/true);
6927 temp = SAVE_EXPR_RTL (exp);
6929 if (temp == 0 || GET_CODE (temp) != MEM)
6930 abort ();
6931 return
6932 replace_equiv_address (temp,
6933 fix_lexical_addr (XEXP (temp, 0), exp));
6935 if (SAVE_EXPR_RTL (exp) == 0)
6937 if (mode == VOIDmode)
6938 temp = const0_rtx;
6939 else
6940 temp = assign_temp (build_qualified_type (type,
6941 (TYPE_QUALS (type)
6942 | TYPE_QUAL_CONST)),
6943 3, 0, 0);
6945 SAVE_EXPR_RTL (exp) = temp;
6946 if (!optimize && GET_CODE (temp) == REG)
6947 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6948 save_expr_regs);
6950 /* If the mode of TEMP does not match that of the expression, it
6951 must be a promoted value. We pass store_expr a SUBREG of the
6952 wanted mode but mark it so that we know that it was already
6953 extended. */
6955 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6957 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6958 promote_mode (type, mode, &unsignedp, 0);
6959 SUBREG_PROMOTED_VAR_P (temp) = 1;
6960 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6963 if (temp == const0_rtx)
6964 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6965 else
6966 store_expr (TREE_OPERAND (exp, 0), temp,
6967 modifier == EXPAND_STACK_PARM ? 2 : 0);
6969 TREE_USED (exp) = 1;
6972 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6973 must be a promoted value. We return a SUBREG of the wanted mode,
6974 but mark it so that we know that it was already extended. */
6976 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6977 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6979 /* Compute the signedness and make the proper SUBREG. */
6980 promote_mode (type, mode, &unsignedp, 0);
6981 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6982 SUBREG_PROMOTED_VAR_P (temp) = 1;
6983 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6984 return temp;
6987 return SAVE_EXPR_RTL (exp);
6989 case UNSAVE_EXPR:
6991 rtx temp;
6992 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6993 TREE_OPERAND (exp, 0)
6994 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6995 return temp;
6998 case PLACEHOLDER_EXPR:
7000 tree old_list = placeholder_list;
7001 tree placeholder_expr = 0;
7003 exp = find_placeholder (exp, &placeholder_expr);
7004 if (exp == 0)
7005 abort ();
7007 placeholder_list = TREE_CHAIN (placeholder_expr);
7008 temp = expand_expr (exp, original_target, tmode, modifier);
7009 placeholder_list = old_list;
7010 return temp;
7013 case WITH_RECORD_EXPR:
7014 /* Put the object on the placeholder list, expand our first operand,
7015 and pop the list. */
7016 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7017 placeholder_list);
7018 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7019 modifier);
7020 placeholder_list = TREE_CHAIN (placeholder_list);
7021 return target;
7023 case GOTO_EXPR:
7024 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7025 expand_goto (TREE_OPERAND (exp, 0));
7026 else
7027 expand_computed_goto (TREE_OPERAND (exp, 0));
7028 return const0_rtx;
7030 case EXIT_EXPR:
7031 expand_exit_loop_if_false (NULL,
7032 invert_truthvalue (TREE_OPERAND (exp, 0)));
7033 return const0_rtx;
7035 case LABELED_BLOCK_EXPR:
7036 if (LABELED_BLOCK_BODY (exp))
7037 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7038 /* Should perhaps use expand_label, but this is simpler and safer. */
7039 do_pending_stack_adjust ();
7040 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7041 return const0_rtx;
7043 case EXIT_BLOCK_EXPR:
7044 if (EXIT_BLOCK_RETURN (exp))
7045 sorry ("returned value in block_exit_expr");
7046 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7047 return const0_rtx;
7049 case LOOP_EXPR:
7050 push_temp_slots ();
7051 expand_start_loop (1);
7052 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7053 expand_end_loop ();
7054 pop_temp_slots ();
7056 return const0_rtx;
7058 case BIND_EXPR:
7060 tree vars = TREE_OPERAND (exp, 0);
7062 /* Need to open a binding contour here because
7063 if there are any cleanups they must be contained here. */
7064 expand_start_bindings (2);
7066 /* Mark the corresponding BLOCK for output in its proper place. */
7067 if (TREE_OPERAND (exp, 2) != 0
7068 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7069 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7071 /* If VARS have not yet been expanded, expand them now. */
7072 while (vars)
7074 if (!DECL_RTL_SET_P (vars))
7075 expand_decl (vars);
7076 expand_decl_init (vars);
7077 vars = TREE_CHAIN (vars);
7080 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7082 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7084 return temp;
7087 case RTL_EXPR:
7088 if (RTL_EXPR_SEQUENCE (exp))
7090 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7091 abort ();
7092 emit_insn (RTL_EXPR_SEQUENCE (exp));
7093 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7095 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7096 free_temps_for_rtl_expr (exp);
7097 return RTL_EXPR_RTL (exp);
7099 case CONSTRUCTOR:
7100 /* If we don't need the result, just ensure we evaluate any
7101 subexpressions. */
7102 if (ignore)
7104 tree elt;
7106 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7107 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7109 return const0_rtx;
7112 /* All elts simple constants => refer to a constant in memory. But
7113 if this is a non-BLKmode mode, let it store a field at a time
7114 since that should make a CONST_INT or CONST_DOUBLE when we
7115 fold. Likewise, if we have a target we can use, it is best to
7116 store directly into the target unless the type is large enough
7117 that memcpy will be used. If we are making an initializer and
7118 all operands are constant, put it in memory as well.
7120 FIXME: Avoid trying to fill vector constructors piece-meal.
7121 Output them with output_constant_def below unless we're sure
7122 they're zeros. This should go away when vector initializers
7123 are treated like VECTOR_CST instead of arrays.
7124 */
7125 else if ((TREE_STATIC (exp)
7126 && ((mode == BLKmode
7127 && ! (target != 0 && safe_from_p (target, exp, 1)))
7128 || TREE_ADDRESSABLE (exp)
7129 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7130 && (! MOVE_BY_PIECES_P
7131 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7132 TYPE_ALIGN (type)))
7133 && ((TREE_CODE (type) == VECTOR_TYPE
7134 && !is_zeros_p (exp))
7135 || ! mostly_zeros_p (exp)))))
7136 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7138 rtx constructor = output_constant_def (exp, 1);
7140 if (modifier != EXPAND_CONST_ADDRESS
7141 && modifier != EXPAND_INITIALIZER
7142 && modifier != EXPAND_SUM)
7143 constructor = validize_mem (constructor);
7145 return constructor;
7147 else
7149 /* Handle calls that pass values in multiple non-contiguous
7150 locations. The Irix 6 ABI has examples of this. */
7151 if (target == 0 || ! safe_from_p (target, exp, 1)
7152 || GET_CODE (target) == PARALLEL
7153 || modifier == EXPAND_STACK_PARM)
7154 target
7155 = assign_temp (build_qualified_type (type,
7156 (TYPE_QUALS (type)
7157 | (TREE_READONLY (exp)
7158 * TYPE_QUAL_CONST))),
7159 0, TREE_ADDRESSABLE (exp), 1);
7161 store_constructor (exp, target, 0, int_expr_size (exp));
7162 return target;
7165 case INDIRECT_REF:
7167 tree exp1 = TREE_OPERAND (exp, 0);
7168 tree index;
7169 tree string = string_constant (exp1, &index);
7171 /* Try to optimize reads from const strings. */
7172 if (string
7173 && TREE_CODE (string) == STRING_CST
7174 && TREE_CODE (index) == INTEGER_CST
7175 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7176 && GET_MODE_CLASS (mode) == MODE_INT
7177 && GET_MODE_SIZE (mode) == 1
7178 && modifier != EXPAND_WRITE)
7179 return gen_int_mode (TREE_STRING_POINTER (string)
7180 [TREE_INT_CST_LOW (index)], mode);
7182 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7183 op0 = memory_address (mode, op0);
7184 temp = gen_rtx_MEM (mode, op0);
7185 set_mem_attributes (temp, exp, 0);
7187 /* If we are writing to this object and its type is a record with
7188 readonly fields, we must mark it as readonly so it will
7189 conflict with readonly references to those fields. */
7190 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7191 RTX_UNCHANGING_P (temp) = 1;
7193 return temp;
7196 case ARRAY_REF:
7197 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7198 abort ();
7201 tree array = TREE_OPERAND (exp, 0);
7202 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7203 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7204 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7205 HOST_WIDE_INT i;
7207 /* Optimize the special-case of a zero lower bound.
7209 We convert the low_bound to sizetype to avoid some problems
7210 with constant folding. (E.g. suppose the lower bound is 1,
7211 and its mode is QI. Without the conversion, (ARRAY
7212 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7213 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7215 if (! integer_zerop (low_bound))
7216 index = size_diffop (index, convert (sizetype, low_bound));
7218 /* Fold an expression like: "foo"[2].
7219 This is not done in fold so it won't happen inside &.
7220 Don't fold if this is for wide characters since it's too
7221 difficult to do correctly and this is a very rare case. */
7223 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7224 && TREE_CODE (array) == STRING_CST
7225 && TREE_CODE (index) == INTEGER_CST
7226 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7227 && GET_MODE_CLASS (mode) == MODE_INT
7228 && GET_MODE_SIZE (mode) == 1)
7229 return gen_int_mode (TREE_STRING_POINTER (array)
7230 [TREE_INT_CST_LOW (index)], mode);
7232 /* If this is a constant index into a constant array,
7233 just get the value from the array. Handle both the case where
7234 we have an explicit constructor and the case where our operand is a variable
7235 that was declared const. */
7237 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7238 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7239 && TREE_CODE (index) == INTEGER_CST
7240 && 0 > compare_tree_int (index,
7241 list_length (CONSTRUCTOR_ELTS
7242 (TREE_OPERAND (exp, 0)))))
7244 tree elem;
7246 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7247 i = TREE_INT_CST_LOW (index);
7248 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7251 if (elem)
7252 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7253 modifier);
7256 else if (optimize >= 1
7257 && modifier != EXPAND_CONST_ADDRESS
7258 && modifier != EXPAND_INITIALIZER
7259 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7260 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7261 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7263 if (TREE_CODE (index) == INTEGER_CST)
7265 tree init = DECL_INITIAL (array);
7267 if (TREE_CODE (init) == CONSTRUCTOR)
7269 tree elem;
7271 for (elem = CONSTRUCTOR_ELTS (init);
7272 (elem
7273 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7274 elem = TREE_CHAIN (elem))
7277 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7278 return expand_expr (fold (TREE_VALUE (elem)), target,
7279 tmode, modifier);
7281 else if (TREE_CODE (init) == STRING_CST
7282 && 0 > compare_tree_int (index,
7283 TREE_STRING_LENGTH (init)))
7285 tree type = TREE_TYPE (TREE_TYPE (init));
7286 enum machine_mode mode = TYPE_MODE (type);
7288 if (GET_MODE_CLASS (mode) == MODE_INT
7289 && GET_MODE_SIZE (mode) == 1)
7290 return gen_int_mode (TREE_STRING_POINTER (init)
7291 [TREE_INT_CST_LOW (index)], mode);
7296 /* Fall through. */
7298 case COMPONENT_REF:
7299 case BIT_FIELD_REF:
7300 case ARRAY_RANGE_REF:
7301 /* If the operand is a CONSTRUCTOR, we can just extract the
7302 appropriate field if it is present. Don't do this if we have
7303 already written the data since we want to refer to that copy
7304 and varasm.c assumes that's what we'll do. */
7305 if (code == COMPONENT_REF
7306 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7307 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7309 tree elt;
7311 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7312 elt = TREE_CHAIN (elt))
7313 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7314 /* We can normally use the value of the field in the
7315 CONSTRUCTOR. However, if this is a bitfield in
7316 an integral mode that we can fit in a HOST_WIDE_INT,
7317 we must mask only the number of bits in the bitfield,
7318 since this is done implicitly by the constructor. If
7319 the bitfield does not meet either of those conditions,
7320 we can't do this optimization. */
7321 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7322 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7323 == MODE_INT)
7324 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7325 <= HOST_BITS_PER_WIDE_INT))))
7327 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7328 && modifier == EXPAND_STACK_PARM)
7329 target = 0;
7330 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7331 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7333 HOST_WIDE_INT bitsize
7334 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7335 enum machine_mode imode
7336 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7338 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7340 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7341 op0 = expand_and (imode, op0, op1, target);
7343 else
7345 tree count
7346 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7347 0);
7349 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7350 target, 0);
7351 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7352 target, 0);
7356 return op0;
7361 enum machine_mode mode1;
7362 HOST_WIDE_INT bitsize, bitpos;
7363 tree offset;
7364 int volatilep = 0;
7365 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7366 &mode1, &unsignedp, &volatilep);
7367 rtx orig_op0;
7369 /* If we got back the original object, something is wrong. Perhaps
7370 we are evaluating an expression too early. In any event, don't
7371 infinitely recurse. */
7372 if (tem == exp)
7373 abort ();
7375 /* If TEM's type is a union of variable size, pass TARGET to the inner
7376 computation, since it will need a temporary and TARGET is known
7377 to be suitable. This occurs in unchecked conversion in Ada. */
7379 orig_op0 = op0
7380 = expand_expr (tem,
7381 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7382 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7383 != INTEGER_CST)
7384 && modifier != EXPAND_STACK_PARM
7385 ? target : NULL_RTX),
7386 VOIDmode,
7387 (modifier == EXPAND_INITIALIZER
7388 || modifier == EXPAND_CONST_ADDRESS
7389 || modifier == EXPAND_STACK_PARM)
7390 ? modifier : EXPAND_NORMAL);
7392 /* If this is a constant, put it into a register if it is a
7393 legitimate constant and OFFSET is 0 and memory if it isn't. */
7394 if (CONSTANT_P (op0))
7396 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7397 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7398 && offset == 0)
7399 op0 = force_reg (mode, op0);
7400 else
7401 op0 = validize_mem (force_const_mem (mode, op0));
7404 if (offset != 0)
7406 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7407 EXPAND_SUM);
7409 /* If this object is in a register, put it into memory.
7410 This case can't occur in C, but can in Ada if we have
7411 unchecked conversion of an expression from a scalar type to
7412 an array or record type. */
7413 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7414 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7416 /* If the operand is a SAVE_EXPR, we can deal with this by
7417 forcing the SAVE_EXPR into memory. */
7418 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7420 put_var_into_stack (TREE_OPERAND (exp, 0),
7421 /*rescan=*/true);
7422 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7424 else
7426 tree nt
7427 = build_qualified_type (TREE_TYPE (tem),
7428 (TYPE_QUALS (TREE_TYPE (tem))
7429 | TYPE_QUAL_CONST));
7430 rtx memloc = assign_temp (nt, 1, 1, 1);
7432 emit_move_insn (memloc, op0);
7433 op0 = memloc;
7437 if (GET_CODE (op0) != MEM)
7438 abort ();
7440 #ifdef POINTERS_EXTEND_UNSIGNED
7441 if (GET_MODE (offset_rtx) != Pmode)
7442 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7443 #else
7444 if (GET_MODE (offset_rtx) != ptr_mode)
7445 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7446 #endif
7448 /* A constant address in OP0 can have VOIDmode; we must not try
7449 to call force_reg in that case, so avoid it. */
7450 if (GET_CODE (op0) == MEM
7451 && GET_MODE (op0) == BLKmode
7452 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7453 && bitsize != 0
7454 && (bitpos % bitsize) == 0
7455 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7456 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7458 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7459 bitpos = 0;
7462 op0 = offset_address (op0, offset_rtx,
7463 highest_pow2_factor (offset));
7466 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7467 record its alignment as BIGGEST_ALIGNMENT. */
7468 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7469 && is_aligning_offset (offset, tem))
7470 set_mem_align (op0, BIGGEST_ALIGNMENT);
7472 /* Don't forget about volatility even if this is a bitfield. */
7473 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7475 if (op0 == orig_op0)
7476 op0 = copy_rtx (op0);
7478 MEM_VOLATILE_P (op0) = 1;
7481 /* The following code doesn't handle CONCAT.
7482 Assume only bitpos == 0 can be used for CONCAT, due to
7483 one-element arrays having the same mode as their element. */
7484 if (GET_CODE (op0) == CONCAT)
7486 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7487 abort ();
7488 return op0;
7491 /* In cases where an aligned union has an unaligned object
7492 as a field, we might be extracting a BLKmode value from
7493 an integer-mode (e.g., SImode) object. Handle this case
7494 by doing the extract into an object as wide as the field
7495 (which we know to be the width of a basic mode), then
7496 storing into memory, and changing the mode to BLKmode. */
7497 if (mode1 == VOIDmode
7498 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7499 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7500 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7502 && modifier != EXPAND_CONST_ADDRESS
7503 && modifier != EXPAND_INITIALIZER)
7504 /* If the field isn't aligned enough to fetch as a memref,
7505 fetch it as a bit field. */
7506 || (mode1 != BLKmode
7507 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7508 && ((TYPE_ALIGN (TREE_TYPE (tem))
7509 < GET_MODE_ALIGNMENT (mode))
7510 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7511 /* If the type and the field are a constant size and the
7512 size of the type isn't the same size as the bitfield,
7513 we must use bitfield operations. */
7514 || (bitsize >= 0
7515 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7516 == INTEGER_CST)
7517 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7518 bitsize)))
7520 enum machine_mode ext_mode = mode;
7522 if (ext_mode == BLKmode
7523 && ! (target != 0 && GET_CODE (op0) == MEM
7524 && GET_CODE (target) == MEM
7525 && bitpos % BITS_PER_UNIT == 0))
7526 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7528 if (ext_mode == BLKmode)
7530 /* In this case, BITPOS must start at a byte boundary and
7531 TARGET, if specified, must be a MEM. */
7532 if (GET_CODE (op0) != MEM
7533 || (target != 0 && GET_CODE (target) != MEM)
7534 || bitpos % BITS_PER_UNIT != 0)
7535 abort ();
7537 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7538 if (target == 0)
7539 target = assign_temp (type, 0, 1, 1);
7541 emit_block_move (target, op0,
7542 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7543 / BITS_PER_UNIT),
7544 (modifier == EXPAND_STACK_PARM
7545 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7547 return target;
7550 op0 = validize_mem (op0);
7552 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7553 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7555 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7556 (modifier == EXPAND_STACK_PARM
7557 ? NULL_RTX : target),
7558 ext_mode, ext_mode,
7559 int_size_in_bytes (TREE_TYPE (tem)));
7561 /* If the result is a record type and BITSIZE is narrower than
7562 the mode of OP0, an integral mode, and this is a big endian
7563 machine, we must put the field into the high-order bits. */
7564 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7566 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7567 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7568 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7569 - bitsize),
7570 op0, 1);
7572 if (mode == BLKmode)
7574 rtx new = assign_temp (build_qualified_type
7575 ((*lang_hooks.types.type_for_mode)
7576 (ext_mode, 0),
7577 TYPE_QUAL_CONST), 0, 1, 1);
7579 emit_move_insn (new, op0);
7580 op0 = copy_rtx (new);
7581 PUT_MODE (op0, BLKmode);
7582 set_mem_attributes (op0, exp, 1);
7585 return op0;
7588 /* If the result is BLKmode, use that to access the object
7589 now as well. */
7590 if (mode == BLKmode)
7591 mode1 = BLKmode;
7593 /* Get a reference to just this component. */
7594 if (modifier == EXPAND_CONST_ADDRESS
7595 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7596 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7597 else
7598 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7600 if (op0 == orig_op0)
7601 op0 = copy_rtx (op0);
7603 set_mem_attributes (op0, exp, 0);
7604 if (GET_CODE (XEXP (op0, 0)) == REG)
7605 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7607 MEM_VOLATILE_P (op0) |= volatilep;
7608 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7609 || modifier == EXPAND_CONST_ADDRESS
7610 || modifier == EXPAND_INITIALIZER)
7611 return op0;
7612 else if (target == 0)
7613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7615 convert_move (target, op0, unsignedp);
7616 return target;
7619 case VTABLE_REF:
7621 rtx insn, before = get_last_insn (), vtbl_ref;
7623 /* Evaluate the interior expression. */
7624 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7625 tmode, modifier);
7627 /* Get or create an instruction off which to hang a note. */
7628 if (REG_P (subtarget))
7630 target = subtarget;
7631 insn = get_last_insn ();
7632 if (insn == before)
7633 abort ();
7634 if (! INSN_P (insn))
7635 insn = prev_nonnote_insn (insn);
7637 else
7639 target = gen_reg_rtx (GET_MODE (subtarget));
7640 insn = emit_move_insn (target, subtarget);
7643 /* Collect the data for the note. */
7644 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7645 vtbl_ref = plus_constant (vtbl_ref,
7646 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7647 /* Discard the initial CONST that was added. */
7648 vtbl_ref = XEXP (vtbl_ref, 0);
7650 REG_NOTES (insn)
7651 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7653 return target;
7656 /* Intended for a reference to a buffer of a file-object in Pascal.
7657 But it's not certain that a special tree code will really be
7658 necessary for these. INDIRECT_REF might work for them. */
7659 case BUFFER_REF:
7660 abort ();
7662 case IN_EXPR:
7664 /* Pascal set IN expression.
7666 Algorithm:
7667 rlo = set_low - (set_low%bits_per_word);
7668 the_word = set [ (index - rlo)/bits_per_word ];
7669 bit_index = index % bits_per_word;
7670 bitmask = 1 << bit_index;
7671 return !!(the_word & bitmask); */
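/* Editor's note: an illustrative C rendering of the algorithm in the
   comment above; this sketch is not part of the original source and the
   names (set_bytes, set_low, i) are hypothetical.

       int rlo = set_low - (set_low % BITS_PER_UNIT);
       unsigned char the_byte = set_bytes[(i - rlo) / BITS_PER_UNIT];
       int bit_index = i % BITS_PER_UNIT;
       int result = (the_byte >> bit_index) & 1;

   The RTL emitted below performs the same address arithmetic, shift and
   AND (working in byte_mode), preceded by range checks that branch to
   the out-of-range label when the index can fall outside the set's
   domain.  */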
7673 tree set = TREE_OPERAND (exp, 0);
7674 tree index = TREE_OPERAND (exp, 1);
7675 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7676 tree set_type = TREE_TYPE (set);
7677 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7678 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7679 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7680 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7681 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7682 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7683 rtx setaddr = XEXP (setval, 0);
7684 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7685 rtx rlow;
7686 rtx diff, quo, rem, addr, bit, result;
7688 /* If domain is empty, answer is no. Likewise if index is constant
7689 and out of bounds. */
7690 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7691 && TREE_CODE (set_low_bound) == INTEGER_CST
7692 && tree_int_cst_lt (set_high_bound, set_low_bound))
7693 || (TREE_CODE (index) == INTEGER_CST
7694 && TREE_CODE (set_low_bound) == INTEGER_CST
7695 && tree_int_cst_lt (index, set_low_bound))
7696 || (TREE_CODE (set_high_bound) == INTEGER_CST
7697 && TREE_CODE (index) == INTEGER_CST
7698 && tree_int_cst_lt (set_high_bound, index))))
7699 return const0_rtx;
7701 if (target == 0)
7702 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7704 /* If we get here, we have to generate the code for both cases
7705 (in range and out of range). */
7707 op0 = gen_label_rtx ();
7708 op1 = gen_label_rtx ();
7710 if (! (GET_CODE (index_val) == CONST_INT
7711 && GET_CODE (lo_r) == CONST_INT))
7712 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7713 GET_MODE (index_val), iunsignedp, op1);
7715 if (! (GET_CODE (index_val) == CONST_INT
7716 && GET_CODE (hi_r) == CONST_INT))
7717 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7718 GET_MODE (index_val), iunsignedp, op1);
7720 /* Calculate the element number of bit zero in the first word
7721 of the set. */
7722 if (GET_CODE (lo_r) == CONST_INT)
7723 rlow = GEN_INT (INTVAL (lo_r)
7724 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7725 else
7726 rlow = expand_binop (index_mode, and_optab, lo_r,
7727 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7728 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7730 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7731 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7733 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7734 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7735 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7736 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7738 addr = memory_address (byte_mode,
7739 expand_binop (index_mode, add_optab, diff,
7740 setaddr, NULL_RTX, iunsignedp,
7741 OPTAB_LIB_WIDEN));
7743 /* Extract the bit we want to examine. */
7744 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7745 gen_rtx_MEM (byte_mode, addr),
7746 make_tree (TREE_TYPE (index), rem),
7747 NULL_RTX, 1);
7748 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7749 GET_MODE (target) == byte_mode ? target : 0,
7750 1, OPTAB_LIB_WIDEN);
7752 if (result != target)
7753 convert_move (target, result, 1);
7755 /* Output the code to handle the out-of-range case. */
7756 emit_jump (op0);
7757 emit_label (op1);
7758 emit_move_insn (target, const0_rtx);
7759 emit_label (op0);
7760 return target;
7763 case WITH_CLEANUP_EXPR:
7764 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7766 WITH_CLEANUP_EXPR_RTL (exp)
7767 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7768 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7769 CLEANUP_EH_ONLY (exp));
7771 /* That's it for this cleanup. */
7772 TREE_OPERAND (exp, 1) = 0;
7774 return WITH_CLEANUP_EXPR_RTL (exp);
7776 case CLEANUP_POINT_EXPR:
7778 /* Start a new binding layer that will keep track of all cleanup
7779 actions to be performed. */
7780 expand_start_bindings (2);
7782 target_temp_slot_level = temp_slot_level;
7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7785 /* If we're going to use this value, load it up now. */
7786 if (! ignore)
7787 op0 = force_not_mem (op0);
7788 preserve_temp_slots (op0);
7789 expand_end_bindings (NULL_TREE, 0, 0);
7791 return op0;
7793 case CALL_EXPR:
7794 /* Check for a built-in function. */
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7796 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7797 == FUNCTION_DECL)
7798 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7800 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7801 == BUILT_IN_FRONTEND)
7802 return (*lang_hooks.expand_expr) (exp, original_target,
7803 tmode, modifier);
7804 else
7805 return expand_builtin (exp, target, subtarget, tmode, ignore);
7808 return expand_call (exp, target, ignore);
7810 case NON_LVALUE_EXPR:
7811 case NOP_EXPR:
7812 case CONVERT_EXPR:
7813 case REFERENCE_EXPR:
7814 if (TREE_OPERAND (exp, 0) == error_mark_node)
7815 return const0_rtx;
7817 if (TREE_CODE (type) == UNION_TYPE)
7819 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7821 /* If both input and output are BLKmode, this conversion isn't doing
7822 anything except possibly changing the memory attributes. */
7823 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7825 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7826 modifier);
7828 result = copy_rtx (result);
7829 set_mem_attributes (result, exp, 0);
7830 return result;
7833 if (target == 0)
7834 target = assign_temp (type, 0, 1, 1);
7836 if (GET_CODE (target) == MEM)
7837 /* Store data into beginning of memory target. */
7838 store_expr (TREE_OPERAND (exp, 0),
7839 adjust_address (target, TYPE_MODE (valtype), 0),
7840 modifier == EXPAND_STACK_PARM ? 2 : 0);
7842 else if (GET_CODE (target) == REG)
7843 /* Store this field into a union of the proper type. */
7844 store_field (target,
7845 MIN ((int_size_in_bytes (TREE_TYPE
7846 (TREE_OPERAND (exp, 0)))
7847 * BITS_PER_UNIT),
7848 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7849 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7850 VOIDmode, 0, type, 0);
7851 else
7852 abort ();
7854 /* Return the entire union. */
7855 return target;
7858 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7861 modifier);
7863 /* If the signedness of the conversion differs and OP0 is
7864 a promoted SUBREG, clear that indication since we now
7865 have to do the proper extension. */
7866 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7867 && GET_CODE (op0) == SUBREG)
7868 SUBREG_PROMOTED_VAR_P (op0) = 0;
7870 return op0;
7873 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7874 if (GET_MODE (op0) == mode)
7875 return op0;
7877 /* If OP0 is a constant, just convert it into the proper mode. */
7878 if (CONSTANT_P (op0))
7880 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7881 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7883 if (modifier == EXPAND_INITIALIZER)
7884 return simplify_gen_subreg (mode, op0, inner_mode,
7885 subreg_lowpart_offset (mode,
7886 inner_mode));
7887 else
7888 return convert_modes (mode, inner_mode, op0,
7889 TREE_UNSIGNED (inner_type));
7892 if (modifier == EXPAND_INITIALIZER)
7893 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7895 if (target == 0)
7896 return
7897 convert_to_mode (mode, op0,
7898 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7899 else
7900 convert_move (target, op0,
7901 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7902 return target;
7904 case VIEW_CONVERT_EXPR:
7905 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7907 /* If the input and output modes are both the same, we are done.
7908 Otherwise, if neither mode is BLKmode and both are within a word, we
7909 can use gen_lowpart. If neither is true, make sure the operand is
7910 in memory and convert the MEM to the new mode. */
7911 if (TYPE_MODE (type) == GET_MODE (op0))
7913 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7914 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7915 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7916 op0 = gen_lowpart (TYPE_MODE (type), op0);
7917 else if (GET_CODE (op0) != MEM)
7919 /* If the operand is not a MEM, force it into memory. Since we
7920 are going to be changing the mode of the MEM, don't call
7921 force_const_mem for constants because we don't allow pool
7922 constants to change mode. */
7923 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7925 if (TREE_ADDRESSABLE (exp))
7926 abort ();
7928 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7929 target
7930 = assign_stack_temp_for_type
7931 (TYPE_MODE (inner_type),
7932 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7934 emit_move_insn (target, op0);
7935 op0 = target;
7938 /* At this point, OP0 is in the correct mode. If the output type is such
7939 that the operand is known to be aligned, indicate that it is.
7940 Otherwise, we need only be concerned about alignment for non-BLKmode
7941 results. */
7942 if (GET_CODE (op0) == MEM)
7944 op0 = copy_rtx (op0);
7946 if (TYPE_ALIGN_OK (type))
7947 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7948 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7949 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7951 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7952 HOST_WIDE_INT temp_size
7953 = MAX (int_size_in_bytes (inner_type),
7954 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7955 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7956 temp_size, 0, type);
7957 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7959 if (TREE_ADDRESSABLE (exp))
7960 abort ();
7962 if (GET_MODE (op0) == BLKmode)
7963 emit_block_move (new_with_op0_mode, op0,
7964 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7965 (modifier == EXPAND_STACK_PARM
7966 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7967 else
7968 emit_move_insn (new_with_op0_mode, op0);
7970 op0 = new;
7973 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7976 return op0;
7978 case PLUS_EXPR:
7979 this_optab = ! unsignedp && flag_trapv
7980 && (GET_MODE_CLASS (mode) == MODE_INT)
7981 ? addv_optab : add_optab;
7983 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7984 something else, make sure we add the register to the constant and
7985 then to the other thing. This case can occur during strength
7986 reduction and doing it this way will produce better code if the
7987 frame pointer or argument pointer is eliminated.
7989 fold-const.c will ensure that the constant is always in the inner
7990 PLUS_EXPR, so the only case we need to do anything about is if
7991 sp, ap, or fp is our second argument, in which case we must swap
7992 the innermost first argument and our second argument. */
7994 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7995 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7996 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7997 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7998 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7999 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8001 tree t = TREE_OPERAND (exp, 1);
8003 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8004 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8007 /* If the result is to be ptr_mode and we are adding an integer to
8008 something, we might be forming a constant. So try to use
8009 plus_constant. If it produces a sum and we can't accept it,
8010 use force_operand. This allows P = &ARR[const] to generate
8011 efficient code on machines where a SYMBOL_REF is not a valid
8012 address.
8014 If this is an EXPAND_SUM call, always return the sum. */
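/* Editor's note: illustrative sketch, not part of the original source.
   For a hypothetical

       static int arr[100];
       int *p = &arr[10];

   the address expression is ARR plus a constant byte offset.  Routing it
   through plus_constant lets it fold to a single
   (const (plus (symbol_ref "arr") (const_int 40))) (assuming 4-byte int)
   instead of an explicit add whose first operand is a bare SYMBOL_REF,
   which matters on targets where a SYMBOL_REF alone is not a legitimate
   address.  */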
8015 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8016 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8018 if (modifier == EXPAND_STACK_PARM)
8019 target = 0;
8020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8021 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8022 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8024 rtx constant_part;
8026 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8027 EXPAND_SUM);
8028 /* Use immed_double_const to ensure that the constant is
8029 truncated according to the mode of OP1, then sign extended
8030 to a HOST_WIDE_INT. Using the constant directly can result
8031 in non-canonical RTL in a 64x32 cross compile. */
8032 constant_part
8033 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8034 (HOST_WIDE_INT) 0,
8035 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8036 op1 = plus_constant (op1, INTVAL (constant_part));
8037 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8038 op1 = force_operand (op1, target);
8039 return op1;
8042 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8043 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8044 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8046 rtx constant_part;
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8049 (modifier == EXPAND_INITIALIZER
8050 ? EXPAND_INITIALIZER : EXPAND_SUM));
8051 if (! CONSTANT_P (op0))
8053 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8054 VOIDmode, modifier);
8055 /* Don't go to both_summands if modifier
8056 says it's not right to return a PLUS. */
8057 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8058 goto binop2;
8059 goto both_summands;
8061 /* Use immed_double_const to ensure that the constant is
8062 truncated according to the mode of OP1, then sign extended
8063 to a HOST_WIDE_INT. Using the constant directly can result
8064 in non-canonical RTL in a 64x32 cross compile. */
8065 constant_part
8066 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8067 (HOST_WIDE_INT) 0,
8068 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8069 op0 = plus_constant (op0, INTVAL (constant_part));
8070 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8071 op0 = force_operand (op0, target);
8072 return op0;
8076 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8077 subtarget = 0;
8079 /* No sense saving up arithmetic to be done
8080 if it's all in the wrong mode to form part of an address.
8081 And force_operand won't know whether to sign-extend or
8082 zero-extend. */
8083 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8084 || mode != ptr_mode)
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8087 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8088 if (op0 == const0_rtx)
8089 return op1;
8090 if (op1 == const0_rtx)
8091 return op0;
8092 goto binop2;
8095 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8096 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8098 /* We come here from MINUS_EXPR when the second operand is a
8099 constant. */
8100 both_summands:
8101 /* Make sure any term that's a sum with a constant comes last. */
8102 if (GET_CODE (op0) == PLUS
8103 && CONSTANT_P (XEXP (op0, 1)))
8105 temp = op0;
8106 op0 = op1;
8107 op1 = temp;
8109 /* If adding to a sum including a constant,
8110 associate it to put the constant outside. */
8111 if (GET_CODE (op1) == PLUS
8112 && CONSTANT_P (XEXP (op1, 1)))
8114 rtx constant_term = const0_rtx;
8116 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8117 if (temp != 0)
8118 op0 = temp;
8119 /* Ensure that MULT comes first if there is one. */
8120 else if (GET_CODE (op0) == MULT)
8121 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
8122 else
8123 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
8125 /* Let's also eliminate constants from op0 if possible. */
8126 op0 = eliminate_constant_term (op0, &constant_term);
8128 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
8129 their sum should be a constant. Form it into OP1, since the
8130 result we want will then be OP0 + OP1. */
8132 temp = simplify_binary_operation (PLUS, mode, constant_term,
8133 XEXP (op1, 1));
8134 if (temp != 0)
8135 op1 = temp;
8136 else
8137 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8140 /* Put a constant term last and put a multiplication first. */
8141 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8142 temp = op1, op1 = op0, op0 = temp;
8144 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8145 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8147 case MINUS_EXPR:
8148 /* For initializers, we are allowed to return a MINUS of two
8149 symbolic constants. Here we handle all cases when both operands
8150 are constant. */
8151 /* Handle difference of two symbolic constants,
8152 for the sake of an initializer. */
8153 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8154 && really_constant_p (TREE_OPERAND (exp, 0))
8155 && really_constant_p (TREE_OPERAND (exp, 1)))
8157 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8158 modifier);
8159 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8160 modifier);
8162 /* If the last operand is a CONST_INT, use plus_constant of
8163 the negated constant. Else make the MINUS. */
8164 if (GET_CODE (op1) == CONST_INT)
8165 return plus_constant (op0, - INTVAL (op1));
8166 else
8167 return gen_rtx_MINUS (mode, op0, op1);
8170 this_optab = ! unsignedp && flag_trapv
8171 && (GET_MODE_CLASS(mode) == MODE_INT)
8172 ? subv_optab : sub_optab;
8174 /* No sense saving up arithmetic to be done
8175 if it's all in the wrong mode to form part of an address.
8176 And force_operand won't know whether to sign-extend or
8177 zero-extend. */
8178 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8179 || mode != ptr_mode)
8180 goto binop;
8182 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8183 subtarget = 0;
8185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8186 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8188 /* Convert A - const to A + (-const). */
8189 if (GET_CODE (op1) == CONST_INT)
8191 op1 = negate_rtx (mode, op1);
8192 goto both_summands;
8195 goto binop2;
8197 case MULT_EXPR:
8198 /* If first operand is constant, swap them.
8199 Thus the following special case checks need only
8200 check the second operand. */
8201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8203 tree t1 = TREE_OPERAND (exp, 0);
8204 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8205 TREE_OPERAND (exp, 1) = t1;
8208 /* Attempt to return something suitable for generating an
8209 indexed address, for machines that support that. */
8211 if (modifier == EXPAND_SUM && mode == ptr_mode
8212 && host_integerp (TREE_OPERAND (exp, 1), 0))
8214 tree exp1 = TREE_OPERAND (exp, 1);
8216 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8217 EXPAND_SUM);
8219 /* If we knew for certain that this is arithmetic for an array
8220 reference, and we knew the bounds of the array, then we could
8221 apply the distributive law across (PLUS X C) for constant C.
8222 Without such knowledge, we risk overflowing the computation
8223 when both X and C are large, but X+C isn't. */
8224 /* ??? Could perhaps special-case EXP being unsigned and C being
8225 positive. In that case we are certain that X+C is no smaller
8226 than X and so the transformed expression will overflow iff the
8227 original would have. */
8229 if (GET_CODE (op0) != REG)
8230 op0 = force_operand (op0, NULL_RTX);
8231 if (GET_CODE (op0) != REG)
8232 op0 = copy_to_mode_reg (mode, op0);
8234 return gen_rtx_MULT (mode, op0,
8235 gen_int_mode (tree_low_cst (exp1, 0),
8236 TYPE_MODE (TREE_TYPE (exp1))));
8239 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8240 subtarget = 0;
8242 if (modifier == EXPAND_STACK_PARM)
8243 target = 0;
8245 /* Check for multiplying things that have been extended
8246 from a narrower type. If this machine supports multiplying
8247 in that narrower type with a result in the desired type,
8248 do it that way, and avoid the explicit type-conversion. */
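/* Editor's note: illustrative sketch, not part of the original source.
   The shape being matched is, e.g. (assuming 16-bit short, 32-bit int,
   and a target with a widening multiply pattern):

       short a, b;
       int p = (int) a * (int) b;

   i.e. a MULT_EXPR whose operands are NOP_EXPRs from a narrower type.
   It can then be emitted as one HImode x HImode -> SImode multiply
   (smul_widen_optab / umul_widen_optab) instead of two extensions
   followed by a full SImode multiply.  */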
8249 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8250 && TREE_CODE (type) == INTEGER_TYPE
8251 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8252 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8253 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8254 && int_fits_type_p (TREE_OPERAND (exp, 1),
8255 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8256 /* Don't use a widening multiply if a shift will do. */
8257 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8258 > HOST_BITS_PER_WIDE_INT)
8259 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8261 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8262 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8264 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8265 /* If both operands are extended, they must either both
8266 be zero-extended or both be sign-extended. */
8267 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8269 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8271 enum machine_mode innermode
8272 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8273 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8274 ? smul_widen_optab : umul_widen_optab);
8275 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8276 ? umul_widen_optab : smul_widen_optab);
8277 if (mode == GET_MODE_WIDER_MODE (innermode))
8279 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8281 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8282 NULL_RTX, VOIDmode, 0);
8283 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8285 VOIDmode, 0);
8286 else
8287 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8288 NULL_RTX, VOIDmode, 0);
8289 goto binop2;
8291 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8292 && innermode == word_mode)
8294 rtx htem;
8295 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8296 NULL_RTX, VOIDmode, 0);
8297 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8298 op1 = convert_modes (innermode, mode,
8299 expand_expr (TREE_OPERAND (exp, 1),
8300 NULL_RTX, VOIDmode, 0),
8301 unsignedp);
8302 else
8303 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8304 NULL_RTX, VOIDmode, 0);
8305 temp = expand_binop (mode, other_optab, op0, op1, target,
8306 unsignedp, OPTAB_LIB_WIDEN);
8307 htem = expand_mult_highpart_adjust (innermode,
8308 gen_highpart (innermode, temp),
8309 op0, op1,
8310 gen_highpart (innermode, temp),
8311 unsignedp);
8312 emit_move_insn (gen_highpart (innermode, temp), htem);
8313 return temp;
8317 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8318 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8319 return expand_mult (mode, op0, op1, target, unsignedp);
8321 case TRUNC_DIV_EXPR:
8322 case FLOOR_DIV_EXPR:
8323 case CEIL_DIV_EXPR:
8324 case ROUND_DIV_EXPR:
8325 case EXACT_DIV_EXPR:
8326 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8327 subtarget = 0;
8328 if (modifier == EXPAND_STACK_PARM)
8329 target = 0;
8330 /* Possible optimization: compute the dividend with EXPAND_SUM;
8331 then, if the divisor is constant, we can optimize the case
8332 where some terms of the dividend have coefficients divisible by it. */
8333 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8334 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8335 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8337 case RDIV_EXPR:
8338 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8339 saving an expensive divide. If not, combine will rebuild the original
8340 computation. */
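/* Editor's note: illustrative sketch, not part of the original source.
   With -funsafe-math-optimizations, a function such as

       double f (double a, double b, double c) { return a / c + b / c; }

   has each division rewritten here as a * (1/c); CSE may then share the
   single reciprocal, trading two divisions for one division and two
   multiplications.  When the reciprocal is not reused, combine is
   expected to reconstruct the original divisions.  */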
8341 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8342 && TREE_CODE (type) == REAL_TYPE
8343 && !real_onep (TREE_OPERAND (exp, 0)))
8344 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8345 build (RDIV_EXPR, type,
8346 build_real (type, dconst1),
8347 TREE_OPERAND (exp, 1))),
8348 target, tmode, modifier);
8349 this_optab = sdiv_optab;
8350 goto binop;
8352 case TRUNC_MOD_EXPR:
8353 case FLOOR_MOD_EXPR:
8354 case CEIL_MOD_EXPR:
8355 case ROUND_MOD_EXPR:
8356 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8357 subtarget = 0;
8358 if (modifier == EXPAND_STACK_PARM)
8359 target = 0;
8360 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8361 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8362 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8364 case FIX_ROUND_EXPR:
8365 case FIX_FLOOR_EXPR:
8366 case FIX_CEIL_EXPR:
8367 abort (); /* Not used for C. */
8369 case FIX_TRUNC_EXPR:
8370 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8371 if (target == 0 || modifier == EXPAND_STACK_PARM)
8372 target = gen_reg_rtx (mode);
8373 expand_fix (target, op0, unsignedp);
8374 return target;
8376 case FLOAT_EXPR:
8377 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8378 if (target == 0 || modifier == EXPAND_STACK_PARM)
8379 target = gen_reg_rtx (mode);
8380 /* expand_float can't figure out what to do if FROM has VOIDmode.
8381 So give it the correct mode. With -O, cse will optimize this. */
8382 if (GET_MODE (op0) == VOIDmode)
8383 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8384 op0);
8385 expand_float (target, op0,
8386 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8387 return target;
8389 case NEGATE_EXPR:
8390 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8391 if (modifier == EXPAND_STACK_PARM)
8392 target = 0;
8393 temp = expand_unop (mode,
8394 ! unsignedp && flag_trapv
8395 && (GET_MODE_CLASS(mode) == MODE_INT)
8396 ? negv_optab : neg_optab, op0, target, 0);
8397 if (temp == 0)
8398 abort ();
8399 return temp;
8401 case ABS_EXPR:
8402 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403 if (modifier == EXPAND_STACK_PARM)
8404 target = 0;
8406 /* Handle complex values specially. */
8407 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8408 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8409 return expand_complex_abs (mode, op0, target, unsignedp);
8411 /* Unsigned abs is simply the operand. Testing here means we don't
8412 risk generating incorrect code below. */
8413 if (TREE_UNSIGNED (type))
8414 return op0;
8416 return expand_abs (mode, op0, target, unsignedp,
8417 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8419 case MAX_EXPR:
8420 case MIN_EXPR:
8421 target = original_target;
8422 if (target == 0
8423 || modifier == EXPAND_STACK_PARM
8424 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8425 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8426 || GET_MODE (target) != mode
8427 || (GET_CODE (target) == REG
8428 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8429 target = gen_reg_rtx (mode);
8430 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8431 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8433 /* First try to do it with a special MIN or MAX instruction.
8434 If that does not win, use a conditional jump to select the proper
8435 value. */
8436 this_optab = (TREE_UNSIGNED (type)
8437 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8438 : (code == MIN_EXPR ? smin_optab : smax_optab));
8440 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8441 OPTAB_WIDEN);
8442 if (temp != 0)
8443 return temp;
8445 /* At this point, a MEM target is no longer useful; we will get better
8446 code without it. */
8448 if (GET_CODE (target) == MEM)
8449 target = gen_reg_rtx (mode);
8451 if (target != op0)
8452 emit_move_insn (target, op0);
8454 op0 = gen_label_rtx ();
8456 /* If this mode is an integer too wide to compare properly,
8457 compare word by word. Rely on cse to optimize constant cases. */
8458 if (GET_MODE_CLASS (mode) == MODE_INT
8459 && ! can_compare_p (GE, mode, ccp_jump))
8461 if (code == MAX_EXPR)
8462 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8463 target, op1, NULL_RTX, op0);
8464 else
8465 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8466 op1, target, NULL_RTX, op0);
8468 else
8470 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8471 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8472 unsignedp, mode, NULL_RTX, NULL_RTX,
8473 op0);
8475 emit_move_insn (target, op1);
8476 emit_label (op0);
8477 return target;
8479 case BIT_NOT_EXPR:
8480 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8481 if (modifier == EXPAND_STACK_PARM)
8482 target = 0;
8483 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8484 if (temp == 0)
8485 abort ();
8486 return temp;
8488 case FFS_EXPR:
8489 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8490 if (modifier == EXPAND_STACK_PARM)
8491 target = 0;
8492 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8493 if (temp == 0)
8494 abort ();
8495 return temp;
8497 case CLZ_EXPR:
8498 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8499 temp = expand_unop (mode, clz_optab, op0, target, 1);
8500 if (temp == 0)
8501 abort ();
8502 return temp;
8504 case CTZ_EXPR:
8505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8506 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8507 if (temp == 0)
8508 abort ();
8509 return temp;
8511 case POPCOUNT_EXPR:
8512 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8513 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8514 if (temp == 0)
8515 abort ();
8516 return temp;
8518 case PARITY_EXPR:
8519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8520 temp = expand_unop (mode, parity_optab, op0, target, 1);
8521 if (temp == 0)
8522 abort ();
8523 return temp;
8525 /* ??? Can optimize bitwise operations with one constant argument.
8526 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8527 and (a bitwise1 b) bitwise2 b (etc)
8528 but that is probably not worthwhile. */
8530 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8531 boolean values when we want in all cases to compute both of them. In
8532 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8533 as actual zero-or-1 values and then bitwise anding. In cases where
8534 there cannot be any side effects, better code would be made by
8535 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8536 how to recognize those cases. */
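/* Editor's note: illustrative sketch, not part of the original source.
   The distinction described above, for hypothetical operands:

       r = (x > 0) & (y > 0);     both comparisons are evaluated to
                                  0-or-1 values and then ANDed, the
                                  strategy used here for TRUTH_AND_EXPR

       r = (x > 0) && (y > 0);    TRUTH_ANDIF_EXPR: short-circuits, so
                                  the second comparison may be skipped

   Treating the first form like the second is only safe when the second
   operand has no side effects, which is the recognition problem the
   comment above mentions.  */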
8538 case TRUTH_AND_EXPR:
8539 case BIT_AND_EXPR:
8540 this_optab = and_optab;
8541 goto binop;
8543 case TRUTH_OR_EXPR:
8544 case BIT_IOR_EXPR:
8545 this_optab = ior_optab;
8546 goto binop;
8548 case TRUTH_XOR_EXPR:
8549 case BIT_XOR_EXPR:
8550 this_optab = xor_optab;
8551 goto binop;
8553 case LSHIFT_EXPR:
8554 case RSHIFT_EXPR:
8555 case LROTATE_EXPR:
8556 case RROTATE_EXPR:
8557 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8558 subtarget = 0;
8559 if (modifier == EXPAND_STACK_PARM)
8560 target = 0;
8561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8562 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8563 unsignedp);
8565 /* Could determine the answer when only additive constants differ. Also,
8566 the addition of one can be handled by changing the condition. */
8567 case LT_EXPR:
8568 case LE_EXPR:
8569 case GT_EXPR:
8570 case GE_EXPR:
8571 case EQ_EXPR:
8572 case NE_EXPR:
8573 case UNORDERED_EXPR:
8574 case ORDERED_EXPR:
8575 case UNLT_EXPR:
8576 case UNLE_EXPR:
8577 case UNGT_EXPR:
8578 case UNGE_EXPR:
8579 case UNEQ_EXPR:
8580 temp = do_store_flag (exp,
8581 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8582 tmode != VOIDmode ? tmode : mode, 0);
8583 if (temp != 0)
8584 return temp;
8586 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8587 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8588 && original_target
8589 && GET_CODE (original_target) == REG
8590 && (GET_MODE (original_target)
8591 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8593 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8594 VOIDmode, 0);
8596 /* If temp is constant, we can just compute the result. */
8597 if (GET_CODE (temp) == CONST_INT)
8599 if (INTVAL (temp) != 0)
8600 emit_move_insn (target, const1_rtx);
8601 else
8602 emit_move_insn (target, const0_rtx);
8604 return target;
8607 if (temp != original_target)
8609 enum machine_mode mode1 = GET_MODE (temp);
8610 if (mode1 == VOIDmode)
8611 mode1 = tmode != VOIDmode ? tmode : mode;
8613 temp = copy_to_mode_reg (mode1, temp);
8616 op1 = gen_label_rtx ();
8617 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8618 GET_MODE (temp), unsignedp, op1);
8619 emit_move_insn (temp, const1_rtx);
8620 emit_label (op1);
8621 return temp;
8624 /* If no set-flag instruction, must generate a conditional
8625 store into a temporary variable. Drop through
8626 and handle this like && and ||. */
8628 case TRUTH_ANDIF_EXPR:
8629 case TRUTH_ORIF_EXPR:
8630 if (! ignore
8631 && (target == 0
8632 || modifier == EXPAND_STACK_PARM
8633 || ! safe_from_p (target, exp, 1)
8634 /* Make sure we don't have a hard reg (such as function's return
8635 value) live across basic blocks, if not optimizing. */
8636 || (!optimize && GET_CODE (target) == REG
8637 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8638 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8640 if (target)
8641 emit_clr_insn (target);
8643 op1 = gen_label_rtx ();
8644 jumpifnot (exp, op1);
8646 if (target)
8647 emit_0_to_1_insn (target);
8649 emit_label (op1);
8650 return ignore ? const0_rtx : target;
8652 case TRUTH_NOT_EXPR:
8653 if (modifier == EXPAND_STACK_PARM)
8654 target = 0;
8655 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8656 /* The parser is careful to generate TRUTH_NOT_EXPR
8657 only with operands that are always zero or one. */
8658 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8659 target, 1, OPTAB_LIB_WIDEN);
8660 if (temp == 0)
8661 abort ();
8662 return temp;
8664 case COMPOUND_EXPR:
8665 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8666 emit_queue ();
8667 return expand_expr (TREE_OPERAND (exp, 1),
8668 (ignore ? const0_rtx : target),
8669 VOIDmode, modifier);
8671 case COND_EXPR:
8672 /* If we would have a "singleton" (see below) were it not for a
8673 conversion in each arm, bring that conversion back out. */
8674 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8675 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8676 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8677 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8679 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8680 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8682 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8683 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8684 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8685 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8686 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8687 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8688 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8689 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8690 return expand_expr (build1 (NOP_EXPR, type,
8691 build (COND_EXPR, TREE_TYPE (iftrue),
8692 TREE_OPERAND (exp, 0),
8693 iftrue, iffalse)),
8694 target, tmode, modifier);
8698 /* Note that COND_EXPRs whose type is a structure or union
8699 are required to be constructed to contain assignments of
8700 a temporary variable, so that we can evaluate them here
8701 for side effect only. If type is void, we must do likewise. */
8703 /* If an arm of the branch requires a cleanup,
8704 only that cleanup is performed. */
8706 tree singleton = 0;
8707 tree binary_op = 0, unary_op = 0;
8709 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8710 convert it to our mode, if necessary. */
8711 if (integer_onep (TREE_OPERAND (exp, 1))
8712 && integer_zerop (TREE_OPERAND (exp, 2))
8713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8715 if (ignore)
8717 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8718 modifier);
8719 return const0_rtx;
8722 if (modifier == EXPAND_STACK_PARM)
8723 target = 0;
8724 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8725 if (GET_MODE (op0) == mode)
8726 return op0;
8728 if (target == 0)
8729 target = gen_reg_rtx (mode);
8730 convert_move (target, op0, unsignedp);
8731 return target;
8734 /* Check for X ? A + B : A. If we have this, we can copy A to the
8735 output and conditionally add B. Similarly for unary operations.
8736 Don't do this if X has side-effects because those side effects
8737 might affect A or B and the "?" operation is a sequence point in
8738 ANSI. (operand_equal_p tests for side effects.) */
8740 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8741 && operand_equal_p (TREE_OPERAND (exp, 2),
8742 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8743 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8744 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8745 && operand_equal_p (TREE_OPERAND (exp, 1),
8746 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8747 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8748 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8749 && operand_equal_p (TREE_OPERAND (exp, 2),
8750 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8751 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8752 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8753 && operand_equal_p (TREE_OPERAND (exp, 1),
8754 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8755 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8757 /* If we are not to produce a result, we have no target. Otherwise,
8758 if a target was specified use it; it will not be used as an
8759 intermediate target unless it is safe. If no target, use a
8760 temporary. */
8762 if (ignore)
8763 temp = 0;
8764 else if (modifier == EXPAND_STACK_PARM)
8765 temp = assign_temp (type, 0, 0, 1);
8766 else if (original_target
8767 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8768 || (singleton && GET_CODE (original_target) == REG
8769 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8770 && original_target == var_rtx (singleton)))
8771 && GET_MODE (original_target) == mode
8772 #ifdef HAVE_conditional_move
8773 && (! can_conditionally_move_p (mode)
8774 || GET_CODE (original_target) == REG
8775 || TREE_ADDRESSABLE (type))
8776 #endif
8777 && (GET_CODE (original_target) != MEM
8778 || TREE_ADDRESSABLE (type)))
8779 temp = original_target;
8780 else if (TREE_ADDRESSABLE (type))
8781 abort ();
8782 else
8783 temp = assign_temp (type, 0, 0, 1);
8785 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8786 do the test of X as a store-flag operation, do this as
8787 A + ((X != 0) << log C). Similarly for other simple binary
8788 operators. Only do for C == 1 if BRANCH_COST is low. */
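/* Editor's note: illustrative sketch, not part of the original source.
   For a hypothetical

       r = x ? a + 4 : a;

   the branch-free form produced here is

       r = a + ((x != 0) << 2);

   using the store-flag value of X.  The constant must be 1 when
   branches are cheap (BRANCH_COST < 3); with expensive branches any
   power of two is accepted, its log2 supplying the shift count.  */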
8789 if (temp && singleton && binary_op
8790 && (TREE_CODE (binary_op) == PLUS_EXPR
8791 || TREE_CODE (binary_op) == MINUS_EXPR
8792 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8793 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8794 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8795 : integer_onep (TREE_OPERAND (binary_op, 1)))
8796 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8798 rtx result;
8799 tree cond;
8800 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8801 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8802 ? addv_optab : add_optab)
8803 : TREE_CODE (binary_op) == MINUS_EXPR
8804 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8805 ? subv_optab : sub_optab)
8806 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8807 : xor_optab);
8809 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8810 if (singleton == TREE_OPERAND (exp, 1))
8811 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8812 else
8813 cond = TREE_OPERAND (exp, 0);
8815 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8816 ? temp : NULL_RTX),
8817 mode, BRANCH_COST <= 1);
8819 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8820 result = expand_shift (LSHIFT_EXPR, mode, result,
8821 build_int_2 (tree_log2
8822 (TREE_OPERAND
8823 (binary_op, 1)),
8825 (safe_from_p (temp, singleton, 1)
8826 ? temp : NULL_RTX), 0);
8828 if (result)
8830 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8831 return expand_binop (mode, boptab, op1, result, temp,
8832 unsignedp, OPTAB_LIB_WIDEN);
8836 do_pending_stack_adjust ();
8837 NO_DEFER_POP;
8838 op0 = gen_label_rtx ();
8840 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8842 if (temp != 0)
8844 /* If the target conflicts with the other operand of the
8845 binary op, we can't use it. Also, we can't use the target
8846 if it is a hard register, because evaluating the condition
8847 might clobber it. */
8848 if ((binary_op
8849 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8850 || (GET_CODE (temp) == REG
8851 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8852 temp = gen_reg_rtx (mode);
8853 store_expr (singleton, temp,
8854 modifier == EXPAND_STACK_PARM ? 2 : 0);
8856 else
8857 expand_expr (singleton,
8858 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8859 if (singleton == TREE_OPERAND (exp, 1))
8860 jumpif (TREE_OPERAND (exp, 0), op0);
8861 else
8862 jumpifnot (TREE_OPERAND (exp, 0), op0);
8864 start_cleanup_deferral ();
8865 if (binary_op && temp == 0)
8866 /* Just touch the other operand. */
8867 expand_expr (TREE_OPERAND (binary_op, 1),
8868 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8869 else if (binary_op)
8870 store_expr (build (TREE_CODE (binary_op), type,
8871 make_tree (type, temp),
8872 TREE_OPERAND (binary_op, 1)),
8873 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8874 else
8875 store_expr (build1 (TREE_CODE (unary_op), type,
8876 make_tree (type, temp)),
8877 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8878 op1 = op0;
8880 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8881 comparison operator. If we have one of these cases, set the
8882 output to A, branch on A (cse will merge these two references),
8883 then set the output to FOO. */
8884 else if (temp
8885 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8886 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8887 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8888 TREE_OPERAND (exp, 1), 0)
8889 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8890 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8891 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8893 if (GET_CODE (temp) == REG
8894 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8895 temp = gen_reg_rtx (mode);
8896 store_expr (TREE_OPERAND (exp, 1), temp,
8897 modifier == EXPAND_STACK_PARM ? 2 : 0);
8898 jumpif (TREE_OPERAND (exp, 0), op0);
8900 start_cleanup_deferral ();
8901 store_expr (TREE_OPERAND (exp, 2), temp,
8902 modifier == EXPAND_STACK_PARM ? 2 : 0);
8903 op1 = op0;
8905 else if (temp
8906 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8907 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8908 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8909 TREE_OPERAND (exp, 2), 0)
8910 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8911 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8912 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8914 if (GET_CODE (temp) == REG
8915 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8916 temp = gen_reg_rtx (mode);
8917 store_expr (TREE_OPERAND (exp, 2), temp,
8918 modifier == EXPAND_STACK_PARM ? 2 : 0);
8919 jumpifnot (TREE_OPERAND (exp, 0), op0);
8921 start_cleanup_deferral ();
8922 store_expr (TREE_OPERAND (exp, 1), temp,
8923 modifier == EXPAND_STACK_PARM ? 2 : 0);
8924 op1 = op0;
8926 else
8928 op1 = gen_label_rtx ();
8929 jumpifnot (TREE_OPERAND (exp, 0), op0);
8931 start_cleanup_deferral ();
8933 /* One branch of the cond can be void, if it never returns. For
8934 example, A ? throw : E. */
8935 if (temp != 0
8936 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8937 store_expr (TREE_OPERAND (exp, 1), temp,
8938 modifier == EXPAND_STACK_PARM ? 2 : 0);
8939 else
8940 expand_expr (TREE_OPERAND (exp, 1),
8941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8942 end_cleanup_deferral ();
8943 emit_queue ();
8944 emit_jump_insn (gen_jump (op1));
8945 emit_barrier ();
8946 emit_label (op0);
8947 start_cleanup_deferral ();
8948 if (temp != 0
8949 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8950 store_expr (TREE_OPERAND (exp, 2), temp,
8951 modifier == EXPAND_STACK_PARM ? 2 : 0);
8952 else
8953 expand_expr (TREE_OPERAND (exp, 2),
8954 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8957 end_cleanup_deferral ();
8959 emit_queue ();
8960 emit_label (op1);
8961 OK_DEFER_POP;
8963 return temp;
8966 case TARGET_EXPR:
8968 /* Something needs to be initialized, but we didn't know
8969 where that thing was when building the tree. For example,
8970 it could be the return value of a function, or a parameter
8971 to a function which is laid down on the stack, or a temporary
8972 variable which must be passed by reference.
8974 We guarantee that the expression will either be constructed
8975 or copied into our original target. */
8977 tree slot = TREE_OPERAND (exp, 0);
8978 tree cleanups = NULL_TREE;
8979 tree exp1;
8981 if (TREE_CODE (slot) != VAR_DECL)
8982 abort ();
8984 if (! ignore)
8985 target = original_target;
8987 /* Set this here so that if we get a target that refers to a
8988 register variable that's already been used, put_reg_into_stack
8989 knows that it should fix up those uses. */
8990 TREE_USED (slot) = 1;
8992 if (target == 0)
8994 if (DECL_RTL_SET_P (slot))
8996 target = DECL_RTL (slot);
8997 /* We have already expanded the slot, so don't do
8998 it again. (mrs) */
8999 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9000 return target;
9002 else
9004 target = assign_temp (type, 2, 0, 1);
9005 /* All temp slots at this level must not conflict. */
9006 preserve_temp_slots (target);
9007 SET_DECL_RTL (slot, target);
9008 if (TREE_ADDRESSABLE (slot))
9009 put_var_into_stack (slot, /*rescan=*/false);
9011 /* Since SLOT is not known to the called function
9012 to belong to its stack frame, we must build an explicit
9013 cleanup. This case occurs when we must build up a reference
9014 to pass the reference as an argument. In this case,
9015 it is very likely that such a reference need not be
9016 built here. */
9018 if (TREE_OPERAND (exp, 2) == 0)
9019 TREE_OPERAND (exp, 2)
9020 = (*lang_hooks.maybe_build_cleanup) (slot);
9021 cleanups = TREE_OPERAND (exp, 2);
9024 else
9026 /* This case does occur, when expanding a parameter which
9027 needs to be constructed on the stack. The target
9028 is the actual stack address that we want to initialize.
9029 The function we call will perform the cleanup in this case. */
9031 /* If we have already assigned it space, use that space,
9032 not the target that we were passed in, as our target
9033 parameter is only a hint. */
9034 if (DECL_RTL_SET_P (slot))
9036 target = DECL_RTL (slot);
9037 /* We have already expanded the slot, so don't do
9038 it again. (mrs) */
9039 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9040 return target;
9042 else
9044 SET_DECL_RTL (slot, target);
9045 /* If we must have an addressable slot, then make sure that
9046 the RTL that we just stored in slot is OK. */
9047 if (TREE_ADDRESSABLE (slot))
9048 put_var_into_stack (slot, /*rescan=*/true);
9052 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
9053 /* Mark it as expanded. */
9054 TREE_OPERAND (exp, 1) = NULL_TREE;
9056 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
9058 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
9060 return target;
9063 case INIT_EXPR:
9065 tree lhs = TREE_OPERAND (exp, 0);
9066 tree rhs = TREE_OPERAND (exp, 1);
9068 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9069 return temp;
9072 case MODIFY_EXPR:
9074 /* If lhs is complex, expand calls in rhs before computing it.
9075 That's so we don't compute a pointer and save it over a
9076 call. If lhs is simple, compute it first so we can give it
9077 as a target if the rhs is just a call. This avoids an
9078 extra temp and copy, and that prevents a partial subsumption
9079 which makes bad code. Actually we could treat
9080 component_ref's of vars like vars. */
9082 tree lhs = TREE_OPERAND (exp, 0);
9083 tree rhs = TREE_OPERAND (exp, 1);
9085 temp = 0;
9087 /* Check for |= or &= of a bitfield of size one into another bitfield
9088 of size 1. In this case, (unless we need the result of the
9089 assignment) we can do this more efficiently with a
9090 test followed by an assignment, if necessary.
9092 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9093 things change so we do, this code should be enhanced to
9094 support it. */
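/* Editor's note: illustrative sketch, not part of the original source.
   The pattern recognized here is, for a hypothetical

       struct s { unsigned a : 1; unsigned b : 1; } *p;
       p->a |= p->b;              (with the result of the assignment unused)

   which is emitted as a test and a conditional store --
   "if (p->b) p->a = 1;" -- instead of reading, OR-ing and rewriting the
   A bitfield.  The &= form similarly becomes "if (!p->b) p->a = 0;".  */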
9095 if (ignore
9096 && TREE_CODE (lhs) == COMPONENT_REF
9097 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9098 || TREE_CODE (rhs) == BIT_AND_EXPR)
9099 && TREE_OPERAND (rhs, 0) == lhs
9100 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9101 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9102 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9104 rtx label = gen_label_rtx ();
9106 do_jump (TREE_OPERAND (rhs, 1),
9107 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9108 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9109 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9110 (TREE_CODE (rhs) == BIT_IOR_EXPR
9111 ? integer_one_node
9112 : integer_zero_node)),
9113 0, 0);
9114 do_pending_stack_adjust ();
9115 emit_label (label);
9116 return const0_rtx;
9119 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
9121 return temp;
9124 case RETURN_EXPR:
9125 if (!TREE_OPERAND (exp, 0))
9126 expand_null_return ();
9127 else
9128 expand_return (TREE_OPERAND (exp, 0));
9129 return const0_rtx;
9131 case PREINCREMENT_EXPR:
9132 case PREDECREMENT_EXPR:
9133 return expand_increment (exp, 0, ignore);
9135 case POSTINCREMENT_EXPR:
9136 case POSTDECREMENT_EXPR:
9137 /* Faster to treat as pre-increment if result is not used. */
9138 return expand_increment (exp, ! ignore, ignore);
9140 case ADDR_EXPR:
9141 if (modifier == EXPAND_STACK_PARM)
9142 target = 0;
9143 /* Are we taking the address of a nested function? */
9144 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
9145 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
9146 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9147 && ! TREE_STATIC (exp))
9149 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9150 op0 = force_operand (op0, target);
9152 /* If we are taking the address of something erroneous, just
9153 return a zero. */
9154 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9155 return const0_rtx;
9156 /* If we are taking the address of a constant and are at the
9157 top level, we have to use output_constant_def since we can't
9158 call force_const_mem at top level. */
9159 else if (cfun == 0
9160 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9161 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9162 == 'c')))
9163 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9164 else
9166 /* We make sure to pass const0_rtx down if we came in with
9167 ignore set, to avoid doing the cleanups twice. */
9168 op0 = expand_expr (TREE_OPERAND (exp, 0),
9169 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9170 (modifier == EXPAND_INITIALIZER
9171 ? modifier : EXPAND_CONST_ADDRESS));
9173 /* If we are going to ignore the result, OP0 will have been set
9174 to const0_rtx, so just return it. Don't get confused and
9175 think we are taking the address of the constant. */
9176 if (ignore)
9177 return op0;
9179 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9180 clever and return a REG when given a MEM. */
9181 op0 = protect_from_queue (op0, 1);
9183 /* We would like the object in memory. If it is a constant, we can
9184 have it be statically allocated into memory. For a non-constant,
9185 we need to allocate some memory and store the value into it. */
9187 if (CONSTANT_P (op0))
9188 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9189 op0);
9190 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9191 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9192 || GET_CODE (op0) == PARALLEL)
9194 /* If the operand is a SAVE_EXPR, we can deal with this by
9195 forcing the SAVE_EXPR into memory. */
9196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9198 put_var_into_stack (TREE_OPERAND (exp, 0),
9199 /*rescan=*/true);
9200 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9202 else
9204 /* If this object is in a register, it can't be BLKmode. */
9205 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9206 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9208 if (GET_CODE (op0) == PARALLEL)
9209 /* Handle calls that pass values in multiple
9210 non-contiguous locations. The Irix 6 ABI has examples
9211 of this. */
9212 emit_group_store (memloc, op0,
9213 int_size_in_bytes (inner_type));
9214 else
9215 emit_move_insn (memloc, op0);
9217 op0 = memloc;
9221 if (GET_CODE (op0) != MEM)
9222 abort ();
9224 mark_temp_addr_taken (op0);
9225 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9227 op0 = XEXP (op0, 0);
9228 #ifdef POINTERS_EXTEND_UNSIGNED
9229 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9230 && mode == ptr_mode)
9231 op0 = convert_memory_address (ptr_mode, op0);
9232 #endif
9233 return op0;
9236 /* If OP0 is not aligned at least as much as the type requires, we
9237 need to make a temporary, copy OP0 to it, and take the address of
9238 the temporary. We want to use the alignment of the type, not of
9239 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9240 the test for BLKmode means that can't happen. The test for
9241 BLKmode is because we never make mis-aligned MEMs with
9242 non-BLKmode.
9244 We don't need to do this at all if the machine doesn't have
9245 strict alignment. */
9246 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9247 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9248 > MEM_ALIGN (op0))
9249 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9251 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9252 rtx new;
9254 if (TYPE_ALIGN_OK (inner_type))
9255 abort ();
9257 if (TREE_ADDRESSABLE (inner_type))
9259 /* We can't make a bitwise copy of this object, so fail. */
9260 error ("cannot take the address of an unaligned member");
9261 return const0_rtx;
9264 new = assign_stack_temp_for_type
9265 (TYPE_MODE (inner_type),
9266 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9267 : int_size_in_bytes (inner_type),
9268 1, build_qualified_type (inner_type,
9269 (TYPE_QUALS (inner_type)
9270 | TYPE_QUAL_CONST)));
9272 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9273 (modifier == EXPAND_STACK_PARM
9274 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9276 op0 = new;
9279 op0 = force_operand (XEXP (op0, 0), target);
9282 if (flag_force_addr
9283 && GET_CODE (op0) != REG
9284 && modifier != EXPAND_CONST_ADDRESS
9285 && modifier != EXPAND_INITIALIZER
9286 && modifier != EXPAND_SUM)
9287 op0 = force_reg (Pmode, op0);
9289 if (GET_CODE (op0) == REG
9290 && ! REG_USERVAR_P (op0))
9291 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9293 #ifdef POINTERS_EXTEND_UNSIGNED
9294 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9295 && mode == ptr_mode)
9296 op0 = convert_memory_address (ptr_mode, op0);
9297 #endif
9299 return op0;
9301 case ENTRY_VALUE_EXPR:
9302 abort ();
9304 /* COMPLEX type for Extended Pascal & Fortran */
9305 case COMPLEX_EXPR:
9307 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9308 rtx insns;
9310 /* Get the rtx code of the operands. */
9311 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9312 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9314 if (! target)
9315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9317 start_sequence ();
9319 /* Move the real (op0) and imaginary (op1) parts to their location. */
9320 emit_move_insn (gen_realpart (mode, target), op0);
9321 emit_move_insn (gen_imagpart (mode, target), op1);
9323 insns = get_insns ();
9324 end_sequence ();
9326 /* Complex construction should appear as a single unit. */
9327 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9328 each with a separate pseudo as destination.
9329 It's not correct for flow to treat them as a unit. */
9330 if (GET_CODE (target) != CONCAT)
9331 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9332 else
9333 emit_insn (insns);
9335 return target;
9338 case REALPART_EXPR:
9339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9340 return gen_realpart (mode, op0);
9342 case IMAGPART_EXPR:
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9344 return gen_imagpart (mode, op0);
9346 case CONJ_EXPR:
9348 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9349 rtx imag_t;
9350 rtx insns;
9352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9354 if (! target)
9355 target = gen_reg_rtx (mode);
9357 start_sequence ();
9359 /* Store the realpart and the negated imagpart to target. */
9360 emit_move_insn (gen_realpart (partmode, target),
9361 gen_realpart (partmode, op0));
9363 imag_t = gen_imagpart (partmode, target);
9364 temp = expand_unop (partmode,
9365 ! unsignedp && flag_trapv
9366 && (GET_MODE_CLASS(partmode) == MODE_INT)
9367 ? negv_optab : neg_optab,
9368 gen_imagpart (partmode, op0), imag_t, 0);
9369 if (temp != imag_t)
9370 emit_move_insn (imag_t, temp);
9372 insns = get_insns ();
9373 end_sequence ();
9375 /* Conjugate should appear as a single unit.
9376 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9377 each with a separate pseudo as destination.
9378 It's not correct for flow to treat them as a unit. */
9379 if (GET_CODE (target) != CONCAT)
9380 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9381 else
9382 emit_insn (insns);
9384 return target;
9387 case TRY_CATCH_EXPR:
9389 tree handler = TREE_OPERAND (exp, 1);
9391 expand_eh_region_start ();
9393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9395 expand_eh_region_end_cleanup (handler);
9397 return op0;
9400 case TRY_FINALLY_EXPR:
9402 tree try_block = TREE_OPERAND (exp, 0);
9403 tree finally_block = TREE_OPERAND (exp, 1);
9405 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9407 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9408 is not sufficient, so we cannot expand the block twice.
9409 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9410 expand the thing only once. */
9411 /* When not optimizing, we go ahead with this form since
9412 (1) user breakpoints operate more predictably without
9413 code duplication, and
9414 (2) we're not running any of the global optimizers
9415 that would explode in time/space with the highly
9416 connected CFG created by the indirect branching. */
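/* Rough sketch of the control flow the code below arranges (an
   illustration only, not literal output):
       ... TRY_BLOCK, whose registered cleanup "calls" FINALLY_LABEL,
           recording the return point in RETURN_LINK ...
       goto DONE_LABEL;
     FINALLY_LABEL:
       ... FINALLY_BLOCK ...
       jump *RETURN_LINK;
     DONE_LABEL:  */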
9418 rtx finally_label = gen_label_rtx ();
9419 rtx done_label = gen_label_rtx ();
9420 rtx return_link = gen_reg_rtx (Pmode);
9421 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9422 (tree) finally_label, (tree) return_link);
9423 TREE_SIDE_EFFECTS (cleanup) = 1;
9425 /* Start a new binding layer that will keep track of all cleanup
9426 actions to be performed. */
9427 expand_start_bindings (2);
9428 target_temp_slot_level = temp_slot_level;
9430 expand_decl_cleanup (NULL_TREE, cleanup);
9431 op0 = expand_expr (try_block, target, tmode, modifier);
9433 preserve_temp_slots (op0);
9434 expand_end_bindings (NULL_TREE, 0, 0);
9435 emit_jump (done_label);
9436 emit_label (finally_label);
9437 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9438 emit_indirect_jump (return_link);
9439 emit_label (done_label);
9441 else
9443 expand_start_bindings (2);
9444 target_temp_slot_level = temp_slot_level;
9446 expand_decl_cleanup (NULL_TREE, finally_block);
9447 op0 = expand_expr (try_block, target, tmode, modifier);
9449 preserve_temp_slots (op0);
9450 expand_end_bindings (NULL_TREE, 0, 0);
9453 return op0;
9456 case GOTO_SUBROUTINE_EXPR:
9458 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9459 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9460 rtx return_address = gen_label_rtx ();
9461 emit_move_insn (return_link,
9462 gen_rtx_LABEL_REF (Pmode, return_address));
9463 emit_jump (subr);
9464 emit_label (return_address);
9465 return const0_rtx;
9468 case VA_ARG_EXPR:
9469 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9471 case EXC_PTR_EXPR:
9472 return get_exception_pointer (cfun);
9474 case FDESC_EXPR:
9475 /* Function descriptors are not valid except as
9476 initialization constants, and should not be expanded. */
9477 abort ();
9479 default:
9480 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9483 /* Here to do an ordinary binary operator, generating an instruction
9484 from the optab already placed in `this_optab'. */
9485 binop:
9486 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9487 subtarget = 0;
9488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9490 binop2:
9491 if (modifier == EXPAND_STACK_PARM)
9492 target = 0;
9493 temp = expand_binop (mode, this_optab, op0, op1, target,
9494 unsignedp, OPTAB_LIB_WIDEN);
9495 if (temp == 0)
9496 abort ();
9497 return temp;
9500 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9501 when applied to the address of EXP produces an address known to be
9502 aligned more than BIGGEST_ALIGNMENT. */
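/* For instance (a sketch), an OFFSET tree of roughly the shape
       (- (T) &EXP) & (ALIGN - 1)
   where T is some integer type, ALIGN is a power of 2 greater than
   BIGGEST_ALIGNMENT, and the operations appear as NEGATE_EXPR,
   ADDR_EXPR and BIT_AND_EXPR nodes, is what this function recognizes:
   adding such an offset to &EXP rounds it up to a multiple of ALIGN.  */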
9504 static int
9505 is_aligning_offset (offset, exp)
9506 tree offset;
9507 tree exp;
9509 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9510 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9511 || TREE_CODE (offset) == NOP_EXPR
9512 || TREE_CODE (offset) == CONVERT_EXPR
9513 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9514 offset = TREE_OPERAND (offset, 0);
9516 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9517 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9518 if (TREE_CODE (offset) != BIT_AND_EXPR
9519 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9520 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9521 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9522 return 0;
9524 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9525 It must be NEGATE_EXPR. Then strip any more conversions. */
9526 offset = TREE_OPERAND (offset, 0);
9527 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9528 || TREE_CODE (offset) == NOP_EXPR
9529 || TREE_CODE (offset) == CONVERT_EXPR)
9530 offset = TREE_OPERAND (offset, 0);
9532 if (TREE_CODE (offset) != NEGATE_EXPR)
9533 return 0;
9535 offset = TREE_OPERAND (offset, 0);
9536 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9537 || TREE_CODE (offset) == NOP_EXPR
9538 || TREE_CODE (offset) == CONVERT_EXPR)
9539 offset = TREE_OPERAND (offset, 0);
9541 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9542 whose type is the same as EXP. */
9543 return (TREE_CODE (offset) == ADDR_EXPR
9544 && (TREE_OPERAND (offset, 0) == exp
9545 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9546 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9547 == TREE_TYPE (exp)))));
9550 /* Return the tree node if ARG corresponds to a string constant, or zero
9551 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9552 in bytes within the string that ARG is accessing. The type of the
9553 offset will be `sizetype'. */
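/* For example (a sketch): for ARG corresponding to "hello" + 2, i.e.
       PLUS_EXPR (ADDR_EXPR (STRING_CST "hello"), 2)
   this returns the STRING_CST and sets *PTR_OFFSET to (sizetype) 2.  */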
9555 tree
9556 string_constant (arg, ptr_offset)
9557 tree arg;
9558 tree *ptr_offset;
9560 STRIP_NOPS (arg);
9562 if (TREE_CODE (arg) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9565 *ptr_offset = size_zero_node;
9566 return TREE_OPERAND (arg, 0);
9568 else if (TREE_CODE (arg) == PLUS_EXPR)
9570 tree arg0 = TREE_OPERAND (arg, 0);
9571 tree arg1 = TREE_OPERAND (arg, 1);
9573 STRIP_NOPS (arg0);
9574 STRIP_NOPS (arg1);
9576 if (TREE_CODE (arg0) == ADDR_EXPR
9577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9579 *ptr_offset = convert (sizetype, arg1);
9580 return TREE_OPERAND (arg0, 0);
9582 else if (TREE_CODE (arg1) == ADDR_EXPR
9583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9585 *ptr_offset = convert (sizetype, arg0);
9586 return TREE_OPERAND (arg1, 0);
9590 return 0;
9593 /* Expand code for a post- or pre- increment or decrement
9594 and return the RTX for the result.
9595 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
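/* E.g. for "i++" whose value is used, POST is 1 and the value returned
   corresponds to the old value of I; for "++i" (POST == 0) the
   incremented value is returned.  (A sketch of the contract only; the
   body below handles many special cases.)  */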
9597 static rtx
9598 expand_increment (exp, post, ignore)
9599 tree exp;
9600 int post, ignore;
9602 rtx op0, op1;
9603 rtx temp, value;
9604 tree incremented = TREE_OPERAND (exp, 0);
9605 optab this_optab = add_optab;
9606 int icode;
9607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9608 int op0_is_copy = 0;
9609 int single_insn = 0;
9610 /* 1 means we can't store into OP0 directly,
9611 because it is a subreg narrower than a word,
9612 and we don't dare clobber the rest of the word. */
9613 int bad_subreg = 0;
9615 /* Stabilize any component ref that might need to be
9616 evaluated more than once below. */
9617 if (!post
9618 || TREE_CODE (incremented) == BIT_FIELD_REF
9619 || (TREE_CODE (incremented) == COMPONENT_REF
9620 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9621 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9622 incremented = stabilize_reference (incremented);
9623 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9624 ones into save exprs so that they don't accidentally get evaluated
9625 more than once by the code below. */
9626 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9627 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9628 incremented = save_expr (incremented);
9630 /* Compute the operands as RTX.
9631 Note whether OP0 is the actual lvalue or a copy of it:
9632 I believe it is a copy iff it is a register or subreg
9633 and insns were generated in computing it. */
9635 temp = get_last_insn ();
9636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9639 in place but instead must do sign- or zero-extension during assignment,
9640 so we copy it into a new register and let the code below use it as
9641 a copy.
9643 Note that we can safely modify this SUBREG since it is known not to be
9644 shared (it was made by the expand_expr call above). */
9646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9648 if (post)
9649 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9650 else
9651 bad_subreg = 1;
9653 else if (GET_CODE (op0) == SUBREG
9654 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9656 /* We cannot increment this SUBREG in place. If we are
9657 post-incrementing, get a copy of the old value. Otherwise,
9658 just mark that we cannot increment in place. */
9659 if (post)
9660 op0 = copy_to_reg (op0);
9661 else
9662 bad_subreg = 1;
9665 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9666 && temp != get_last_insn ());
9667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9669 /* Decide whether incrementing or decrementing. */
9670 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9671 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9672 this_optab = sub_optab;
9674 /* Convert decrement by a constant into a negative increment. */
9675 if (this_optab == sub_optab
9676 && GET_CODE (op1) == CONST_INT)
9678 op1 = GEN_INT (-INTVAL (op1));
9679 this_optab = add_optab;
9682 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9683 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9685 /* For a preincrement, see if we can do this with a single instruction. */
9686 if (!post)
9688 icode = (int) this_optab->handlers[(int) mode].insn_code;
9689 if (icode != (int) CODE_FOR_nothing
9690 /* Make sure that OP0 is valid for operands 0 and 1
9691 of the insn we want to queue. */
9692 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9693 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9694 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9695 single_insn = 1;
9698 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9699 then we cannot just increment OP0. We must therefore contrive to
9700 increment the original value. Then, for postincrement, we can return
9701 OP0 since it is a copy of the old value. For preincrement, expand here
9702 unless we can do it with a single insn.
9704 Likewise if storing directly into OP0 would clobber high bits
9705 we need to preserve (bad_subreg). */
9706 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9708 /* This is the easiest way to increment the value wherever it is.
9709 Problems with multiple evaluation of INCREMENTED are prevented
9710 because either (1) it is a component_ref or preincrement,
9711 in which case it was stabilized above, or (2) it is an array_ref
9712 with constant index in an array in a register, which is
9713 safe to reevaluate. */
9714 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9716 ? MINUS_EXPR : PLUS_EXPR),
9717 TREE_TYPE (exp),
9718 incremented,
9719 TREE_OPERAND (exp, 1));
9721 while (TREE_CODE (incremented) == NOP_EXPR
9722 || TREE_CODE (incremented) == CONVERT_EXPR)
9724 newexp = convert (TREE_TYPE (incremented), newexp);
9725 incremented = TREE_OPERAND (incremented, 0);
9728 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9729 return post ? op0 : temp;
9732 if (post)
9734 /* We have a true reference to the value in OP0.
9735 If there is an insn to add or subtract in this mode, queue it.
9736 Queueing the increment insn avoids the register shuffling
9737 that often results if we must increment now and first save
9738 the old value for subsequent use. */
9740 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9741 op0 = stabilize (op0);
9742 #endif
9744 icode = (int) this_optab->handlers[(int) mode].insn_code;
9745 if (icode != (int) CODE_FOR_nothing
9746 /* Make sure that OP0 is valid for operands 0 and 1
9747 of the insn we want to queue. */
9748 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9749 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9751 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9752 op1 = force_reg (mode, op1);
9754 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9756 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9758 rtx addr = (general_operand (XEXP (op0, 0), mode)
9759 ? force_reg (Pmode, XEXP (op0, 0))
9760 : copy_to_reg (XEXP (op0, 0)));
9761 rtx temp, result;
9763 op0 = replace_equiv_address (op0, addr);
9764 temp = force_reg (GET_MODE (op0), op0);
9765 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9766 op1 = force_reg (mode, op1);
9768 /* The increment queue is LIFO, thus we have to `queue'
9769 the instructions in reverse order. */
9770 enqueue_insn (op0, gen_move_insn (op0, temp));
9771 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9772 return result;
9776 /* Preincrement, or we can't increment with one simple insn. */
9777 if (post)
9778 /* Save a copy of the value before inc or dec, to return it later. */
9779 temp = value = copy_to_reg (op0);
9780 else
9781 /* Arrange to return the incremented value. */
9782 /* Copy the rtx because expand_binop will protect from the queue,
9783 and the results of that would be invalid for us to return
9784 if our caller does emit_queue before using our result. */
9785 temp = copy_rtx (value = op0);
9787 /* Increment however we can. */
9788 op1 = expand_binop (mode, this_optab, value, op1, op0,
9789 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9791 /* Make sure the value is stored into OP0. */
9792 if (op1 != op0)
9793 emit_move_insn (op0, op1);
9795 return temp;
9798 /* Generate code to calculate EXP using a store-flag instruction
9799 and return an rtx for the result. EXP is either a comparison
9800 or a TRUTH_NOT_EXPR whose operand is a comparison.
9802 If TARGET is nonzero, store the result there if convenient.
9804 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9805 cheap.
9807 Return zero if there is no suitable set-flag instruction
9808 available on this machine.
9810 Once expand_expr has been called on the arguments of the comparison,
9811 we are committed to doing the store flag, since it is not safe to
9812 re-evaluate the expression. We emit the store-flag insn by calling
9813 emit_store_flag, but only expand the arguments if we have a reason
9814 to believe that emit_store_flag will be successful. If we think that
9815 it will, but it isn't, we have to simulate the store-flag with a
9816 set/jump/set sequence. */
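/* For example (a sketch of the intent): for a source expression such as
       flag = (a < b);
   this tries to emit a single store-flag ("scc") instruction that
   deposits 0 or 1 directly, instead of a compare-and-branch sequence;
   the details depend entirely on the target.  */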
9818 static rtx
9819 do_store_flag (exp, target, mode, only_cheap)
9820 tree exp;
9821 rtx target;
9822 enum machine_mode mode;
9823 int only_cheap;
9825 enum rtx_code code;
9826 tree arg0, arg1, type;
9827 tree tem;
9828 enum machine_mode operand_mode;
9829 int invert = 0;
9830 int unsignedp;
9831 rtx op0, op1;
9832 enum insn_code icode;
9833 rtx subtarget = target;
9834 rtx result, label;
9836 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9837 result at the end. We can't simply invert the test since it would
9838 have already been inverted if it were valid. This case occurs for
9839 some floating-point comparisons. */
9841 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9842 invert = 1, exp = TREE_OPERAND (exp, 0);
9844 arg0 = TREE_OPERAND (exp, 0);
9845 arg1 = TREE_OPERAND (exp, 1);
9847 /* Don't crash if the comparison was erroneous. */
9848 if (arg0 == error_mark_node || arg1 == error_mark_node)
9849 return const0_rtx;
9851 type = TREE_TYPE (arg0);
9852 operand_mode = TYPE_MODE (type);
9853 unsignedp = TREE_UNSIGNED (type);
9855 /* We won't bother with BLKmode store-flag operations because it would mean
9856 passing a lot of information to emit_store_flag. */
9857 if (operand_mode == BLKmode)
9858 return 0;
9860 /* We won't bother with store-flag operations involving function pointers
9861 when function pointers must be canonicalized before comparisons. */
9862 #ifdef HAVE_canonicalize_funcptr_for_compare
9863 if (HAVE_canonicalize_funcptr_for_compare
9864 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9865 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9866 == FUNCTION_TYPE))
9867 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9868 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9869 == FUNCTION_TYPE))))
9870 return 0;
9871 #endif
9873 STRIP_NOPS (arg0);
9874 STRIP_NOPS (arg1);
9876 /* Get the rtx comparison code to use. We know that EXP is a comparison
9877 operation of some type. Some comparisons against 1 and -1 can be
9878 converted to comparisons with zero. Do so here so that the tests
9879 below will be aware that we have a comparison with zero. These
9880 tests will not catch constants in the first operand, but constants
9881 are rarely passed as the first operand. */
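/* E.g. a signed "x < 1" is mapped to "x <= 0" and a signed "x > -1"
   to "x >= 0" by the switch below (illustrative examples only).  */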
9883 switch (TREE_CODE (exp))
9885 case EQ_EXPR:
9886 code = EQ;
9887 break;
9888 case NE_EXPR:
9889 code = NE;
9890 break;
9891 case LT_EXPR:
9892 if (integer_onep (arg1))
9893 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9894 else
9895 code = unsignedp ? LTU : LT;
9896 break;
9897 case LE_EXPR:
9898 if (! unsignedp && integer_all_onesp (arg1))
9899 arg1 = integer_zero_node, code = LT;
9900 else
9901 code = unsignedp ? LEU : LE;
9902 break;
9903 case GT_EXPR:
9904 if (! unsignedp && integer_all_onesp (arg1))
9905 arg1 = integer_zero_node, code = GE;
9906 else
9907 code = unsignedp ? GTU : GT;
9908 break;
9909 case GE_EXPR:
9910 if (integer_onep (arg1))
9911 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9912 else
9913 code = unsignedp ? GEU : GE;
9914 break;
9916 case UNORDERED_EXPR:
9917 code = UNORDERED;
9918 break;
9919 case ORDERED_EXPR:
9920 code = ORDERED;
9921 break;
9922 case UNLT_EXPR:
9923 code = UNLT;
9924 break;
9925 case UNLE_EXPR:
9926 code = UNLE;
9927 break;
9928 case UNGT_EXPR:
9929 code = UNGT;
9930 break;
9931 case UNGE_EXPR:
9932 code = UNGE;
9933 break;
9934 case UNEQ_EXPR:
9935 code = UNEQ;
9936 break;
9938 default:
9939 abort ();
9942 /* Put a constant second. */
9943 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9945 tem = arg0; arg0 = arg1; arg1 = tem;
9946 code = swap_condition (code);
9949 /* If this is an equality or inequality test of a single bit, we can
9950 do this by shifting the bit being tested to the low-order bit and
9951 masking the result with the constant 1. If the condition was EQ,
9952 we xor it with 1. This does not require an scc insn and is faster
9953 than an scc insn even if we have it. */
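/* For instance (a sketch): "(x & 0x10) != 0" can be computed as
       (x >> 4) & 1
   and the EQ form as ((x >> 4) & 1) ^ 1, with the shift, AND and XOR
   each omitted below when it is not needed.  */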
9955 if ((code == NE || code == EQ)
9956 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9957 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9959 tree inner = TREE_OPERAND (arg0, 0);
9960 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9961 int ops_unsignedp;
9963 /* If INNER is a right shift by a constant and it plus BITNUM does
9964 not overflow, adjust BITNUM and INNER. */
9966 if (TREE_CODE (inner) == RSHIFT_EXPR
9967 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9968 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9969 && bitnum < TYPE_PRECISION (type)
9970 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9971 bitnum - TYPE_PRECISION (type)))
9973 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9974 inner = TREE_OPERAND (inner, 0);
9977 /* If we are going to be able to omit the AND below, we must do our
9978 operations as unsigned. If we must use the AND, we have a choice.
9979 Normally unsigned is faster, but for some machines signed is. */
9980 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9981 #ifdef LOAD_EXTEND_OP
9982 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9983 #else
9985 #endif
9988 if (! get_subtarget (subtarget)
9989 || GET_MODE (subtarget) != operand_mode
9990 || ! safe_from_p (subtarget, inner, 1))
9991 subtarget = 0;
9993 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
9995 if (bitnum != 0)
9996 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
9997 size_int (bitnum), subtarget, ops_unsignedp);
9999 if (GET_MODE (op0) != mode)
10000 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10002 if ((code == EQ && ! invert) || (code == NE && invert))
10003 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10004 ops_unsignedp, OPTAB_LIB_WIDEN);
10006 /* Put the AND last so it can combine with more things. */
10007 if (bitnum != TYPE_PRECISION (type) - 1)
10008 op0 = expand_and (mode, op0, const1_rtx, subtarget);
10010 return op0;
10013 /* Now see if we are likely to be able to do this. Return if not. */
10014 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10015 return 0;
10017 icode = setcc_gen_code[(int) code];
10018 if (icode == CODE_FOR_nothing
10019 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10021 /* We can only do this if it is one of the special cases that
10022 can be handled without an scc insn. */
10023 if ((code == LT && integer_zerop (arg1))
10024 || (! only_cheap && code == GE && integer_zerop (arg1)))
10026 else if (BRANCH_COST >= 0
10027 && ! only_cheap && (code == NE || code == EQ)
10028 && TREE_CODE (type) != REAL_TYPE
10029 && ((abs_optab->handlers[(int) operand_mode].insn_code
10030 != CODE_FOR_nothing)
10031 || (ffs_optab->handlers[(int) operand_mode].insn_code
10032 != CODE_FOR_nothing)))
10034 else
10035 return 0;
10038 if (! get_subtarget (target)
10039 || GET_MODE (subtarget) != operand_mode
10040 || ! safe_from_p (subtarget, arg1, 1))
10041 subtarget = 0;
10043 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10044 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10046 if (target == 0)
10047 target = gen_reg_rtx (mode);
10049 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10050 because, if emit_store_flag does anything, it will succeed and
10051 OP0 and OP1 will not be used subsequently. */
10053 result = emit_store_flag (target, code,
10054 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10055 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10056 operand_mode, unsignedp, 1);
10058 if (result)
10060 if (invert)
10061 result = expand_binop (mode, xor_optab, result, const1_rtx,
10062 result, 0, OPTAB_LIB_WIDEN);
10063 return result;
10066 /* If this failed, we have to do this with set/compare/jump/set code. */
10067 if (GET_CODE (target) != REG
10068 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10069 target = gen_reg_rtx (GET_MODE (target));
10071 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10072 result = compare_from_rtx (op0, op1, code, unsignedp,
10073 operand_mode, NULL_RTX);
10074 if (GET_CODE (result) == CONST_INT)
10075 return (((result == const0_rtx && ! invert)
10076 || (result != const0_rtx && invert))
10077 ? const0_rtx : const1_rtx);
10079 /* The code of RESULT may not match CODE if compare_from_rtx
10080 decided to swap its operands and reverse the original code.
10082 We know that compare_from_rtx returns either a CONST_INT or
10083 a new comparison code, so it is safe to just extract the
10084 code from RESULT. */
10085 code = GET_CODE (result);
10087 label = gen_label_rtx ();
10088 if (bcc_gen_fctn[(int) code] == 0)
10089 abort ();
10091 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10092 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10093 emit_label (label);
10095 return target;
10099 /* Stubs in case we haven't got a casesi insn. */
10100 #ifndef HAVE_casesi
10101 # define HAVE_casesi 0
10102 # define gen_casesi(a, b, c, d, e) (0)
10103 # define CODE_FOR_casesi CODE_FOR_nothing
10104 #endif
10106 /* If the machine does not have a case insn that compares the bounds,
10107 this means extra overhead for dispatch tables, which raises the
10108 threshold for using them. */
10109 #ifndef CASE_VALUES_THRESHOLD
10110 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10111 #endif /* CASE_VALUES_THRESHOLD */
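/* So, by these defaults, a switch needs roughly at least 4 case values
   (5 without a casesi pattern) before a dispatch table is considered;
   the exact comparison is made by the switch-statement expander, and
   targets may define CASE_VALUES_THRESHOLD themselves.  */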
10113 unsigned int
10114 case_values_threshold ()
10116 return CASE_VALUES_THRESHOLD;
10119 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10120 0 otherwise (i.e. if there is no casesi instruction). */
10121 int
10122 try_casesi (index_type, index_expr, minval, range,
10123 table_label, default_label)
10124 tree index_type, index_expr, minval, range;
10125 rtx table_label ATTRIBUTE_UNUSED;
10126 rtx default_label;
10128 enum machine_mode index_mode = SImode;
10129 int index_bits = GET_MODE_BITSIZE (index_mode);
10130 rtx op1, op2, index;
10131 enum machine_mode op_mode;
10133 if (! HAVE_casesi)
10134 return 0;
10136 /* Convert the index to SImode. */
10137 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10139 enum machine_mode omode = TYPE_MODE (index_type);
10140 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10142 /* We must handle the endpoints in the original mode. */
10143 index_expr = build (MINUS_EXPR, index_type,
10144 index_expr, minval);
10145 minval = integer_zero_node;
10146 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10147 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10148 omode, 1, default_label);
10149 /* Now we can safely truncate. */
10150 index = convert_to_mode (index_mode, index, 0);
10152 else
10154 if (TYPE_MODE (index_type) != index_mode)
10156 index_expr = convert ((*lang_hooks.types.type_for_size)
10157 (index_bits, 0), index_expr);
10158 index_type = TREE_TYPE (index_expr);
10161 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10163 emit_queue ();
10164 index = protect_from_queue (index, 0);
10165 do_pending_stack_adjust ();
10167 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10168 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10169 (index, op_mode))
10170 index = copy_to_mode_reg (op_mode, index);
10172 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10174 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10175 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10176 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10177 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10178 (op1, op_mode))
10179 op1 = copy_to_mode_reg (op_mode, op1);
10181 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10183 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10184 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10185 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10186 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10187 (op2, op_mode))
10188 op2 = copy_to_mode_reg (op_mode, op2);
10190 emit_jump_insn (gen_casesi (index, op1, op2,
10191 table_label, default_label));
10192 return 1;
10195 /* Attempt to generate a tablejump instruction; same concept. */
10196 #ifndef HAVE_tablejump
10197 #define HAVE_tablejump 0
10198 #define gen_tablejump(x, y) (0)
10199 #endif
10201 /* Subroutine of the next function.
10203 INDEX is the value being switched on, with the lowest value
10204 in the table already subtracted.
10205 MODE is its expected mode (needed if INDEX is constant).
10206 RANGE is the length of the jump table.
10207 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10209 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10210 index value is out of range. */
10212 static void
10213 do_tablejump (index, mode, range, table_label, default_label)
10214 rtx index, range, table_label, default_label;
10215 enum machine_mode mode;
10217 rtx temp, vector;
10219 if (INTVAL (range) > cfun->max_jumptable_ents)
10220 cfun->max_jumptable_ents = INTVAL (range);
10222 /* Do an unsigned comparison (in the proper mode) between the index
10223 expression and the value which represents the length of the range.
10224 Since we just finished subtracting the lower bound of the range
10225 from the index expression, this comparison allows us to simultaneously
10226 check that the original index expression value is both greater than
10227 or equal to the minimum value of the range and less than or equal to
10228 the maximum value of the range. */
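/* Worked example (a sketch): for "case 5 ... case 10" the caller has
   already computed INDEX = value - 5 and RANGE = 5.  A value of 3 gives
   INDEX = (unsigned) -2, which compares greater than 5, so this single
   unsigned test also rejects values below the minimum.  */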
10230 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10231 default_label);
10233 /* If index is in range, it must fit in Pmode.
10234 Convert to Pmode so we can index with it. */
10235 if (mode != Pmode)
10236 index = convert_to_mode (Pmode, index, 1);
10238 /* Don't let a MEM slip thru, because then INDEX that comes
10239 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10240 and break_out_memory_refs will go to work on it and mess it up. */
10241 #ifdef PIC_CASE_VECTOR_ADDRESS
10242 if (flag_pic && GET_CODE (index) != REG)
10243 index = copy_to_mode_reg (Pmode, index);
10244 #endif
10246 /* If flag_force_addr were to affect this address
10247 it could interfere with the tricky assumptions made
10248 about addresses that contain label-refs,
10249 which may be valid only very near the tablejump itself. */
10250 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10251 GET_MODE_SIZE, because this indicates how large insns are. The other
10252 uses should all be Pmode, because they are addresses. This code
10253 could fail if addresses and insns are not the same size. */
10254 index = gen_rtx_PLUS (Pmode,
10255 gen_rtx_MULT (Pmode, index,
10256 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10257 gen_rtx_LABEL_REF (Pmode, table_label));
10258 #ifdef PIC_CASE_VECTOR_ADDRESS
10259 if (flag_pic)
10260 index = PIC_CASE_VECTOR_ADDRESS (index);
10261 else
10262 #endif
10263 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10264 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10265 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10266 RTX_UNCHANGING_P (vector) = 1;
10267 convert_move (temp, vector, 0);
10269 emit_jump_insn (gen_tablejump (temp, table_label));
10271 /* If we are generating PIC code or if the table is PC-relative, the
10272 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10273 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10274 emit_barrier ();
10277 int
10278 try_tablejump (index_type, index_expr, minval, range,
10279 table_label, default_label)
10280 tree index_type, index_expr, minval, range;
10281 rtx table_label, default_label;
10283 rtx index;
10285 if (! HAVE_tablejump)
10286 return 0;
10288 index_expr = fold (build (MINUS_EXPR, index_type,
10289 convert (index_type, index_expr),
10290 convert (index_type, minval)));
10291 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10292 emit_queue ();
10293 index = protect_from_queue (index, 0);
10294 do_pending_stack_adjust ();
10296 do_tablejump (index, TYPE_MODE (index_type),
10297 convert_modes (TYPE_MODE (index_type),
10298 TYPE_MODE (TREE_TYPE (range)),
10299 expand_expr (range, NULL_RTX,
10300 VOIDmode, 0),
10301 TREE_UNSIGNED (TREE_TYPE (range))),
10302 table_label, default_label);
10303 return 1;
10306 /* Nonzero if the mode is a valid vector mode for this architecture.
10307 This returns nonzero even if there is no hardware support for the
10308 vector mode, but we can emulate with narrower modes. */
10310 int
10311 vector_mode_valid_p (mode)
10312 enum machine_mode mode;
10314 enum mode_class class = GET_MODE_CLASS (mode);
10315 enum machine_mode innermode;
10317 /* Doh! What's going on? */
10318 if (class != MODE_VECTOR_INT
10319 && class != MODE_VECTOR_FLOAT)
10320 return 0;
10322 /* Hardware support. Woo hoo! */
10323 if (VECTOR_MODE_SUPPORTED_P (mode))
10324 return 1;
10326 innermode = GET_MODE_INNER (mode);
10328 /* We should probably return 1 if requesting V4DI and we have no DI,
10329 but do have V2DI; this is probably very unlikely, though. */
10331 /* If we have support for the inner mode, we can safely emulate it.
10332 We may not have V2DI, but we can emulate with a pair of DIs. */
10333 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10336 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10337 static rtx
10338 const_vector_from_tree (exp)
10339 tree exp;
10341 rtvec v;
10342 int units, i;
10343 tree link, elt;
10344 enum machine_mode inner, mode;
10346 mode = TYPE_MODE (TREE_TYPE (exp));
10348 if (is_zeros_p (exp))
10349 return CONST0_RTX (mode);
10351 units = GET_MODE_NUNITS (mode);
10352 inner = GET_MODE_INNER (mode);
10354 v = rtvec_alloc (units);
10356 link = TREE_VECTOR_CST_ELTS (exp);
10357 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10359 elt = TREE_VALUE (link);
10361 if (TREE_CODE (elt) == REAL_CST)
10362 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10363 inner);
10364 else
10365 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10366 TREE_INT_CST_HIGH (elt),
10367 inner);
10370 return gen_rtx_raw_CONST_VECTOR (mode, v);
10373 #include "gt-expr.h"